code-server/packages/server/src/cli.ts
import { field, logger } from "@coder/logger";
import { ServerMessage, SharedProcessActiveMessage } from "@coder/protocol/src/proto";
import { Command, flags } from "@oclif/command";
import * as fs from "fs";
import * as os from "os";
import * as path from "path";
import * as WebSocket from "ws";
import { createApp } from "./server";
import { requireModule, requireFork } from "./vscode/bootstrapFork";
import { SharedProcess, SharedProcessState } from "./vscode/sharedProcess";
import { setup as setupNativeModules } from "./modules";
import { fillFs } from "./fill";
import { isCli, serveStatic, buildDir } from "./constants";
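
/**
 * oclif command that starts the code-server web server. The hidden
 * "bootstrap-fork"/"fork" flags reuse this same entry point to load VS Code
 * modules in forked processes instead of starting the server.
 */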
export class Entry extends Command {
	public static description = "Start your own self-hosted browser-accessible VS Code";

	public static flags = {
		cert: flags.string(),
		"cert-key": flags.string(),
		"data-dir": flags.string({ char: "d" }),
		help: flags.help(),
		host: flags.string({ char: "h", default: "0.0.0.0" }),
		open: flags.boolean({ char: "o", description: "Open in browser on startup" }),
		port: flags.integer({ char: "p", default: 8080, description: "Port to bind on" }),
		version: flags.version({ char: "v" }),

		// Dev flags
		"bootstrap-fork": flags.string({ hidden: true }),
		"fork": flags.string({ hidden: true }),

		env: flags.string({ hidden: true }),
		args: flags.string({ hidden: true }),
	};

	public static args = [{
		name: "workdir",
		description: "Specify working dir",
		default: (): string => process.cwd(),
	}];

	public async run(): Promise<void> {
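		// Apply the fs fills from ./fill when running as the packaged CLI binary.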
		if (isCli) {
			fillFs();
		}

		const { args, flags } = this.parse(Entry);
		const dataDir = flags["data-dir"] || path.join(os.homedir(), ".vscode-remote");
		const workingDir = args["workdir"];

		setupNativeModules(dataDir);

		const builtInExtensionsDir = path.join(buildDir || path.join(__dirname, ".."), "build/extensions");

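		// The hidden --bootstrap-fork flag is used internally: instead of starting
		// the server, load the given VS Code module with the forwarded env and args.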
		if (flags["bootstrap-fork"]) {
			const modulePath = flags["bootstrap-fork"];
			if (!modulePath) {
				logger.error("No module path specified to fork!");
				process.exit(1);
			}

			Object.assign(process.env, flags.env ? JSON.parse(flags.env) : {});
			((flags.args ? JSON.parse(flags.args) : []) as string[]).forEach((arg, i) => {
				// [0] contains the binary running the script (`node` for example) and
				// [1] contains the script name, so the arguments come after that.
				process.argv[i + 2] = arg;
			});

			return requireModule(modulePath, dataDir, builtInExtensionsDir);
		}

		if (flags["fork"]) {
			const modulePath = flags["fork"];

			return requireFork(modulePath, JSON.parse(flags.args!), builtInExtensionsDir);
		}

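		// Running with a working directory that contains the build directory is
		// not supported.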
		if (buildDir && buildDir.startsWith(workingDir)) {
			logger.error("Cannot run binary inside of BUILD_DIR", field("build_dir", buildDir), field("cwd", process.cwd()));
			process.exit(1);
		}

		if (!fs.existsSync(dataDir)) {
			fs.mkdirSync(dataDir);
		}

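		// Load the native spdlog module up front, before the log directory is
		// configured through VSCODE_LOGS.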
		require("spdlog");

		const logDir = path.join(dataDir, "logs", new Date().toISOString().replace(/[-:.TZ]/g, ""));
		process.env.VSCODE_LOGS = logDir;

		const certPath = flags.cert;
		const certKeyPath = flags["cert-key"];

		if (certPath && !certKeyPath) {
			logger.error("'--cert-key' flag is required when specifying a certificate!");
			process.exit(1);
		}

		if (!certPath && certKeyPath) {
			logger.error("'--cert' flag is required when specifying a certificate key!");
			process.exit(1);
		}

		let certData: Buffer | undefined;
		let certKeyData: Buffer | undefined;

		if (typeof certPath !== "undefined" && typeof certKeyPath !== "undefined") {
			try {
				certData = fs.readFileSync(certPath);
			} catch (ex) {
				logger.error(`Failed to read certificate: ${ex.message}`);
				process.exit(1);
			}

			try {
				certKeyData = fs.readFileSync(certKeyPath);
			} catch (ex) {
				logger.error(`Failed to read certificate key: ${ex.message}`);
				process.exit(1);
			}
		}

		logger.info("\u001B[1mvscode-remote v1.0.0");
		// TODO: fill in appropriate doc url
		logger.info("Additional documentation: https://coder.com/docs");
		logger.info("Initializing", field("data-dir", dataDir), field("working-dir", workingDir), field("log-dir", logDir));

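		// Start VS Code's shared process alongside the server and tell every
		// connected WebSocket client its socket path once it reports Ready.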
		const sharedProcess = new SharedProcess(dataDir, builtInExtensionsDir);
		const sendSharedProcessReady = (socket: WebSocket): void => {
			const active = new SharedProcessActiveMessage();
			active.setSocketPath(sharedProcess.socketPath);
			active.setLogPath(logDir);
			const serverMessage = new ServerMessage();
			serverMessage.setSharedProcessActive(active);
			socket.send(serverMessage.serializeBinary());
		};
		sharedProcess.onState((event) => {
			if (event.state === SharedProcessState.Ready) {
				app.wss.clients.forEach((c) => sendSharedProcessReady(c));
			}
		});

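		// NOTE: the password is currently hard-coded; it is printed to the log on
		// startup and passed to the HTTP app below.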
		const password = "023450wf0951";
		const hasCustomHttps = certData && certKeyData;
		const app = await createApp((app) => {
			app.use((req, res, next) => {
				res.on("finish", () => {
					logger.trace(`\u001B[1m${req.method} ${res.statusCode} \u001B[0m${req.url}`, field("host", req.hostname), field("ip", req.ip));
				});

				next();
			});

			// If we're not running from the binary and we aren't serving the static
			// pre-built version, use webpack to serve the web files.
			if (!isCli && !serveStatic) {
				const webpackConfig = require(path.join(__dirname, "..", "..", "web", "webpack.config.js"));
				const compiler = require("webpack")(webpackConfig);

				app.use(require("webpack-dev-middleware")(compiler, {
					logger,
					publicPath: webpackConfig.output.publicPath,
					stats: webpackConfig.stats,
				}));
				app.use(require("webpack-hot-middleware")(compiler));
			}
		}, {
			builtInExtensionsDirectory: builtInExtensionsDir,
			dataDirectory: dataDir,
			workingDirectory: workingDir,
		}, password, hasCustomHttps ? {
			key: certKeyData,
			cert: certData,
		} : undefined);

		logger.info("Starting webserver...", field("host", flags.host), field("port", flags.port));
		app.server.listen(flags.port, flags.host);

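		// Log WebSocket connections and replay the shared-process-ready message to
		// clients that connect after the shared process is already up.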
		let clientId = 1;
		app.wss.on("connection", (ws, req) => {
			const id = clientId++;

			if (sharedProcess.state === SharedProcessState.Ready) {
				sendSharedProcessReady(ws);
			}

			logger.info(`WebSocket opened \u001B[0m${req.url}`, field("client", id), field("ip", req.socket.remoteAddress));

			ws.on("close", (code) => {
				logger.info(`WebSocket closed \u001B[0m${req.url}`, field("client", id), field("code", code));
			});
		});

		if (!flags["cert-key"] && !flags.cert) {
			logger.warn("No certificate specified. \u001B[1mThis could be insecure.");
			// TODO: fill in appropriate doc url
			logger.warn("Documentation on securing your setup: https://coder.com/docs");
		}

		logger.info(" ");
		logger.info(`Password:\u001B[1m ${password}`);
		logger.info(" ");
		logger.info("Started (click the link below to open):");
		logger.info(`http://localhost:${flags.port}/`);
		logger.info(" ");
	}
}

Entry.run(undefined, {
	root: buildDir || __dirname,
	// @ts-ignore
}).catch(require("@oclif/errors/handle"));