mirror of https://git.tuxpa.in/a/code-server.git

commit 9fdfacb314: Quality check
parent: e8cb6ffaa0
@@ -1,18 +1,14 @@
-/* global require, global, process, __dirname */
 if (!global.NBIN_LOADED) {
   try {
     const nbin = require("nbin");
     nbin.shimNativeFs("{{ROOT_PATH}}");
     global.NBIN_LOADED = true;
-
     const path = require("path");
     const rg = require("vscode-ripgrep");
     rg.binaryRgPath = rg.rgPath;
     rg.rgPath = path.join(
       require("os").tmpdir(),
-      `code-server/${path.basename(rg.binaryRgPath)}`,
+      `code-server/${path.basename(rg.binaryRgPath)}`
     );
-  } catch (error) {
-    // Not in the binary.
-  }
+  } catch (error) { /* Not in the binary. */ }
 }

src/api.ts (42 lines changed)

@@ -60,7 +60,6 @@ export const vscodeApi = (serviceCollection: ServiceCollection): typeof vscode =
   FileSystemError: extHostTypes.FileSystemError,
   FileType: FileType,
   Uri: URI,
-
   commands: {
     executeCommand: (commandId: string, ...args: any[]): any => {
       return commandService.executeCommand(commandId, ...args);
@@ -69,7 +68,6 @@ export const vscodeApi = (serviceCollection: ServiceCollection): typeof vscode =
       return CommandsRegistry.registerCommand(id, command);
     },
   },
-
   window: {
     registerTreeDataProvider: (id: string, dataProvider: ITreeViewDataProvider): void => {
       const view = viewsRegistry.getView(id);
@@ -81,7 +79,6 @@ export const vscodeApi = (serviceCollection: ServiceCollection): typeof vscode =
       notificationService.error(message);
     },
   },
-
   workspace: {
     registerFileSystemProvider: (scheme: string, provider: vscode.FileSystemProvider): IDisposable => {
       return fileService.registerProvider(scheme, new FileSystemProvider(provider));
@@ -95,7 +92,6 @@ export const vscodeApi = (serviceCollection: ServiceCollection): typeof vscode =
  */
 export const coderApi = (serviceCollection: ServiceCollection): typeof coder => {
   const getService = <T>(id: ServiceIdentifier<T>): T => serviceCollection.get<T>(id) as T;
-
   return {
     workbench: {
       action: Action,
@@ -103,13 +99,8 @@ export const coderApi = (serviceCollection: ServiceCollection): typeof coder =>
       commandRegistry: CommandsRegistry,
       actionsRegistry: Registry.as<IWorkbenchActionRegistry>(ActionExtensions.WorkbenchActions),
       registerView: (viewId, viewName, containerId, containerName, icon): void => {
-        const viewContainersRegistry = Registry.as<IViewContainersRegistry>(ViewsExtensions.ViewContainersRegistry);
-        const viewsRegistry = Registry.as<IViewsRegistry>(ViewsExtensions.ViewsRegistry);
-        const container = viewContainersRegistry.registerViewContainer(containerId);
-
         const cssClass = `extensionViewlet-${containerId}`;
         const id = `workbench.view.extension.${containerId}`;
-
         class CustomViewlet extends ViewContainerViewlet {
           public constructor(
             @IConfigurationService configurationService: IConfigurationService,
@@ -127,44 +118,32 @@ export const coderApi = (serviceCollection: ServiceCollection): typeof coder =>
           }
         }

-        const viewletDescriptor = new ViewletDescriptor(
-          CustomViewlet as any,
-          id,
-          containerName,
-          cssClass,
-          undefined,
-          URI.parse(icon),
+        Registry.as<ViewletRegistry>(ViewletExtensions.Viewlets).registerViewlet(
+          new ViewletDescriptor(CustomViewlet as any, id, containerName, cssClass, undefined, URI.parse(icon)),
         );

-        Registry.as<ViewletRegistry>(ViewletExtensions.Viewlets).registerViewlet(viewletDescriptor);
-
-        const registry = Registry.as<IWorkbenchActionRegistry>(ActionExtensions.WorkbenchActions);
-        registry.registerWorkbenchAction(
+        Registry.as<IWorkbenchActionRegistry>(ActionExtensions.WorkbenchActions).registerWorkbenchAction(
           new SyncActionDescriptor(OpenCustomViewletAction as any, id, localize("showViewlet", "Show {0}", containerName)),
           "View: Show {0}",
           localize("view", "View"),
         );

-        // Generate CSS to show the icon in the activity bar
+        // Generate CSS to show the icon in the activity bar.
         const iconClass = `.monaco-workbench .activitybar .monaco-action-bar .action-label.${cssClass}`;
         createCSSRule(iconClass, `-webkit-mask: url('${icon}') no-repeat 50% 50%`);

-        const views = [{
+        const container = Registry.as<IViewContainersRegistry>(ViewsExtensions.ViewContainersRegistry).registerViewContainer(containerId);
+        Registry.as<IViewsRegistry>(ViewsExtensions.ViewsRegistry).registerViews([{
           id: viewId,
           name: viewName,
           ctorDescriptor: { ctor: CustomTreeViewPanel },
           treeView: getService(IInstantiationService).createInstance(CustomTreeView as any, viewId, container),
-        }] as ITreeViewDescriptor[];
-        viewsRegistry.registerViews(views, container);
+        }] as ITreeViewDescriptor[], container);
       },
-      // Even though the enums are exactly the same, Typescript says they are
-      // not assignable to each other, so use `any`. I don't know if there is a
-      // way around this.
       menuRegistry: MenuRegistry as any,
       statusbarService: getService(IStatusbarService) as any,
       notificationService: getService(INotificationService),
       terminalService: getService(ITerminalService),

       onFileCreate: (cb): void => {
         getService<IFileService>(IFileService).onAfterOperation((e) => {
           if (e.operation === FileOperation.CREATE) {
@@ -198,7 +177,6 @@ export const coderApi = (serviceCollection: ServiceCollection): typeof coder =>
           }
         });
       },
-
       onModelAdded: (cb): void => {
         getService<IModelService>(IModelService).onModelAdded((e) => {
           cb(e.uri.path, e.getLanguageIdentifier().language);
@@ -214,7 +192,6 @@ export const coderApi = (serviceCollection: ServiceCollection): typeof coder =>
           cb(e.model.uri.path, e.model.getLanguageIdentifier().language, e.oldModeId);
         });
       },
-
       onTerminalAdded: (cb): void => {
         getService<ITerminalService>(ITerminalService).onInstanceCreated(() => cb());
       },
@@ -222,7 +199,6 @@ export const coderApi = (serviceCollection: ServiceCollection): typeof coder =>
        getService<ITerminalService>(ITerminalService).onInstanceDisposed(() => cb());
       },
     },
-
     // @ts-ignore
     MenuId: MenuId,
     Severity: Severity,
@@ -250,9 +226,7 @@ class FileSystemProvider implements IFileSystemProvider {
   public readonly capabilities: FileSystemProviderCapabilities;
   public readonly onDidChangeCapabilities: Event<void> = Event.None;

-  public constructor(
-    private readonly provider: vscode.FileSystemProvider,
-  ) {
+  public constructor(private readonly provider: vscode.FileSystemProvider) {
     this.capabilities = FileSystemProviderCapabilities.Readonly;
   }

@@ -43,9 +43,6 @@ class Watcher extends DiskFileSystemProvider {
   }
 }

-/**
- * See: src/vs/platform/remote/common/remoteAgentFileSystemChannel.ts.
- */
 export class FileProviderChannel implements IServerChannel, IDisposable {
   private readonly provider: DiskFileSystemProvider;
   private readonly watchers = new Map<string, Watcher>();
@@ -175,9 +172,6 @@ export class FileProviderChannel implements IServerChannel, IDisposable {
   }
 }

-/**
- * See: src/vs/workbench/services/remote/common/remoteAgentEnvironmentChannel.ts.
- */
 export class ExtensionEnvironmentChannel implements IServerChannel {
   public constructor(
     private readonly environment: IEnvironmentService,
@@ -245,7 +239,6 @@ export class ExtensionEnvironmentChannel implements IServerChannel {
     };

     return Promise.all([scanBuiltin(), scanInstalled()]).then((allExtensions) => {
-      // It's possible to get duplicates.
       const uniqueExtensions = new Map<string, IExtensionDescription>();
       allExtensions.forEach((multipleExtensions) => {
         multipleExtensions.forEach((extensions) => {
@@ -254,18 +247,13 @@ export class ExtensionEnvironmentChannel implements IServerChannel {
             if (uniqueExtensions.has(id)) {
               const oldPath = uniqueExtensions.get(id)!.extensionLocation.fsPath;
               const newPath = extension.extensionLocation.fsPath;
-              this.log.warn(
-                `Extension ${id} in ${oldPath} has been overridden ${newPath}`,
-              );
+              this.log.warn(`${oldPath} has been overridden ${newPath}`);
             }
             uniqueExtensions.set(id, extension);
           });
         });
       });
-      const finalExtensions = <IExtensionDescription[]>[];
-      uniqueExtensions.forEach((e) => finalExtensions.push(e));
-      return finalExtensions;
+      return Array.from(uniqueExtensions.values());
     });
   }


src/cli.ts (33 lines changed)

@@ -92,12 +92,7 @@ const main = async (): Promise<void> => {
   const version = `${(pkg as any).codeServerVersion || "development"}-vsc${pkg.version}`;
   if (args.help) {
     const executable = `${product.applicationName}${os.platform() === "win32" ? ".exe" : ""}`;
-    return console.log(buildHelpMessage(
-      product.nameLong, executable,
-      version,
-      undefined,
-      false,
-    ));
+    return console.log(buildHelpMessage(product.nameLong, executable, version, undefined, false));
   }

   if (args.version) {
@@ -116,26 +111,22 @@ const main = async (): Promise<void> => {
   if (shouldSpawnCliProcess()) {
     const cli = await new Promise<IMainCli>((c, e) => require(["vs/code/node/cliProcessMain"], c, e));
     await cli.main(args);
-    // There is some WriteStream instance keeping it open so force an exit.
-    return process.exit(0);
+    return process.exit(0); // There is a WriteStream instance keeping it open.
   }

+  const extra = args["_"] || [];
   const options = {
-    host: args.host,
     allowHttp: args["allow-http"],
+    auth: typeof args.auth !== "undefined" ? args.auth : true,
     cert: args.cert,
     certKey: args["cert-key"],
-    auth: typeof args.auth !== "undefined" ? args.auth : true,
+    folderUri: extra.length > 1 ? extra[extra.length - 1] : undefined,
+    host: args.host,
     password: process.env.PASSWORD,
-    folderUri: args["_"] && args["_"].length > 1
-      ? args["_"][args["_"].length - 1]
-      : undefined,
   };

   if (!options.host) {
-    options.host = !options.auth || options.allowHttp
-      ? "localhost"
-      : "0.0.0.0";
+    options.host = !options.auth || options.allowHttp ? "localhost" : "0.0.0.0";
   }

   let usingGeneratedCert = false;
@@ -152,18 +143,16 @@ const main = async (): Promise<void> => {
     usingGeneratedPassword = true;
   }

-  const webviewPort = typeof args["webview-port"] !== "undefined"
-    && parseInt(args["webview-port"], 10) || 8444;
+  const webviewPort = args["webview-port"];
   const webviewServer = new WebviewServer({
     ...options,
-    port: webviewPort,
+    port: typeof webviewPort !== "undefined" && parseInt(webviewPort, 10) || 8444,
     socket: args["webview-socket"],
   });

-  const port = typeof args.port !== "undefined" && parseInt(args.port, 10) || 8443;
   const server = new MainServer({
     ...options,
-    port,
+    port: typeof args.port !== "undefined" && parseInt(args.port, 10) || 8443,
     socket: args.socket,
   }, webviewServer, args);

@@ -196,7 +185,7 @@ const main = async (): Promise<void> => {

   if (!args.socket && args.open) {
     // The web socket doesn't seem to work if using 0.0.0.0.
-    const openAddress = `http://localhost:${port}`;
+    const openAddress = `http://localhost:${server.options.port}`;
     await open(openAddress).catch(console.error);
     console.log(` - Opened ${openAddress}`);
   }
@@ -6,7 +6,7 @@ import { Emitter } from "vs/base/common/event";
 import { ISocket } from "vs/base/parts/ipc/common/ipc.net";
 import { NodeSocket } from "vs/base/parts/ipc/node/ipc.net";
 import { ILogService } from "vs/platform/log/common/log";
-import { IExtHostReadyMessage, IExtHostSocketMessage } from "vs/workbench/services/extensions/common/extensionHostProtocol";
+import { IExtHostReadyMessage } from "vs/workbench/services/extensions/common/extensionHostProtocol";

 import { Protocol } from "vs/server/src/protocol";
 import { uriTransformerPath } from "vs/server/src/util";
@@ -15,17 +15,11 @@ export abstract class Connection {
   protected readonly _onClose = new Emitter<void>();
   public readonly onClose = this._onClose.event;
   protected disposed: boolean = false;

   public constructor(protected protocol: Protocol) {}

   /**
    * Set up the connection on a new socket.
    */
   public abstract reconnect(socket: ISocket, buffer: VSBuffer): void;
-
-  /**
-   * Clean up the connection.
-   */
   protected abstract dispose(): void;
 }

@@ -62,16 +56,10 @@ export class ManagementConnection extends Connection {
   }
 }

-/**
- * Manage the extension host process.
- */
 export class ExtensionHostConnection extends Connection {
   private process: cp.ChildProcess;

-  public constructor(
-    protocol: Protocol, buffer: VSBuffer,
-    private readonly log: ILogService,
-  ) {
+  public constructor(protocol: Protocol, buffer: VSBuffer, private readonly log: ILogService) {
     super(protocol);
     protocol.dispose();
     this.process = this.spawn(buffer);
@@ -96,23 +84,17 @@ export class ExtensionHostConnection extends Connection {
   private sendInitMessage(buffer: VSBuffer): void {
     const socket = this.protocol.getUnderlyingSocket();
     socket.pause();
-    const initMessage: IExtHostSocketMessage = {
+    this.process.send({
       type: "VSCODE_EXTHOST_IPC_SOCKET",
       initialDataChunk: (buffer.buffer as Buffer).toString("base64"),
       skipWebSocketFrames: this.protocol.getSocket() instanceof NodeSocket,
-    };
-
-    this.process.send(initMessage, socket);
+    }, socket);
   }

   private spawn(buffer: VSBuffer): cp.ChildProcess {
     const proc = cp.fork(
       getPathFromAmdModule(require, "bootstrap-fork"),
-      [
-        "--type=extensionHost",
-        `--uriTransformerPath=${uriTransformerPath()}`
-      ],
+      [ "--type=extensionHost", `--uriTransformerPath=${uriTransformerPath()}` ],
       {
         env: {
           ...process.env,
@@ -129,13 +111,8 @@ export class ExtensionHostConnection extends Connection {

     proc.on("error", () => this.dispose());
     proc.on("exit", () => this.dispose());
-    proc.stdout.setEncoding("utf8");
-    proc.stderr.setEncoding("utf8");
-
-    proc.stdout.on("data", (d) => this.log.info("Extension host stdout", d));
-    proc.stderr.on("data", (d) => this.log.error("Extension host stderr", d));
+    proc.stdout.setEncoding("utf8").on("data", (d) => this.log.info("Extension host stdout", d));
+    proc.stderr.setEncoding("utf8").on("data", (d) => this.log.error("Extension host stderr", d));

     proc.on("message", (event) => {
       if (event && event.type === "__$console") {
         const severity = this.log[event.severity] ? event.severity : "info";
@@ -149,8 +126,7 @@ export class ExtensionHostConnection extends Connection {
         this.sendInitMessage(buffer);
       }
     };
-    proc.on("message", listen);

-    return proc;
+    return proc.on("message", listen);
   }
 }
@@ -73,10 +73,7 @@ export class Protocol extends PersistentProtocol {
    * TODO: This ignores the authentication process entirely for now.
    */
   private authenticate(_message: AuthRequest): void {
-    this.sendMessage({
-      type: "sign",
-      data: "",
-    });
+    this.sendMessage({ type: "sign", data: "" });
   }

   /**

src/server.ts (164 lines changed)

@@ -108,14 +108,11 @@ export interface ServerOptions {
 }

 export abstract class Server {
-  // The underlying web server.
   protected readonly server: http.Server | https.Server;
-
   protected rootPath = path.resolve(__dirname, "../../../..");
-
   private listenPromise: Promise<string> | undefined;

-  public constructor(protected readonly options: ServerOptions) {
+  public constructor(public readonly options: ServerOptions) {
     if (this.options.cert && this.options.certKey) {
       useHttpsTransformer();
       const httpolyglot = require.__$__nodeRequire(path.resolve(__dirname, "../node_modules/httpolyglot/lib/index")) as typeof import("httpolyglot");
@@ -167,8 +164,7 @@
   ): Promise<Response>;

   protected async getResource(filePath: string): Promise<Response> {
-    const content = await util.promisify(fs.readFile)(filePath);
-    return { content, filePath };
+    return { content: await util.promisify(fs.readFile)(filePath), filePath };
   }

   private onRequest = async (request: http.IncomingMessage, response: http.ServerResponse): Promise<void> => {
@@ -208,12 +204,10 @@
     } else if (base === "") { // Happens if it's a plain `domain.com`.
       base = "/";
     }
-    if (requestPath === "/") { // Trailing slash, like `domain.com/login/`.
-      requestPath = "";
-    } else if (requestPath !== "") { // "" will become "." with normalize.
+    base = path.normalize(base);
+    if (requestPath !== "") { // "" will become "." with normalize.
       requestPath = path.normalize(requestPath);
     }
-    base = path.normalize(base);

     switch (base) {
       case "/":
@@ -227,8 +221,7 @@
       case "/login":
         if (!this.options.auth) {
           throw new HttpError("Not found", HttpCode.NotFound);
-        }
-        if (requestPath === "") {
+        } else if (requestPath === "") {
           return this.tryLogin(request);
         }
         this.ensureGet(request);
@@ -249,27 +242,19 @@
       this.ensureGet(request);
       return { redirect: "https://" + request.headers.host + "/" };
     }
-
     if (request.method === "POST") {
       const data = await this.getData<LoginPayload>(request);
       if (this.authenticate(request, data)) {
         return {
           redirect: "https://" + request.headers.host + "/",
-          headers: {
-            "Set-Cookie": `password=${data.password}`,
-          }
+          headers: {"Set-Cookie": `password=${data.password}` }
         };
       }
-      let userAgent = request.headers["user-agent"];
-      const timestamp = Math.floor(new Date().getTime() / 1000);
-      if (Array.isArray(userAgent)) {
-        userAgent = userAgent.join(", ");
-      }
       console.error("Failed login attempt", JSON.stringify({
         xForwardedFor: request.headers["x-forwarded-for"],
         remoteAddress: request.connection.remoteAddress,
-        userAgent,
-        timestamp,
+        userAgent: request.headers["user-agent"],
+        timestamp: Math.floor(new Date().getTime() / 1000),
       }));
       return this.getLogin("Invalid password", data);
     }
|
@ -279,23 +264,16 @@ export abstract class Server {
|
||||||
|
|
||||||
private async getLogin(error: string = "", payload?: LoginPayload): Promise<Response> {
|
private async getLogin(error: string = "", payload?: LoginPayload): Promise<Response> {
|
||||||
const filePath = path.join(this.rootPath, "out/vs/server/src/login/login.html");
|
const filePath = path.join(this.rootPath, "out/vs/server/src/login/login.html");
|
||||||
let content = await util.promisify(fs.readFile)(filePath, "utf8");
|
const content = (await util.promisify(fs.readFile)(filePath, "utf8"))
|
||||||
if (error) {
|
.replace("{{ERROR}}", error)
|
||||||
content = content.replace("{{ERROR}}", error)
|
.replace("display:none", error ? "display:block" : "display:none")
|
||||||
.replace("display:none", "display:block");
|
.replace('value=""', `value="${payload && payload.password || ""}"`);
|
||||||
}
|
|
||||||
if (payload && payload.password) {
|
|
||||||
content = content.replace('value=""', `value="${payload.password}"`);
|
|
||||||
}
|
|
||||||
return { content, filePath };
|
return { content, filePath };
|
||||||
}
|
}
|
||||||
|
|
||||||
private ensureGet(request: http.IncomingMessage): void {
|
private ensureGet(request: http.IncomingMessage): void {
|
||||||
if (request.method !== "GET") {
|
if (request.method !== "GET") {
|
||||||
throw new HttpError(
|
throw new HttpError(`Unsupported method ${request.method}`, HttpCode.BadRequest);
|
||||||
`Unsupported method ${request.method}`,
|
|
||||||
HttpCode.BadRequest,
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -357,15 +335,10 @@
 }

 export class MainServer extends Server {
-  // Used to notify the IPC server that there is a new client.
   public readonly _onDidClientConnect = new Emitter<ClientConnectionEvent>();
   public readonly onDidClientConnect = this._onDidClientConnect.event;
-
-  // This is separate instead of just extending this class since we can't
-  // use properties in the super call. This manages channels.
   private readonly ipc = new IPCServer(this.onDidClientConnect);

-  // Persistent connections. These can reconnect within a timeout.
   private readonly connections = new Map<ConnectionType, Map<string, Connection>>();

   private readonly services = new ServiceCollection();
@@ -377,7 +350,6 @@
     args: ParsedArgs,
   ) {
     super(options);
-
     this.server.on("upgrade", async (request, socket) => {
       const protocol = this.createProtocol(request, socket);
       try {
@@ -393,12 +365,10 @@

   public async listen(): Promise<string> {
     const environment = (this.services.get(IEnvironmentService) as EnvironmentService);
-    const mkdirs = Promise.all([
-      environment.extensionsPath,
-    ].map((p) => mkdirp(p)));
-    const [address] = await Promise.all([
-      super.listen(),
-      mkdirs,
+    const [address] = await Promise.all<string>([
+      super.listen(), ...[
+        environment.extensionsPath,
+      ].map((p) => mkdirp(p).then(() => p)),
     ]);
     return address;
   }
@@ -426,22 +396,18 @@

   private async getRoot(request: http.IncomingMessage, parsedUrl: url.UrlWithParsedQuery): Promise<Response> {
     const filePath = path.join(this.rootPath, "out/vs/code/browser/workbench/workbench.html");
-    let content = await util.promisify(fs.readFile)(filePath, "utf8");
-    const remoteAuthority = request.headers.host as string;
-    const transformer = getUriTransformer(remoteAuthority);
-
-    await Promise.all([
+    let [content] = await Promise.all([
+      util.promisify(fs.readFile)(filePath, "utf8"),
       this.webviewServer.listen(),
       this.servicesPromise,
     ]);

     const webviewEndpoint = this.webviewServer.address(request);

     const cwd = process.env.VSCODE_CWD || process.cwd();
     const workspacePath = parsedUrl.query.workspace as string | undefined;
     const folderPath = !workspacePath ? parsedUrl.query.folder as string | undefined || this.options.folderUri || cwd: undefined;
+    const remoteAuthority = request.headers.host as string;
+    const transformer = getUriTransformer(remoteAuthority);
     const options: Options = {
       WORKBENCH_WEB_CONGIGURATION: {
         workspaceUri: workspacePath
@@ -473,83 +438,58 @@
     if (request.headers.upgrade !== "websocket") {
       throw new Error("HTTP/1.1 400 Bad Request");
     }
-    const options = {
-      reconnectionToken: "",
-      reconnection: false,
-      skipWebSocketFrames: false,
-    };
-
-    if (request.url) {
-      const query = url.parse(request.url, true).query;
-      if (query.reconnectionToken) {
-        options.reconnectionToken = query.reconnectionToken as string;
-      }
-      if (query.reconnection === "true") {
-        options.reconnection = true;
-      }
-      if (query.skipWebSocketFrames === "true") {
-        options.skipWebSocketFrames = true;
-      }
-    }
-
-    return new Protocol(
-      request.headers["sec-websocket-key"] as string,
-      socket,
-      options,
-    );
+    const query = request.url ? url.parse(request.url, true).query : {};
+    return new Protocol(<string>request.headers["sec-websocket-key"], socket, {
+      reconnectionToken: <string>query.reconnectionToken || "",
+      reconnection: query.reconnection === "true",
+      skipWebSocketFrames: query.skipWebSocketFrames === "true",
+    });
   }

   private async connect(message: ConnectionTypeRequest, protocol: Protocol): Promise<void> {
     switch (message.desiredConnectionType) {
       case ConnectionType.ExtensionHost:
       case ConnectionType.Management:
-        const debugPort = await this.getDebugPort();
-        const ok = message.desiredConnectionType === ConnectionType.ExtensionHost
-          ? (debugPort ? { debugPort } : {})
-          : { type: "ok" };
-
         if (!this.connections.has(message.desiredConnectionType)) {
           this.connections.set(message.desiredConnectionType, new Map());
         }

         const connections = this.connections.get(message.desiredConnectionType)!;
-        const token = protocol.options.reconnectionToken;
+        const ok = async () => {
+          return message.desiredConnectionType === ConnectionType.ExtensionHost
+            ? { debugPort: await this.getDebugPort() }
+            : { type: "ok" };
+        };

+        const token = protocol.options.reconnectionToken;
         if (protocol.options.reconnection && connections.has(token)) {
-          protocol.sendMessage(ok);
+          protocol.sendMessage(await ok());
           const buffer = protocol.readEntireBuffer();
           protocol.dispose();
           return connections.get(token)!.reconnect(protocol.getSocket(), buffer);
-        }
-
-        if (protocol.options.reconnection || connections.has(token)) {
+        } else if (protocol.options.reconnection || connections.has(token)) {
           throw new Error(protocol.options.reconnection
             ? "Unrecognized reconnection token"
             : "Duplicate reconnection token"
           );
         }

-        protocol.sendMessage(ok);
+        protocol.sendMessage(await ok());

         let connection: Connection;
         if (message.desiredConnectionType === ConnectionType.Management) {
           connection = new ManagementConnection(protocol);
           this._onDidClientConnect.fire({
-            protocol,
-            onDidClientDisconnect: connection.onClose,
+            protocol, onDidClientDisconnect: connection.onClose,
           });
         } else {
           const buffer = protocol.readEntireBuffer();
           connection = new ExtensionHostConnection(
-            protocol, buffer,
-            this.services.get(ILogService) as ILogService,
+            protocol, buffer, this.services.get(ILogService) as ILogService,
           );
         }
-        connections.set(protocol.options.reconnectionToken, connection);
-        connection.onClose(() => {
-          connections.delete(protocol.options.reconnectionToken);
-        });
+        connections.set(token, connection);
+        connection.onClose(() => connections.delete(token));
         break;
       case ConnectionType.Tunnel: return protocol.tunnel();
       default: throw new Error("Unrecognized connection type");
@@ -557,14 +497,11 @@
   }

   private async initializeServices(args: ParsedArgs): Promise<void> {
+    const router = new StaticRouter((ctx: any) => ctx.clientId === "renderer");
     const environmentService = new EnvironmentService(args, process.execPath);
     const logService = new SpdLogService(RemoteExtensionLogFileName, environmentService.logsPath, getLogLevel(environmentService));
     this.ipc.registerChannel("loglevel", new LogLevelSetterChannel(logService));

-    const router = new StaticRouter((context: any) => {
-      return context.clientId === "renderer";
-    });
-
     this.services.set(ILogService, logService);
     this.services.set(IEnvironmentService, environmentService);
     this.services.set(IConfigurationService, new SyncDescriptor(ConfigurationService, [environmentService.machineSettingsResource]));
@@ -594,11 +531,9 @@
     this.services.set(IDialogService, new DialogChannelClient(this.ipc.getChannel("dialog", router)));
     this.services.set(IExtensionManagementService, new SyncDescriptor(ExtensionManagementService));

-    const instantiationService = new InstantiationService(this.services);
-    this.services.set(ILocalizationsService, instantiationService.createInstance(LocalizationsService));
-
-    return new Promise((resolve) => {
+    await new Promise((resolve) => {
+      const instantiationService = new InstantiationService(this.services);
+      this.services.set(ILocalizationsService, instantiationService.createInstance(LocalizationsService));
       instantiationService.invokeFunction(() => {
         instantiationService.createInstance(LogsDataCleaner);
         this.ipc.registerChannel(REMOTE_FILE_SYSTEM_CHANNEL_NAME, new FileProviderChannel(environmentService, logService));
@@ -612,9 +547,7 @@
         this.ipc.registerChannel("gallery", galleryChannel);
         const telemetryChannel = new TelemetryChannel(telemetryService);
         this.ipc.registerChannel("telemetry", telemetryChannel);
-        // tslint:disable-next-line no-unused-expression
-        new ErrorTelemetry(telemetryService);
-        resolve();
+        resolve(new ErrorTelemetry(telemetryService));
       });
     });
   }
@@ -633,9 +566,6 @@ export class WebviewServer extends Server {
     requestPath: string,
   ): Promise<Response> {
     const webviewPath = path.join(this.rootPath, "out/vs/workbench/contrib/webview/browser/pre");
-    if (requestPath === "") {
-      requestPath = "/index.html";
-    }
-    return this.getResource(path.join(webviewPath, base, requestPath));
+    return this.getResource(path.join(webviewPath, base, requestPath || "/index.html"));
   }
 }

src/tar.ts (268 lines changed)

@@ -1,7 +1,7 @@
 import * as fs from "fs";
 import * as path from "path";
 import * as tarStream from "tar-stream";
-import { promisify } from "util";
+import * as util from "util";

 import * as nls from "vs/nls";
 import * as vszip from "vs/base/node/zip";
@@ -14,7 +14,6 @@ const vszipBuffer = vszip.buffer;

 export interface IExtractOptions {
   overwrite?: boolean;
-
   /**
    * Source path within the TAR/ZIP archive. Only the files
    * contained in this path will be extracted.
@ -28,197 +27,134 @@ export interface IFile {
|
||||||
localPath?: string;
|
localPath?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
export const tar = async (tarPath: string, files: IFile[]): Promise<string> => {
|
||||||
* Override the standard VS Code behavior for zipping extensions to use the TAR
|
const pack = tarStream.pack();
|
||||||
* format instead of ZIP.
|
const chunks: Buffer[] = [];
|
||||||
*/
|
const ended = new Promise<Buffer>((resolve) => {
|
||||||
export const zip = (tarPath: string, files: IFile[]): Promise<string> => {
|
pack.on("end", () => resolve(Buffer.concat(chunks)));
|
||||||
return new Promise<string>((c, e): void => {
|
});
|
||||||
const pack = tarStream.pack();
|
pack.on("data", (chunk: Buffer) => chunks.push(chunk));
|
||||||
const chunks: Buffer[] = [];
|
for (let i = 0; i < files.length; i++) {
|
||||||
const ended = new Promise<Buffer>((res): void => {
|
const file = files[i];
|
||||||
pack.on("end", () => {
|
pack.entry({ name: file.path }, file.contents);
|
||||||
res(Buffer.concat(chunks));
|
}
|
||||||
});
|
pack.finalize();
|
||||||
});
|
await util.promisify(fs.writeFile)(tarPath, await ended);
|
||||||
pack.on("data", (chunk) => {
|
return tarPath;
|
||||||
chunks.push(chunk as Buffer);
|
};
|
||||||
});
|
|
||||||
for (let i = 0; i < files.length; i++) {
|
export const extract = async (archivePath: string, extractPath: string, options: IExtractOptions = {}, token: CancellationToken): Promise<void> => {
|
||||||
const file = files[i];
|
try {
|
||||||
pack.entry({
|
await extractTar(archivePath, extractPath, options, token);
|
||||||
name: file.path,
|
} catch (error) {
|
||||||
}, file.contents);
|
if (error.toString().includes("Invalid tar header")) {
|
||||||
|
await vszipExtract(archivePath, extractPath, options, token);
|
||||||
}
|
}
|
||||||
pack.finalize();
|
}
|
||||||
|
|
||||||
ended.then((buffer) => {
|
|
||||||
return promisify(fs.writeFile)(tarPath, buffer);
|
|
||||||
}).then(() => {
|
|
||||||
c(tarPath);
|
|
||||||
}).catch((ex) => {
|
|
||||||
e(ex);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
|
||||||
* Override the standard VS Code behavior for extracting archives to first
|
|
||||||
* attempt to process the archive as a TAR and then fall back to the original
|
|
||||||
* implementation for processing ZIPs.
|
|
||||||
*/
|
|
||||||
export const extract = (archivePath: string, extractPath: string, options: IExtractOptions = {}, token: CancellationToken): Promise<void> => {
|
|
||||||
return new Promise<void>((c, e): void => {
|
|
||||||
extractTar(archivePath, extractPath, options, token).then(c).catch((ex) => {
|
|
||||||
if (!ex.toString().includes("Invalid tar header")) {
|
|
||||||
e(ex);
|
|
||||||
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
vszipExtract(archivePath, extractPath, options, token).then(c).catch(e);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Override the standard VS Code behavior for buffering archives to first
|
|
||||||
* process the Buffer as a TAR and then fall back to the original
|
|
||||||
* implementation for processing ZIPs.
|
|
||||||
*/
|
|
||||||
export const buffer = (targetPath: string, filePath: string): Promise<Buffer> => {
|
export const buffer = (targetPath: string, filePath: string): Promise<Buffer> => {
|
||||||
return new Promise<Buffer>((c, e): void => {
|
return new Promise<Buffer>(async (resolve, reject) => {
|
||||||
let done: boolean = false;
|
|
||||||
extractAssets(targetPath, new RegExp(filePath), (assetPath: string, data: Buffer) => {
|
|
||||||
if (path.normalize(assetPath) === path.normalize(filePath)) {
|
|
||||||
done = true;
|
|
||||||
c(data);
|
|
||||||
}
|
|
||||||
}).then(() => {
|
|
||||||
if (!done) {
|
|
||||||
e("couldn't find asset " + filePath);
|
|
||||||
}
|
|
||||||
}).catch((ex) => {
|
|
||||||
if (!ex.toString().includes("Invalid tar header")) {
|
|
||||||
e(ex);
|
|
||||||
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
vszipBuffer(targetPath, filePath).then(c).catch(e);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Override the standard VS Code behavior for extracting assets from archive
|
|
||||||
* Buffers to use the TAR format instead of ZIP.
|
|
||||||
*/
|
|
||||||
const extractAssets = (tarPath: string, match: RegExp, callback: (path: string, data: Buffer) => void): Promise<void> => {
|
|
||||||
return new Promise<void>(async (c, e): Promise<void> => {
|
|
||||||
try {
|
try {
|
||||||
const buffer = await promisify(fs.readFile)(tarPath);
|
let done: boolean = false;
|
||||||
const extractor = tarStream.extract();
|
await extractAssets(targetPath, new RegExp(filePath), (assetPath: string, data: Buffer) => {
|
||||||
extractor.once("error", e);
|
if (path.normalize(assetPath) === path.normalize(filePath)) {
|
||||||
extractor.on("entry", (header, stream, next) => {
|
done = true;
|
||||||
const name = header.name;
|
resolve(data);
|
||||||
if (match.test(name)) {
|
|
||||||
extractData(stream).then((data) => {
|
|
||||||
callback(name, data);
|
|
||||||
next();
|
|
||||||
}).catch(e);
|
|
||||||
stream.resume();
|
|
||||||
} else {
|
|
||||||
stream.on("end", () => {
|
|
||||||
next();
|
|
||||||
});
|
|
||||||
stream.resume();
|
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
extractor.on("finish", () => {
|
if (!done) {
|
||||||
c();
|
throw new Error("couldn't find asset " + filePath);
|
||||||
});
|
}
|
||||||
extractor.write(buffer);
|
} catch (error) {
|
||||||
extractor.end();
|
if (error.toString().includes("Invalid tar header")) {
|
||||||
} catch (ex) {
|
vszipBuffer(targetPath, filePath).then(resolve).catch(reject);
|
||||||
e(ex);
|
} else {
|
||||||
|
reject(error);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const extractAssets = async (tarPath: string, match: RegExp, callback: (path: string, data: Buffer) => void): Promise<void> => {
|
||||||
|
const buffer = await util.promisify(fs.readFile)(tarPath);
|
||||||
|
return new Promise<void>(async (resolve, reject): Promise<void> => {
|
||||||
|
const extractor = tarStream.extract();
|
||||||
|
extractor.once("error", reject);
|
||||||
|
extractor.on("entry", async (header, stream, next) => {
|
||||||
|
const name = header.name;
|
||||||
|
if (match.test(name)) {
|
||||||
|
extractData(stream).then((data) => {
|
||||||
|
callback(name, data);
|
||||||
|
next();
|
||||||
|
}).catch(reject);
|
||||||
|
stream.resume();
|
||||||
|
} else {
|
||||||
|
stream.on("end", () => next());
|
||||||
|
stream.resume();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
extractor.on("finish", resolve);
|
||||||
|
extractor.write(buffer);
|
||||||
|
extractor.end();
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
const extractData = (stream: NodeJS.ReadableStream): Promise<Buffer> => {
|
const extractData = (stream: NodeJS.ReadableStream): Promise<Buffer> => {
|
||||||
return new Promise<Buffer>((c, e): void => {
|
return new Promise((resolve, reject): void => {
|
||||||
const fileData: Buffer[] = [];
|
const fileData: Buffer[] = [];
|
||||||
stream.on("data", (data) => fileData.push(data));
|
stream.on("data", (data) => fileData.push(data));
|
||||||
stream.on("end", () => {
|
stream.on("end", () => resolve(Buffer.concat(fileData)));
|
||||||
const fd = Buffer.concat(fileData);
|
stream.on("error", reject);
|
||||||
c(fd);
|
|
||||||
});
|
|
||||||
stream.on("error", e);
|
|
||||||
});
|
});
|
||||||
};
|
};
|

-const extractTar = (tarPath: string, targetPath: string, options: IExtractOptions = {}, token: CancellationToken): Promise<void> => {
-  return new Promise<void>(async (c, e): Promise<void> => {
-    try {
+const extractTar = async (tarPath: string, targetPath: string, options: IExtractOptions = {}, token: CancellationToken): Promise<void> => {
+  const buffer = await util.promisify(fs.readFile)(tarPath);
+  return new Promise<void>(async (resolve, reject): Promise<void> => {
     const sourcePathRegex = new RegExp(options.sourcePath ? `^${options.sourcePath}` : "");
-      const buffer = await promisify(fs.readFile)(tarPath);
-      const extractor = tarStream.extract();
-      extractor.once("error", e);
-      extractor.on("entry", (header, stream, next) => {
-        const rawName = path.normalize(header.name);
+    const extractor = tarStream.extract();
+    extractor.once("error", reject);
+    extractor.on("entry", async (header, stream, next) => {
+      const rawName = path.normalize(header.name);

       const nextEntry = (): void => {
         stream.resume();
         next();
       };

-      if (token.isCancellationRequested) {
+      if (token.isCancellationRequested || !sourcePathRegex.test(rawName)) {
         return nextEntry();
       }
-      if (!sourcePathRegex.test(rawName)) {
-        return nextEntry();
-      }

-      const fileName = rawName.replace(sourcePathRegex, "");
-      const targetFileName = path.join(targetPath, fileName);
-      if (/\/$/.test(fileName)) {
-        stream.resume();
-        mkdirp(targetFileName).then(() => {
-          next();
-        }, e);
-        return;
-      }
-
-      const dirName = path.dirname(fileName);
-      const targetDirName = path.join(targetPath, dirName);
-      if (targetDirName.indexOf(targetPath) !== 0) {
-        e(nls.localize("invalid file", "Error extracting {0}. Invalid file.", fileName));
-        return nextEntry();
-      }
-
-      return mkdirp(targetDirName, undefined, token).then(() => {
-        const fstream = fs.createWriteStream(targetFileName, { mode: header.mode });
-        fstream.once("close", () => {
-          next();
-        });
-        fstream.once("error", e);
-        stream.pipe(fstream);
-        stream.resume();
-      });
-    });
-    extractor.once("finish", c);
-    extractor.write(buffer);
-    extractor.end();
-    } catch (ex) {
-      e(ex);
-    }
+      const fileName = rawName.replace(sourcePathRegex, "");
+      const targetFileName = path.join(targetPath, fileName);
+      if (/\/$/.test(fileName)) {
+        return mkdirp(targetFileName).then(nextEntry);
+      }
+
+      const dirName = path.dirname(fileName);
+      const targetDirName = path.join(targetPath, dirName);
+      if (targetDirName.indexOf(targetPath) !== 0) {
+        return reject(nls.localize("invalid file", "Error extracting {0}. Invalid file.", fileName));
+      }
+
+      return mkdirp(targetDirName, undefined, token).then(() => {
+        const fstream = fs.createWriteStream(targetFileName, { mode: header.mode });
+        fstream.once("close", () => next());
+        fstream.once("error", reject);
+        stream.pipe(fstream);
+        stream.resume();
+      });
+    });
+    extractor.once("finish", resolve);
+    extractor.write(buffer);
+    extractor.end();
   });
 };

 // Override original functionality so we can use tar instead of zip.
 const target = vszip as typeof vszip;
-target.zip = zip;
+target.zip = tar;
 target.extract = extract;
 target.buffer = buffer;
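
For reference, a minimal usage sketch of the tar-based extraction above (not part of this diff; the archive path, target directory, and sourcePath prefix are made up, and CancellationToken is assumed to come from vs/base/common/cancellation as elsewhere in the VS Code tree):

// Sketch only: drives the extractTar shown above with hypothetical paths.
import { CancellationToken } from "vs/base/common/cancellation";

const installFromTar = async (): Promise<void> => {
  // Entries like "extension/package.json" land directly in the target folder
  // because the "extension/" prefix is stripped via options.sourcePath.
  await extractTar(
    "/tmp/my-extension.tar",                 // hypothetical tarball
    "/home/coder/extensions/my-extension",   // hypothetical target directory
    { sourcePath: "extension/" },
    CancellationToken.None,
  );
};

Because target.zip, target.extract, and target.buffer are reassigned on the vszip module above, existing callers of that module pick up the tar-based implementations without further changes.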

src/uriTransformerHttp.js

@@ -4,8 +4,7 @@ module.exports = (remoteAuthority, https) => {
   return {
     transformIncoming: (uri) => {
       switch (uri.scheme) {
-        case "https": return { scheme: "file", path: uri.path };
-        case "http": return { scheme: "file", path: uri.path };
+        case "https": case "http": return { scheme: "file", path: uri.path };
         case "file": return { scheme: "vscode-local", path: uri.path };
         default: return uri;
       }
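
To make the merged case above concrete, a rough sketch of the mapping it performs (the authority and paths here are invented):

// Sketch only: the transformer factory shown above, fed hypothetical URIs.
const transformer = require("./uriTransformerHttp")("localhost:8443", true);

// http and https URIs coming from the client now take the same branch:
transformer.transformIncoming({ scheme: "https", path: "/home/coder/project/index.ts" });
// => { scheme: "file", path: "/home/coder/project/index.ts" }

// file URIs are still namespaced so they are not confused with server-side files:
transformer.transformIncoming({ scheme: "file", path: "/home/coder/project/index.ts" });
// => { scheme: "vscode-local", path: "/home/coder/project/index.ts" }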

src/uriTransformerHttps.js

@@ -1,3 +1 @@
-module.exports = (remoteAuthority) => {
-  return require("./uriTransformerHttp")(remoteAuthority, true);
-};
+module.exports = (remoteAuthority) => require("./uriTransformerHttp")(remoteAuthority, true);

38
src/util.ts
@@ -25,8 +25,6 @@ export const generateCertificate = async (): Promise<{ cert: string, certKey: st
     util.promisify(fs.exists)(paths.certKey),
   ]);

-  await mkdirp(tmpdir);
-
   if (!exists[0] || !exists[1]) {
     const pem = require.__$__nodeRequire(path.resolve(__dirname, "../node_modules/pem/lib/pem")) as typeof import("pem");
     const certs = await new Promise<import("pem").CertificateCreationResult>((resolve, reject): void => {
@@ -37,6 +35,7 @@ export const generateCertificate = async (): Promise<{ cert: string, certKey: st
         resolve(result);
       });
     });
+    await mkdirp(tmpdir);
     await Promise.all([
       util.promisify(fs.writeFile)(paths.cert, certs.certificate),
       util.promisify(fs.writeFile)(paths.certKey, certs.serviceKey),
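
A hedged sketch of how the generated files might be consumed: generateCertificate and its { cert, certKey } file paths come from the code above, while the port and request handler are placeholders:

// Sketch only: wires the self-signed certificate into Node's https module.
import * as fs from "fs";
import * as https from "https";

const listen = async (): Promise<void> => {
  const paths = await generateCertificate(); // { cert, certKey } are file paths
  const server = https.createServer({
    cert: fs.readFileSync(paths.cert),
    key: fs.readFileSync(paths.certKey),
  }, (_request, response) => response.end("ok")); // placeholder handler
  server.listen(8443); // placeholder port
};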

@@ -46,16 +45,10 @@ export const generateCertificate = async (): Promise<{ cert: string, certKey: st
   return paths;
 };

-let secure: boolean;
-export const useHttpsTransformer = (): void => {
-  secure = true;
-};
+let transformer: string = "uriTransformerHttp";
+export const useHttpsTransformer = (): string => transformer = "uriTransformerHttps";

 export const uriTransformerPath = (): string => {
-  return getPathFromAmdModule(
-    require,
-    "vs/server/src/uriTransformerHttp" + (secure ? "s": ""),
-  );
+  return getPathFromAmdModule(require, `vs/server/src/${transformer}`);
 };

 export const getUriTransformer = (remoteAuthority: string): URITransformer => {
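
The net effect of the transformer refactor above, sketched (the module names are the ones visible in the diff; the resolved absolute paths depend on where the server is installed):

// Sketch only: uriTransformerPath() now just interpolates the module name.
uriTransformerPath();   // resolves vs/server/src/uriTransformerHttp
useHttpsTransformer();  // flips the module name used from here on
uriTransformerPath();   // resolves vs/server/src/uriTransformerHttps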

@@ -87,25 +80,16 @@ export const isWsl = async (): Promise<boolean> => {
 };

 export const open = async (url: string): Promise<void> => {
-  let command: string;
   const args = <string[]>[];
   const options = <cp.SpawnOptions>{};
   const platform = await isWsl() ? "wsl" : process.platform;
-  switch (platform) {
-    case "darwin":
-      command = "open";
-      break;
-    case "win32":
-    case "wsl":
-      command = platform === "wsl" ? "cmd.exe" : "cmd";
-      args.push("/c", "start", '""', "/b");
-      url = url.replace(/&/g, "^&");
-    default:
-      command = "xdg-open";
-      break;
+  let command = platform === "darwin" ? "open" : "xdg-open";
+  if (platform === "win32" || platform === "wsl") {
+    command = platform === "wsl" ? "cmd.exe" : "cmd";
+    args.push("/c", "start", '""', "/b");
+    url = url.replace(/&/g, "^&");
   }
-  args.push(url);
-  const proc = cp.spawn(command, args, options);
+  const proc = cp.spawn(command, [...args, url], options);
   await new Promise((resolve, reject) => {
     proc.on("error", reject);
     proc.on("close", (code) => {

@@ -125,8 +109,6 @@ export const unpackExecutables = async (): Promise<void> => {
   const destination = path.join(tmpdir, path.basename(rgPath || ""));
   if (rgPath && !(await util.promisify(fs.exists)(destination))) {
     await mkdirp(tmpdir);
-    // TODO: I'm not sure why but copyFile doesn't work in the Docker build.
-    // await util.promisify(fs.copyFile)(rgPath, destination);
     await util.promisify(fs.writeFile)(destination, await util.promisify(fs.readFile)(rgPath));
     await util.promisify(fs.chmod)(destination, "755");
   }