code-server-2/src/tar.ts

import * as fs from "fs";
import * as path from "path";
import * as tarStream from "tar-stream";
import * as util from "util";
import * as nls from "vs/nls";
import * as vszip from "vs/base/node/zip";
import { CancellationToken } from "vs/base/common/cancellation";
import { mkdirp } from "vs/base/node/pfs";
// We will be overriding these, so keep a reference to the original.
const vszipExtract = vszip.extract;
const vszipBuffer = vszip.buffer;

export interface IExtractOptions {
	overwrite?: boolean;
	/**
	 * Source path within the TAR/ZIP archive. Only the files
	 * contained in this path will be extracted.
	 */
	sourcePath?: string;
}

export interface IFile {
	path: string;
	contents?: Buffer | string;
	localPath?: string;
}
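
/**
 * Pack the given in-memory files into a tarball, write it to tarPath, and
 * return the path that was written.
 */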
export const tar = async (tarPath: string, files: IFile[]): Promise<string> => {
	const pack = tarStream.pack();
	const chunks: Buffer[] = [];
	const ended = new Promise<Buffer>((resolve) => {
		pack.on("end", () => resolve(Buffer.concat(chunks)));
	});
	pack.on("data", (chunk: Buffer) => chunks.push(chunk));
	for (let i = 0; i < files.length; i++) {
		const file = files[i];
		pack.entry({ name: file.path }, file.contents);
	}
	pack.finalize();
	await util.promisify(fs.writeFile)(tarPath, await ended);
	return tarPath;
};
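
/**
 * Extract an archive to extractPath. Tries tar extraction first and falls
 * back to VS Code's zip extraction when the file is not a valid tarball.
 */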
export const extract = async (archivePath: string, extractPath: string, options: IExtractOptions = {}, token: CancellationToken): Promise<void> => {
	try {
		await extractTar(archivePath, extractPath, options, token);
	} catch (error) {
		if (error.toString().includes("Invalid tar header")) {
			// Not a tarball; fall back to the original zip extraction.
			await vszipExtract(archivePath, extractPath, options, token);
		} else {
			throw error;
		}
	}
};
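
/**
 * Read a single file out of the archive into memory. Falls back to VS Code's
 * zip buffer implementation when the archive is not a valid tarball.
 */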
export const buffer = (targetPath: string, filePath: string): Promise<Buffer> => {
	return new Promise<Buffer>(async (resolve, reject) => {
		try {
			let done: boolean = false;
			await extractAssets(targetPath, new RegExp(filePath), (assetPath: string, data: Buffer) => {
				if (path.normalize(assetPath) === path.normalize(filePath)) {
					done = true;
					resolve(data);
				}
			});
			if (!done) {
				throw new Error("couldn't find asset " + filePath);
			}
		} catch (error) {
			if (error.toString().includes("Invalid tar header")) {
				vszipBuffer(targetPath, filePath).then(resolve).catch(reject);
			} else {
				reject(error);
			}
		}
	});
};
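
/**
 * Walk the entries of the tarball at tarPath and invoke the callback with the
 * entry name and contents for every entry whose name matches the regex.
 */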
const extractAssets = async (tarPath: string, match: RegExp, callback: (path: string, data: Buffer) => void): Promise<void> => {
	const buffer = await util.promisify(fs.readFile)(tarPath);
	return new Promise<void>(async (resolve, reject): Promise<void> => {
		const extractor = tarStream.extract();
		extractor.once("error", reject);
		extractor.on("entry", async (header, stream, next) => {
			const name = header.name;
			if (match.test(name)) {
				extractData(stream).then((data) => {
					callback(name, data);
					next();
				}).catch(reject);
				stream.resume();
			} else {
				stream.on("end", () => next());
				stream.resume();
			}
		});
		extractor.on("finish", resolve);
		extractor.write(buffer);
		extractor.end();
	});
};
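
/**
 * Collect a readable stream into a single Buffer.
 */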
const extractData = (stream: NodeJS.ReadableStream): Promise<Buffer> => {
	return new Promise((resolve, reject): void => {
		const fileData: Buffer[] = [];
		stream.on("data", (data) => fileData.push(data));
		stream.on("end", () => resolve(Buffer.concat(fileData)));
		stream.on("error", reject);
	});
};
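
/**
 * Extract a tarball to targetPath. Honors options.sourcePath (only entries
 * under that path are extracted, with the prefix stripped) and the
 * cancellation token, and rejects entries that would resolve outside
 * targetPath.
 */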
const extractTar = async (tarPath: string, targetPath: string, options: IExtractOptions = {}, token: CancellationToken): Promise<void> => {
	const buffer = await util.promisify(fs.readFile)(tarPath);
	return new Promise<void>(async (resolve, reject): Promise<void> => {
		const sourcePathRegex = new RegExp(options.sourcePath ? `^${options.sourcePath}` : "");
		const extractor = tarStream.extract();
		extractor.once("error", reject);
		extractor.on("entry", async (header, stream, next) => {
			const rawName = path.normalize(header.name);
			const nextEntry = (): void => {
				stream.resume();
				next();
			};
			// Skip entries outside the requested source path or once cancelled.
			if (token.isCancellationRequested || !sourcePathRegex.test(rawName)) {
				return nextEntry();
			}
			// Strip the source path prefix so entries land directly in targetPath.
			const fileName = rawName.replace(sourcePathRegex, "");
			const targetFileName = path.join(targetPath, fileName);
			if (/\/$/.test(fileName)) {
				return mkdirp(targetFileName).then(nextEntry);
			}
			const dirName = path.dirname(fileName);
			const targetDirName = path.join(targetPath, dirName);
			// Guard against entries that would escape the extraction root.
			if (targetDirName.indexOf(targetPath) !== 0) {
				return reject(nls.localize("invalid file", "Error extracting {0}. Invalid file.", fileName));
			}
			return mkdirp(targetDirName, undefined, token).then(() => {
				const fstream = fs.createWriteStream(targetFileName, { mode: header.mode });
				fstream.once("close", () => next());
				fstream.once("error", reject);
				stream.pipe(fstream);
				stream.resume();
			});
		});
		extractor.once("finish", resolve);
		extractor.write(buffer);
		extractor.end();
	});
};

/**
 * Override original functionality so we can use extensions that are in a tar
 * in addition to zips.
 */
export const enableExtensionTars = (): void => {
	const target = vszip as typeof vszip;
	target.zip = tar;
	target.extract = extract;
	target.buffer = buffer;
};
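
// A minimal sketch of how this override might be wired in at startup. The
// import path below is hypothetical and the call site is an assumption, not
// part of this file. Once enableExtensionTars() has run, vszip.extract and
// vszip.buffer use the tar-aware versions above (falling back to the stock
// zip behavior for regular zip archives), and vszip.zip produces tarballs.
//
//     import { enableExtensionTars } from "vs/server/src/tar"; // hypothetical path
//
//     enableExtensionTars(); // call once, before extensions are installed or read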