const fs = require("fs");
const path = require("path");
const tar = require("tar-stream");

// Module-level pack stream; packDirectory() below writes into this shared
// instance and finalizes it, so the module is effectively single-use per process.
const tarPack = tar.pack();

const ZstdCodec = require("zstd-codec").ZstdCodec;
const util = require("util");

const COMPRESSION_LEVEL = 5;

const SUPPORTED_FORMATS = [
  [/\.(jpg|png|gif|svg|bmp)$/i, "images"],
  [/\.(mp4|m4v|mkv|webm|flv|f4v|ogv)$/i, "videos"],
  [/\.(mp3|m4a|aac|wav|flac|ogg|opus)$/i, "audios"]
];

function getMediaType(fileName) {
  // Seed the reduce with the file name itself; the first matching regex
  // replaces it with its media type, and later pairs leave it untouched
  // (a media-type string no longer matches any extension regex).
  const res = SUPPORTED_FORMATS.reduce((ret, testpair) => {
    const [regex, mediaType] = testpair;

    return regex.test(ret) ? mediaType : ret;
  }, fileName);

  return res !== fileName ? res : "others";
}
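
// For example (hypothetical names): getMediaType("clip.MKV") returns "videos",
// getMediaType("notes.txt") returns "others".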

function mkdirSyncRecursive(dir) {
  let segments = dir.split(path.sep);

  // Create each ancestor directory in turn; joining with "/" is accepted by
  // Node on Windows as well.
  for (let i = 1; i <= segments.length; i++) {
    let segment = segments.slice(0, i).join("/");

    if (segment.length > 0 && !fs.existsSync(segment)) {
      fs.mkdirSync(segment);
    }
  }
}
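
// On Node.js 10.12+ the same effect is available natively as
// fs.mkdirSync(dir, { recursive: true }).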

// async readdir
const readdir = async (path, options) => {
  return new Promise((resolve, reject) => {
    fs.readdir(path, options, (err, files) => {
      if (err) {
        // Reject instead of throwing: a throw inside this callback would not
        // reach the promise and would surface as an uncaught exception.
        return reject(err);
      }

      resolve(files);
    });
  });
};
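
// Equivalently: const readdir = util.promisify(fs.readdir);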

// async readFile
const readFile = util.promisify(fs.readFile);

// Reserved for package metadata; currently an empty JSON object.
function generateFirstEntry(options) {
  return "{}";
}

function writeFirstEntry(options, tarPack) {
  tarPack.entry({ name: "." }, generateFirstEntry(options), err => {
    if (err) {
      throw err;
    }
  });
}

function getFileReadStream(options) {
  const fileName = options.fileName || "package.lbry";

  return fs.createReadStream(fileName);
}

function getFileWriteStream(options) {
  const fileName = options.fileName || "package.lbry";

  return fs.createWriteStream(fileName);
}

async function getZstd() {
  return new Promise((resolve, reject) => {
    try {
      // zstd-codec loads its WASM binding asynchronously and hands the codec
      // to this callback; the Streaming API provides compressChunks /
      // decompressChunks.
      ZstdCodec.run(zstd => {
        const Streaming = new zstd.Streaming();

        resolve(Streaming);
      });
    } catch (e) {
      reject(e);
    }
  });
}

// Writes a files.js index exposing the packed file names, grouped by media
// type, as global arrays (images, audios, videos, others).
async function categoriseFilesIndex(runCommand, root, index) {
  const files = { images: "", audios: "", videos: "", others: "" };

  for (let file of index) {
    const mediaType = getMediaType(path.basename(file));
    files[mediaType] += `"${file}",`;
  }

  let contents = "";
  for (let mediaType in files) {
    contents += `${mediaType}=[${files[mediaType]}];\n`;
  }

  const bufferedContents = Buffer.from(contents);
  const relativeFilePath = path.join(root, "files.js");
  await runCommand(relativeFilePath, bufferedContents);
}
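
// For an index of ["logo.png", "intro.mp4"], the generated files.js would read:
//   images=["logo.png",];
//   audios=[];
//   videos=["intro.mp4",];
//   others=[];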

async function processTemplate(runCommand, root, userDefinedTemplate) {
  const template = userDefinedTemplate
    ? userDefinedTemplate
    : require("./templates").default;

  for (const resource of template.resources) {
    if (resource.type === "static") {
      const bufferedContents = Buffer.from(resource.contents);
      const relativeFilePath = path.join(root, resource.name);

      await runCommand(relativeFilePath, bufferedContents);
    }
  }
}

async function walkAndRun(runCommand, dir, root) {
  let files = await readdir(dir);

  for (let file of files) {
    const currentPath = path.join(dir, file);

    if (fs.statSync(currentPath).isDirectory()) {
      // Propagate the pack root through the recursion.
      await walkAndRun(runCommand, currentPath, root);
    } else {
      const fileContents = await readFile(path.normalize(currentPath));
      await runCommand(currentPath, fileContents);
    }
  }
}

async function writeStream(stream, data) {
  return new Promise(resolve => {
    stream.write(data);
    // end() both closes the stream and resolves once it has finished.
    stream.end(resolve);
  });
}

async function packDirectory(directory, options = {}) {
  let filesIndex = [];

  const zstd = await getZstd();
  const packRoot = directory;
  const fileWriteStream = getFileWriteStream(options);

  tarPack.pipe(fileWriteStream);
  writeFirstEntry(options, tarPack);

  // Feed contents to the codec in fixed-size chunks to avoid running into the
  // WASM codec's fixed memory limits on large files.
  const makeChunkIterator = contents => {
    const chunkSize = 2048;
    let position = 0;

    const iterator = {
      next: function() {
        const endIndex = position + chunkSize;
        const result = {
          value: contents.slice(position, endIndex),
          done: position >= contents.length
        };

        position = endIndex;
        return result;
      },
      [Symbol.iterator]: function() {
        return this;
      }
    };
    return iterator;
  };

  const createEntry = async (file, contents) => {
    const chunkIterator = makeChunkIterator(contents);
    contents = zstd.compressChunks(
      chunkIterator,
      contents.length,
      COMPRESSION_LEVEL
    );

    // Normalize Windows separators; the global regex replaces every
    // backslash, not just the first.
    let name = path.relative(packRoot, file).replace(/\\/g, "/");
    if (/^\.\//.test(name)) {
      name = name.slice(2);
    }

    const entry = tarPack.entry({ name, size: contents.length }, err => {
      if (err) {
        throw err;
      }
    });
    // writeStream() already ends the entry, so no further end() call is needed.
    await writeStream(entry, contents);

    filesIndex.push(name);
  };

  await walkAndRun(createEntry, directory, packRoot);

  if (options.useTemplate === true) {
    await processTemplate(createEntry, packRoot);
    await categoriseFilesIndex(createEntry, packRoot, filesIndex);
  }

  tarPack.finalize();
}
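
// The hand-rolled chunk iterator above is equivalent to this generator sketch
// (shown for clarity, not used by the module):
//
//   function* chunksOf(contents, chunkSize = 2048) {
//     for (let pos = 0; pos < contents.length; pos += chunkSize) {
//       yield contents.slice(pos, pos + chunkSize);
//     }
//   }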

// Converts a string to a Uint8Array; assumes single-byte characters
// (code points above 0xFF are truncated).
function strToBuffer(string) {
  const newUint = new Uint8Array(string.length);

  for (let i = 0; i < string.length; i++) {
    newUint[i] = string.charCodeAt(i);
  }

  return newUint;
}

function streamToBuffer(stream) {
  const chunks = [];

  return new Promise((resolve, reject) => {
    stream.on("data", chunk => chunks.push(chunk));
    stream.on("error", reject);
    stream.on("end", () => resolve(Buffer.concat(chunks)));
  });
}

async function unpackDirectory(directory, options = {}) {
  if (!fs.existsSync(directory)) {
    mkdirSyncRecursive(directory);
  }

  const fileReadStream = getFileReadStream(options);
  const zstd = await getZstd();

  const extract = tar.extract();

  extract.on("entry", async (header, fileStream, next) => {
    let contents = await streamToBuffer(fileStream);
    contents = new Uint8Array(contents);

    // Must be chunked to avoid issues with fixed memory limits.
    const chunkIterator = (() => {
      const chunkSize = 2048;
      let position = 0;

      const iterator = {
        next: function() {
          const endIndex = position + chunkSize;
          const result = {
            value: contents.slice(position, endIndex),
            done: position >= contents.length
          };

          position = endIndex;
          return result;
        },
        [Symbol.iterator]: function() {
          return this;
        }
      };
      return iterator;
    })();

    contents = zstd.decompressChunks(chunkIterator);

    // Skip the metadata entry ("." and any other dot-prefixed name). Note that
    // header.name is otherwise used as-is; a hostile archive could carry
    // path-traversal names.
    if (!/^\./.test(header.name)) {
      const writePath = path.join(directory, header.name);

      try {
        mkdirSyncRecursive(path.dirname(writePath));
      } catch (e) {
        // Directory already exists.
        console.log(e);
      }

      const fileWriteStream = fs.createWriteStream(writePath);
      fileWriteStream.write(contents);
      fileWriteStream.end();
      next();
    } else {
      fileStream.resume();
      next();
    }
  });

  return new Promise(resolve => {
    extract.on("finish", () => {
      resolve(true);
    });

    fileReadStream.pipe(extract);
  });
}

/*
// DO NOT USE until converted to use `compressChunks`
async function packPaths(root, pathsArray, options = {}) {
  const fileWriteStream = getFileWriteStream(options);
  const zstd = await getZstd();

  tarPack.pipe(fileWriteStream);
  writeFirstEntry(options, tarPack);

  for(let name of pathsArray) {
    let contents = await readFile(path.join(root, name));

    contents = new Uint8Array(contents);
    contents = zstd.compress(contents, COMPRESSION_LEVEL);

    if(/^\.\//.test(name)) {
      name = name.slice(2);
    }

    const entry = tarPack.entry({ name, size: contents.length }, (err) => {
      if(err) {
        throw err;
      }
    });

    entry.end(contents);
  }
  tarPack.finalize();
}
*/

module.exports = {
  packDirectory,
  unpackDirectory
};
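
// Minimal usage sketch (hypothetical paths; the require path depends on where
// this module lives; option names follow the defaults above):
//
//   const { packDirectory, unpackDirectory } = require("./index");
//
//   (async () => {
//     await packDirectory("./site", { fileName: "package.lbry" });
//     await unpackDirectory("./unpacked", { fileName: "package.lbry" });
//   })();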