lbry-unity/LBRY 3D Viewer/Assets/Editor/LBRY/lbry-format~/index.js

const fs = require('fs');
const path = require('path');
const tar = require('tar-stream');
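// Note (assumption from the code below): a single pack stream is created at
// module scope and finalized by packDirectory, so packing is effectively
// single-use per process.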
const tarPack = tar.pack();
const ZstdCodec = require('zstd-codec').ZstdCodec;
const util = require('util');
const COMPRESSION_LEVEL = 5;

// async readdir; rejects on error instead of throwing inside the callback
// (the parameter is named dirPath to avoid shadowing the `path` module)
const readdir = (dirPath, options) => {
  return new Promise((resolve, reject) => {
    fs.readdir(dirPath, options, (err, files) => {
      if (err) {
        reject(err);
        return;
      }
      resolve(files);
    });
  });
};
// async readFile
const readFile = util.promisify(fs.readFile);
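
// The first tar entry is named '.' and carries package-level metadata as JSON;
// currently it is just an empty object.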
function generateFirstEntry(options) {
  return '{}';
}

function writeFirstEntry(options, tarPack) {
  tarPack.entry({ name: '.' }, generateFirstEntry(options), (err) => {
    if (err) {
      throw err;
    }
  });
}

function getFileReadStream(options) {
  const fileName = options.fileName || 'package.lbry';
  return fs.createReadStream(fileName);
}

function getFileWriteStream(options) {
  const fileName = options.fileName || 'package.lbry';
  return fs.createWriteStream(fileName);
}
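
// zstd-codec initializes asynchronously; ZstdCodec.run() invokes its callback
// once the codec is ready, so wrap that callback API in a promise.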
async function getZstd() {
  return new Promise((resolve, reject) => {
    try {
      ZstdCodec.run((zstd) => {
        const streaming = new zstd.Streaming();
        resolve(streaming);
      });
    } catch (e) {
      reject(e);
    }
  });
}

// Recursively walk `dir`, invoking `runCommand` on each file found.
async function walkAndRun(runCommand, dir, root) {
  const files = await readdir(dir);

  for (const file of files) {
    const currentPath = path.join(dir, file);

    if (fs.statSync(currentPath).isDirectory()) {
      await walkAndRun(runCommand, currentPath, root);
    } else {
      await runCommand(currentPath);
    }
  }
}

// Write `data` to `stream`, end the stream, and resolve once it has finished.
async function writeStream(stream, data) {
  return new Promise((resolve) => {
    stream.write(data);
    stream.end(resolve);
  });
}

async function packDirectory(directory, options = {}) {
  const zstd = await getZstd();
  const packRoot = directory;
  const fileWriteStream = getFileWriteStream(options);

  tarPack.pipe(fileWriteStream);
  writeFirstEntry(options, tarPack);

  await walkAndRun(async (file) => {
    let contents = await readFile(path.normalize(file));

    // Must be chunked to avoid issues with fixed memory limits.
    const chunkIterator = (() => {
      const chunkSize = 2048;
      let position = 0;

      const iterator = {
        next: function() {
          const endIndex = position + chunkSize;
          const result = {
            value: contents.slice(position, endIndex),
            done: position >= contents.length,
          };

          position = endIndex;
          return result;
        },
        [Symbol.iterator]: function() { return this; },
      };

      return iterator;
    })();

    contents = zstd.compressChunks(chunkIterator, contents.length, COMPRESSION_LEVEL);

    // Tar entry names use forward slashes regardless of platform, so replace
    // every backslash, not just the first.
    let name = path.relative(packRoot, file).replace(/\\/g, '/');
    if (/^\.\//.test(name)) {
      name = name.slice(2);
    }

    const entry = tarPack.entry({ name, size: contents.length }, (err) => {
      if (err) {
        throw err;
      }
    });

    // writeStream ends the entry once the compressed contents are flushed,
    // so no separate entry.end() call is needed.
    await writeStream(entry, contents);
  }, directory, packRoot);

  tarPack.finalize();
}

// Convert a binary string to a Uint8Array. (Currently unused by the exported API.)
function strToBuffer(string) {
  const newUint = new Uint8Array(string.length);

  for (let i = 0; i < string.length; i++) {
    newUint[i] = string.charCodeAt(i);
  }

  return newUint;
}

// Collect a readable stream's chunks into a single Buffer.
function streamToBuffer(stream) {
  const chunks = [];

  return new Promise((resolve, reject) => {
    stream.on('data', (chunk) => chunks.push(chunk));
    stream.on('error', reject);
    stream.on('end', () => resolve(Buffer.concat(chunks)));
  });
}

async function unpackDirectory(directory, options = {}) {
  return new Promise(async (resolve) => {
    if (!fs.existsSync(directory)) {
      fs.mkdirSync(directory);
    }

    const fileReadStream = getFileReadStream(options);
    const zstd = await getZstd();

    const extract = tar.extract();

    extract.on('entry', async (header, fileStream, next) => {
      let contents = await streamToBuffer(fileStream);
      contents = new Uint8Array(contents);
      contents = zstd.decompress(contents);

      // Skip entries whose names start with '.', such as the metadata entry.
      if (!/^\./.test(header.name)) {
        const writePath = path.join(directory, header.name);

        try {
          fs.mkdirSync(path.dirname(writePath), { recursive: true });
        } catch (e) {
          // Directory already exists
        }

        const fileWriteStream = fs.createWriteStream(writePath);
        fileWriteStream.write(contents);
        fileWriteStream.end();
        next();
      } else {
        fileStream.resume();
        next();
      }
    });

    extract.on('finish', () => {
      resolve(true);
    });

    fileReadStream.pipe(extract);
  });
}

/*
// DO NOT USE until converted to use `compressChunks`
async function packPaths(root, pathsArray, options = {}) {
  const fileWriteStream = getFileWriteStream(options);
  const zstd = await getZstd();

  tarPack.pipe(fileWriteStream);
  writeFirstEntry(options, tarPack);

  for (let name of pathsArray) {
    let contents = await readFile(path.join(root, name));
    contents = new Uint8Array(contents);
    contents = zstd.compress(contents, COMPRESSION_LEVEL);

    if (/^\.\//.test(name)) {
      name = name.slice(2);
    }

    const entry = tarPack.entry({ name, size: contents.length }, (err) => {
      if (err) {
        throw err;
      }
    });

    entry.end(contents);
  }

  tarPack.finalize();
}
*/

module.exports = {
  packDirectory,
  unpackDirectory,
};
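
/*
Example usage (a sketch; `./content` and `./extracted` are hypothetical paths,
and `fileName` falls back to 'package.lbry' when omitted):

const { packDirectory, unpackDirectory } = require('./index');

(async () => {
  // Compress every file under ./content into a single package.
  await packDirectory('./content', { fileName: 'package.lbry' });

  // Recreate the directory tree from that package.
  await unpackDirectory('./extracted', { fileName: 'package.lbry' });
})();
*/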