add option to pack template
This commit is contained in:
parent
9fd036d494
commit
896b3bae3c
3 changed files with 212 additions and 116 deletions
175
index.js
175
index.js
|
@ -1,17 +1,33 @@
|
||||||
const fs = require('fs');
|
const fs = require("fs");
|
||||||
const path = require('path');
|
const path = require("path");
|
||||||
const tar = require('tar-stream');
|
const tar = require("tar-stream");
|
||||||
const tarPack = tar.pack();
|
const tarPack = tar.pack();
|
||||||
const ZstdCodec = require('zstd-codec').ZstdCodec;
|
const ZstdCodec = require("zstd-codec").ZstdCodec;
|
||||||
const util = require('util');
|
const util = require("util");
|
||||||
|
|
||||||
const COMPRESSION_LEVEL = 5;
|
const COMPRESSION_LEVEL = 5;
|
||||||
|
|
||||||
|
// Extension-test → media-type bucket pairs used to categorise packed files
// for the web template's generated index.
const SUPPORTED_FORMATS = [
  [/\.(jpg|png|gif|svg|bmp)$/i, "images"],
  [/\.(mp4|m4v|mkv|webm|flv|f4v|ogv)$/i, "videos"],
  [/\.(mp3|m4a|aac|wav|flac|ogg|opus)$/i, "audios"]
];

// Returns the media-type bucket ("images" | "videos" | "audios") for a file
// name based on its extension, or "others" when no known extension matches.
function getMediaType(fileName) {
  for (const [pattern, mediaType] of SUPPORTED_FORMATS) {
    if (pattern.test(fileName)) {
      return mediaType;
    }
  }
  return "others";
}
|
||||||
|
|
||||||
// Creates `dir` and any missing parent directories (like `mkdir -p`).
// Bug fix: the original split on `path.sep` but re-joined segments with a
// literal "/", so on Windows the existence checks and mkdir calls used a
// different path than the one being built. Join with `path.sep` so both
// sides agree on every platform.
function mkdirSyncRecursive(dir) {
  const segments = dir.split(path.sep);

  for (let i = 1; i <= segments.length; i++) {
    const segment = segments.slice(0, i).join(path.sep);
    // Skip the empty leading segment produced by absolute paths.
    if (segment.length > 0 && !fs.existsSync(segment)) {
      fs.mkdirSync(segment);
    }
  }
}
|
||||||
|
@ -20,38 +36,38 @@ function mkdirSyncRecursive(dir) {
|
||||||
|
|
||||||
// async readdir
// Promise wrapper around fs.readdir.
// Bug fix: the original did `throw err` inside the fs.readdir callback —
// that throw is uncatchable by the awaiting caller and leaves the promise
// pending forever. Reject the promise instead so callers can handle errors.
const readdir = async (path, options) => {
  return new Promise((resolve, reject) => {
    fs.readdir(path, options, (err, files) => {
      if (err) {
        reject(err);
        return;
      }
      resolve(files);
    });
  });
};

// async readFile
const readFile = util.promisify(fs.readFile);
|
||||||
|
|
||||||
// Produces the contents of the archive's leading metadata entry.
// Currently an empty JSON object; reserved for future package metadata.
function generateFirstEntry(options) {
  const metadata = "{}";
  return metadata;
}

// Writes the metadata record (entry named ".") as the first entry of the
// tar pack so unpackers can identify the package format.
function writeFirstEntry(options, tarPack) {
  const onEntryWritten = err => {
    if (err) {
      throw err;
    }
  };
  tarPack.entry({ name: "." }, generateFirstEntry(options), onEntryWritten);
}
|
||||||
|
|
||||||
function getFileReadStream(options) {
|
function getFileReadStream(options) {
|
||||||
const fileName = options.fileName || 'package.lbry';
|
const fileName = options.fileName || "package.lbry";
|
||||||
return fs.createReadStream(fileName);
|
return fs.createReadStream(fileName);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Opens a write stream for the package file named by `options.fileName`,
// falling back to "package.lbry" when none (or an empty name) is given.
function getFileWriteStream(options) {
  return fs.createWriteStream(options.fileName || "package.lbry");
}
|
||||||
|
|
||||||
|
@ -62,34 +78,69 @@ async function getZstd() {
|
||||||
const Streaming = new zstd.Streaming();
|
const Streaming = new zstd.Streaming();
|
||||||
resolve(Streaming);
|
resolve(Streaming);
|
||||||
});
|
});
|
||||||
} catch(e) {
|
} catch (e) {
|
||||||
reject(e);
|
reject(e);
|
||||||
}
|
}
|
||||||
})
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Groups the collected file paths by media type and writes a `files.js`
// script into the pack root (via `runCommand`) declaring one array per
// bucket (images, audios, videos, others) for the web template to consume.
// Bug fix: file names were interpolated as raw `"${file}",` — a quote or
// backslash in a file name produced syntactically invalid JavaScript.
// JSON.stringify escapes names safely and emits a valid array literal.
async function catagoriseFilesIndex(runCommand, root, index) {
  const files = { images: [], audios: [], videos: [], others: [] };

  for (const file of index) {
    const mediaType = getMediaType(path.basename(file));
    files[mediaType].push(file);
  }

  let contents = "";
  for (const mediaType in files) {
    contents += `${mediaType}=${JSON.stringify(files[mediaType])};\n`;
  }

  const bufferedContents = Buffer.from(contents);
  const relativeFilePath = path.join(root, "files.js");
  await runCommand(relativeFilePath, bufferedContents);
}
|
||||||
|
|
||||||
|
// Writes every "static" resource of a template into the pack root via
// `runCommand(filePath, contents)`. When the caller supplies no template,
// the bundled default web template is used.
async function processTemplate(runCommand, root, userDefinedTemplate) {
  const template = userDefinedTemplate || require("./templates").default;

  for (const resource of template.resources) {
    if (resource.type !== "static") {
      continue;
    }
    const relativeFilePath = path.join(root, resource.name);
    await runCommand(relativeFilePath, Buffer.from(resource.contents));
  }
}
|
||||||
|
|
||||||
// Recursively walks `dir`, invoking `runCommand(filePath, fileContents)`
// for every regular file found. `root` identifies the pack root so commands
// can compute relative entry names.
// Bug fix: the recursive call previously dropped the `root` argument
// (`walkAndRun(runCommand, currentPath)`), so nested invocations received
// `undefined` as the root; thread it through unchanged.
async function walkAndRun(runCommand, dir, root) {
  const files = await readdir(dir);

  for (const file of files) {
    const currentPath = path.join(dir, file);

    if (fs.statSync(currentPath).isDirectory()) {
      await walkAndRun(runCommand, currentPath, root);
    } else {
      const fileContents = await readFile(path.normalize(currentPath));
      await runCommand(currentPath, fileContents);
    }
  }
}
|
||||||
|
|
||||||
// Writes `data` to `stream`, ends the stream, and resolves once it has
// finished flushing.
// Bug fix: the original promise only ever resolved — a stream error left it
// pending forever (and the error unhandled). Reject on the first "error"
// event so callers can catch failures.
async function writeStream(stream, data) {
  return new Promise((resolve, reject) => {
    stream.once("error", reject);
    stream.write(data);
    stream.end(resolve);
  });
}
|
||||||
|
|
||||||
async function packDirectory(directory, options = {}) {
|
async function packDirectory(directory, options = {}) {
|
||||||
|
let filesIndex = [];
|
||||||
|
|
||||||
const zstd = await getZstd();
|
const zstd = await getZstd();
|
||||||
const packRoot = directory;
|
const packRoot = directory;
|
||||||
const fileWriteStream = getFileWriteStream(options);
|
const fileWriteStream = getFileWriteStream(options);
|
||||||
|
@ -97,11 +148,7 @@ async function packDirectory(directory, options = {}) {
|
||||||
tarPack.pipe(fileWriteStream);
|
tarPack.pipe(fileWriteStream);
|
||||||
writeFirstEntry(options, tarPack);
|
writeFirstEntry(options, tarPack);
|
||||||
|
|
||||||
await walkAndRun(async (file) => {
|
const makeChunkIterator = contents => {
|
||||||
contents = await readFile(path.normalize(file));
|
|
||||||
|
|
||||||
// Must be chunked to avoid issues with fixed memory limits.
|
|
||||||
const chunkIterator = (() => {
|
|
||||||
const chunkSize = 2048;
|
const chunkSize = 2048;
|
||||||
let position = 0;
|
let position = 0;
|
||||||
|
|
||||||
|
@ -110,40 +157,49 @@ async function packDirectory(directory, options = {}) {
|
||||||
const endIndex = position + chunkSize;
|
const endIndex = position + chunkSize;
|
||||||
const result = {
|
const result = {
|
||||||
value: contents.slice(position, endIndex),
|
value: contents.slice(position, endIndex),
|
||||||
done: position >= contents.length,
|
done: position >= contents.length
|
||||||
};
|
};
|
||||||
|
|
||||||
position = endIndex;
|
position = endIndex;
|
||||||
return result;
|
return result;
|
||||||
},
|
},
|
||||||
[Symbol.iterator]: function() { return this }
|
[Symbol.iterator]: function() {
|
||||||
|
return this;
|
||||||
|
}
|
||||||
};
|
};
|
||||||
return iterator;
|
return iterator;
|
||||||
})();
|
};
|
||||||
|
|
||||||
contents = zstd.compressChunks(chunkIterator, contents.length, COMPRESSION_LEVEL);
|
const createEntry = async (file, contents) => {
|
||||||
|
const chunkIterator = makeChunkIterator(contents);
|
||||||
let name = path.relative(packRoot, file).replace('\\', '/');
|
contents = zstd.compressChunks(
|
||||||
|
chunkIterator,
|
||||||
if(/^\.\//.test(name)) {
|
contents.length,
|
||||||
|
COMPRESSION_LEVEL
|
||||||
|
);
|
||||||
|
let name = path.relative(packRoot, file).replace("\\", "/");
|
||||||
|
if (/^\.\//.test(name)) {
|
||||||
name = name.slice(2);
|
name = name.slice(2);
|
||||||
}
|
}
|
||||||
|
const entry = tarPack.entry({ name, size: contents.length }, err => {
|
||||||
const entry = tarPack.entry({ name, size: contents.length }, (err) => {
|
if (err) {
|
||||||
if(err) {
|
|
||||||
throw err;
|
throw err;
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
await writeStream(entry, contents);
|
await writeStream(entry, contents);
|
||||||
|
|
||||||
entry.end();
|
entry.end();
|
||||||
}, directory, packRoot);
|
filesIndex.push(name);
|
||||||
|
};
|
||||||
|
|
||||||
|
await walkAndRun(createEntry, directory, packRoot);
|
||||||
|
if (options.useTemplate === true) {
|
||||||
|
await processTemplate(createEntry, packRoot);
|
||||||
|
await catagoriseFilesIndex(createEntry, packRoot, filesIndex);
|
||||||
|
}
|
||||||
tarPack.finalize();
|
tarPack.finalize();
|
||||||
};
|
}
|
||||||
|
|
||||||
|
function strToBuffer(string) {
|
||||||
function strToBuffer (string) {
|
|
||||||
let arrayBuffer = new ArrayBuffer(string.length * 1);
|
let arrayBuffer = new ArrayBuffer(string.length * 1);
|
||||||
let newUint = new Uint8Array(arrayBuffer);
|
let newUint = new Uint8Array(arrayBuffer);
|
||||||
newUint.forEach((_, i) => {
|
newUint.forEach((_, i) => {
|
||||||
|
@ -153,18 +209,17 @@ function strToBuffer (string) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Buffers an entire readable stream into a single Buffer.
// Resolves with the concatenated chunks on "end"; rejects on stream error.
function streamToBuffer(stream) {
  return new Promise((resolve, reject) => {
    const collected = [];
    stream.on("data", chunk => collected.push(chunk));
    stream.on("error", reject);
    stream.on("end", () => resolve(Buffer.concat(collected)));
  });
}
||||||
|
|
||||||
|
|
||||||
async function unpackDirectory(directory, options = {}) {
|
async function unpackDirectory(directory, options = {}) {
|
||||||
return new Promise(async (resolve) => {
|
return new Promise(async resolve => {
|
||||||
if(!fs.existsSync(directory)) {
|
if (!fs.existsSync(directory)) {
|
||||||
mkdirSyncRecursive(directory);
|
mkdirSyncRecursive(directory);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -173,7 +228,7 @@ async function unpackDirectory(directory, options = {}) {
|
||||||
|
|
||||||
const extract = tar.extract();
|
const extract = tar.extract();
|
||||||
|
|
||||||
extract.on('entry', async (header, fileStream, next) => {
|
extract.on("entry", async (header, fileStream, next) => {
|
||||||
let contents = await streamToBuffer(fileStream);
|
let contents = await streamToBuffer(fileStream);
|
||||||
contents = new Uint8Array(contents);
|
contents = new Uint8Array(contents);
|
||||||
|
|
||||||
|
@ -187,20 +242,22 @@ async function unpackDirectory(directory, options = {}) {
|
||||||
const endIndex = position + chunkSize;
|
const endIndex = position + chunkSize;
|
||||||
const result = {
|
const result = {
|
||||||
value: contents.slice(position, endIndex),
|
value: contents.slice(position, endIndex),
|
||||||
done: position >= contents.length,
|
done: position >= contents.length
|
||||||
};
|
};
|
||||||
|
|
||||||
position = endIndex;
|
position = endIndex;
|
||||||
return result;
|
return result;
|
||||||
},
|
},
|
||||||
[Symbol.iterator]: function() { return this }
|
[Symbol.iterator]: function() {
|
||||||
|
return this;
|
||||||
|
}
|
||||||
};
|
};
|
||||||
return iterator;
|
return iterator;
|
||||||
})();
|
})();
|
||||||
|
|
||||||
contents = zstd.decompressChunks(chunkIterator);
|
contents = zstd.decompressChunks(chunkIterator);
|
||||||
|
|
||||||
if(!/^\./.test(header.name)) {
|
if (!/^\./.test(header.name)) {
|
||||||
const writePath = path.join(directory, header.name);
|
const writePath = path.join(directory, header.name);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
@ -219,7 +276,7 @@ async function unpackDirectory(directory, options = {}) {
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
extract.on('finish', () => {
|
extract.on("finish", () => {
|
||||||
resolve(true);
|
resolve(true);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -260,5 +317,5 @@ async function packPaths(root, pathsArray, options = {}) {
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
packDirectory,
|
packDirectory,
|
||||||
unpackDirectory,
|
unpackDirectory
|
||||||
}
|
};
|
||||||
|
|
|
@ -1,22 +1,29 @@
|
||||||
#!/usr/bin/env node
|
#!/usr/bin/env node
|
||||||
const {
|
const { packDirectory, unpackDirectory } = require("lbry-format");
|
||||||
packDirectory,
|
const path = require("path");
|
||||||
unpackDirectory,
|
|
||||||
} = require('lbry-format');
|
|
||||||
const path = require('path');
|
|
||||||
|
|
||||||
require('yargs')
|
require("yargs")
|
||||||
.scriptName('lbry-pack')
|
.scriptName("lbry-pack")
|
||||||
.usage('$0 <cmd> [args]')
|
.usage("$0 <cmd> [args]")
|
||||||
.command('pack [directory] [file]', 'Pack a directory', (yargs) => {
|
.command(
|
||||||
yargs.positional('directory', {
|
"pack [directory] [file] [-t]",
|
||||||
default: './src',
|
"Pack a directory",
|
||||||
describe: 'The source directory to pack'
|
yargs => {
|
||||||
}).positional('file', {
|
yargs
|
||||||
describe: 'Output file',
|
.positional("directory", {
|
||||||
default: './package.lbry',
|
default: "./src",
|
||||||
|
describe: "The source directory to pack"
|
||||||
})
|
})
|
||||||
}, function (argv) {
|
.positional("file", {
|
||||||
|
describe: "Output file",
|
||||||
|
default: "./package.lbry"
|
||||||
|
})
|
||||||
|
.option("t", {
|
||||||
|
alias: "template",
|
||||||
|
describe: "Use web template"
|
||||||
|
});
|
||||||
|
},
|
||||||
|
function(argv) {
|
||||||
console.log(`Packing ${argv.directory} to ${argv.file}`);
|
console.log(`Packing ${argv.directory} to ${argv.file}`);
|
||||||
|
|
||||||
const resolvedDirectory = path.resolve(argv.directory);
|
const resolvedDirectory = path.resolve(argv.directory);
|
||||||
|
@ -24,28 +31,35 @@ require('yargs')
|
||||||
|
|
||||||
packDirectory(resolvedDirectory, {
|
packDirectory(resolvedDirectory, {
|
||||||
fileName: resolvedfile,
|
fileName: resolvedfile,
|
||||||
|
useTemplate: argv.template
|
||||||
});
|
});
|
||||||
|
}
|
||||||
|
)
|
||||||
|
.command(
|
||||||
|
"unpack [directory] [file]",
|
||||||
|
"Unpack a file",
|
||||||
|
yargs => {
|
||||||
|
yargs
|
||||||
|
.positional("destination", {
|
||||||
|
type: "string",
|
||||||
|
default: "./src",
|
||||||
|
describe: "The folder destination to unpack to"
|
||||||
})
|
})
|
||||||
.command('unpack [directory] [file]', 'Unpack a file', (yargs) => {
|
.positional("file", {
|
||||||
yargs.positional('destination', {
|
describe: "Input filename",
|
||||||
type: 'string',
|
default: "package.lbry"
|
||||||
default: './src',
|
});
|
||||||
describe: 'The folder destination to unpack to'
|
},
|
||||||
}).positional('file', {
|
function(argv) {
|
||||||
describe: 'Input filename',
|
|
||||||
default: 'package.lbry',
|
|
||||||
})
|
|
||||||
}, function (argv) {
|
|
||||||
console.log(`Packing ${argv.directory} to ${argv.file}`);
|
console.log(`Packing ${argv.directory} to ${argv.file}`);
|
||||||
|
|
||||||
|
|
||||||
const resolvedDirectory = path.resolve(argv.directory);
|
const resolvedDirectory = path.resolve(argv.directory);
|
||||||
const resolvedfile = path.resolve(argv.file);
|
const resolvedfile = path.resolve(argv.file);
|
||||||
|
|
||||||
unpackDirectory(resolvedDirectory, {
|
unpackDirectory(resolvedDirectory, {
|
||||||
fileName: resolvedfile,
|
fileName: resolvedfile
|
||||||
});
|
});
|
||||||
})
|
}
|
||||||
|
)
|
||||||
.help()
|
.help()
|
||||||
.demandCommand()
|
.demandCommand().argv;
|
||||||
.argv
|
|
||||||
|
|
25
templates.js
Normal file
25
templates.js
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
module.exports = {
|
||||||
|
default: {
|
||||||
|
resources: [
|
||||||
|
{
|
||||||
|
type: "static",
|
||||||
|
name: "index.html",
|
||||||
|
contents:
|
||||||
|
'<!doctype html><title>Gallary Slideshow</title><meta name=viewport content="width=device-width,initial-scale=1" charset=utf-8><link rel=stylesheet href=index.css><script src=files.js charset=utf-8></script><div id=container class=slideshow-container><a class=prev onclick=plusSlides(-1)>❮</a> <a class=next onclick=plusSlides(1)>❯</a></div><script src=index.js charset=utf-8></script>'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: "static",
|
||||||
|
name: "index.css",
|
||||||
|
contents:
|
||||||
|
"body,html{width:100%;height:100%;display:flex;background-color:#001}*{margin:0;padding:0}.slideshow-container{margin:auto;background-color:#000;display:flex;height:80%;width:80%;justify-content:space-between;align-items:center;border-radius:3px 3px 3px 3px}.mySlides{order:1;margin:auto;max-width:100%;max-height:100%}.next,.prev{order:0;cursor:pointer;width:auto;padding:16px;color:#fff;font-weight:700;font-size:18px;transition:.6s ease;border-radius:0 3px 3px 0;user-select:none}.next{order:2;border-radius:3px 0 0 3px}.next:hover,.prev:hover{background-color:#eee;color:#000}.fade{-webkit-animation-name:fade;-webkit-animation-duration:1s;animation-name:fade;animation-duration:1s}@-webkit-keyframes fade{from{opacity:.4}to{opacity:1}}@keyframes fade{from{opacity:.4}to{opacity:1}}@media only screen and (max-width:300px){.next,.prev,.text{font-size:11px}}"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: "static",
|
||||||
|
name: "index.js",
|
||||||
|
contents:
|
||||||
|
'const container=document.getElementById("container");for(let e=0;e<images.length;e++){const n=document.createElement("img");n.className="mySlides fade",n.src=images[e],container.appendChild(n)}let slideIndex=1;function plusSlides(e){showSlides(slideIndex+=e)}function showSlides(e){const n=document.getElementsByClassName("mySlides");e>n.length&&(slideIndex=1),e<1&&(slideIndex=n.length);for(let e=0;e<n.length;e++)n[e].style.display="none";n[slideIndex-1].style.display="block"}showSlides(slideIndex);'
|
||||||
|
}
|
||||||
|
],
|
||||||
|
title: "Gallary Slideshow"
|
||||||
|
}
|
||||||
|
};
|
Loading…
Reference in a new issue