Use chunkedDecompress to resolve more memory limit issues
parent ca33e18e5a
commit e2f6d8f68a
1 changed file with 22 additions and 1 deletion
index.js | 23 ++++++++++++++++++++++-
index.js
@@ -166,7 +166,28 @@ async function unpackDirectory(directory, options = {}) {
 	let contents = await streamToBuffer(fileStream);
 	contents = new Uint8Array(contents);
 
-	contents = zstd.decompress(contents);
+	// Must be chunked to avoid issues with fixed memory limits.
+	const chunkIterator = (() => {
+		const chunkSize = 2048;
+		let position = 0;
+
+		const iterator = {
+			next: function() {
+				const endIndex = position + chunkSize;
+				const result = {
+					value: contents.slice(position, endIndex),
+					done: position >= contents.length,
+				};
+
+				position = endIndex;
+				return result;
+			},
+			[Symbol.iterator]: function() { return this }
+		};
+		return iterator;
+	})();
+
+	contents = zstd.decompressChunks(chunkIterator);
 
 	if(!/^\./.test(header.name)) {
 		const writePath = path.join(directory, header.name);
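The added block is a hand-rolled implementation of the JavaScript iterator protocol (next() plus [Symbol.iterator]), so decompressChunks sees the buffer as a sequence of 2048-byte slices rather than one large allocation. A minimal sketch of the same slicing written as a generator, assuming only that zstd.decompressChunks accepts any iterable of Uint8Array chunks (an assumption read off the diff, not a documented API):

  // Sketch only, not part of the commit: a generator that yields the same
  // 2048-byte slices as the hand-rolled iterator in the diff.
  function* chunksOf(buffer, chunkSize = 2048) {
    for (let position = 0; position < buffer.length; position += chunkSize) {
      // slice() clamps to the end of the buffer, so the final chunk may be short.
      yield buffer.slice(position, position + chunkSize);
    }
  }

  // Hypothetical usage mirroring the diff:
  // contents = zstd.decompressChunks(chunksOf(contents));

Either form feeds the decompressor incrementally; the version in the commit just spells the iterator out by hand, only reporting done: true once position has moved past the end of the buffer.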