Merge pull request #942 from jessopb/lbrynetFileWait
Checks whether the file is ready by querying the SDK's file_list with the claim outpoint
commit 629d133fd2
3 changed files with 49 additions and 28 deletions
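In outline, claim/get no longer waits on the downloaded file's on-disk size; it passes the claim's outpoint to awaitFileSize, which polls the SDK's file_list method until the entry reports completed or enough written_bytes. A condensed sketch of the handler-side wait, using names from the diff below (the surrounding handler code is simplified away):

    // Condensed from the claim/get hunk below; lbrynetResult is the SDK result the handler already holds.
    const fileReady = await awaitFileSize(lbrynetResult.outpoint, 2000000, 10000, 250);
    if (fileReady !== 'ready') {
      throw new Error('claim/get: failed to get file after 10 seconds');
    }
    // awaitFileSize resolves with 'ready' once file_list shows completed === true or written_bytes > 2000000.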
@@ -1,11 +1,9 @@
-const { getClaim } = require('../../../../lbrynet');
-const {
-  createFileRecordDataAfterGet,
-} = require('../../../../models/utils/createFileRecordData.js');
+const { getClaim } = require('server/lbrynet');
+const { createFileRecordDataAfterGet } = require('server/models/utils/createFileRecordData.js');
 const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
 const getClaimData = require('server/utils/getClaimData');
 const chainquery = require('chainquery').default;
-const db = require('../../../../models');
+const db = require('server/models');
 const logger = require('winston');
+const awaitFileSize = require('server/utils/awaitFileSize');
@@ -38,7 +36,7 @@ const claimGet = async ({ ip, originalUrl, params }, res) => {
     if (!claimData) {
       throw new Error('claim/get: getClaimData failed to get file blobs');
     }
-    let fileReady = await awaitFileSize(lbrynetResult.download_path, 2000000, 10000, 250);
+    const fileReady = await awaitFileSize(lbrynetResult.outpoint, 2000000, 10000, 250);

     if (fileReady !== 'ready') {
       throw new Error('claim/get: failed to get file after 10 seconds');
@@ -53,6 +53,26 @@ module.exports = {
         });
     });
   },
+  getFileListFileByOutpoint(outpoint) {
+    logger.debug(`lbryApi >> Getting File_List for "${outpoint}"`);
+    const gaStartTime = Date.now();
+    return new Promise((resolve, reject) => {
+      axios
+        .post(lbrynetUri, {
+          method: 'file_list',
+          params: {
+            outpoint,
+          },
+        })
+        .then(response => {
+          sendGATimingEvent('lbrynet', 'getFileList', 'FILE_LIST', gaStartTime, Date.now());
+          handleLbrynetResponse(response, resolve, reject);
+        })
+        .catch(error => {
+          reject(error);
+        });
+    });
+  },
   async abandonClaim({ claimId }) {
     logger.debug(`lbryApi >> Abandon claim "${claimId}"`);
     const gaStartTime = Date.now();
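getFileListFileByOutpoint wraps the SDK's file_list JSON-RPC method, filtered to a single outpoint, and resolves through the existing handleLbrynetResponse helper. A minimal usage sketch, assuming an outpoint string of the usual "txid:nout" form (the value below is a placeholder, not from this diff):

    // Usage sketch for the new wrapper; the outpoint value is hypothetical.
    const { getFileListFileByOutpoint } = require('server/lbrynet');

    const outpoint = 'abc123:0'; // placeholder "transaction id:output index" pair
    getFileListFileByOutpoint(outpoint)
      .then(fileList => {
        // file_list resolves to an array; the first entry describes the local copy,
        // including the completed flag and written_bytes that awaitFileSize checks.
        console.log(fileList[0].completed, fileList[0].written_bytes);
      })
      .catch(console.error);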
@@ -1,27 +1,30 @@
-const fs = require('fs');
-const { promisify } = require('util');
+const { getFileListFileByOutpoint } = require('server/lbrynet');
 const logger = require('winston');

-const fsstat = promisify(fs.stat);
-const awaitFileSize = (path, sizeInBytes, timeout, interval) => {
-  return new Promise((resolve, reject) => {
-    let totalTime = 0;
-    let timer = setInterval(() => {
-      totalTime = totalTime + interval;
-      fsstat(path)
-        .then(stats => {
-          if (stats.size > sizeInBytes) {
-            clearInterval(interval);
-            resolve('ready');
-          }
-          if (totalTime > timeout) {
-            const error = new Error('File did not arrive in time');
-            error.name = 'FILE_NOT_ARRIVED';
-            reject(error);
-          }
-        })
-        .catch();
-    }, interval);
+function delay(t) {
+  return new Promise(function(resolve) {
+    setTimeout(resolve, t);
+  });
+}
+
+const awaitFileSize = (outpoint, size, interval, timeout) => {
+  logger.debug('awaitFileSize');
+  let start = Date.now();
+  function checkFileList() {
+    logger.debug('checkFileList');
+    return getFileListFileByOutpoint(outpoint).then(result => {
+      logger.debug('File List Result', result);
+      if (result[0]['completed'] === true || result[0]['written_bytes'] > size) {
+        logger.debug('FILE READY');
+        return 'ready';
+      } else if (timeout !== 0 && Date.now() - start > timeout) {
+        throw new Error('Timeout on awaitFileSize');
+      } else {
+        return delay(interval).then(checkFileList);
+      }
+    });
+  }
+  return checkFileList();
+};

 module.exports = awaitFileSize;
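The rewritten awaitFileSize resolves with 'ready' once the file_list entry is completed or has written more than size bytes, re-checking every interval milliseconds and rejecting once timeout milliseconds have passed (a timeout of 0 disables the deadline). A usage sketch following the new (outpoint, size, interval, timeout) parameter order; the values here are illustrative rather than taken from the call site above:

    // Illustrative call: treat 2 MB as "big enough", poll every 250 ms, give up after 10 s.
    const awaitFileSize = require('server/utils/awaitFileSize');

    awaitFileSize(outpoint, 2000000, 250, 10000) // outpoint obtained elsewhere, e.g. from the SDK get result
      .then(status => {
        // status === 'ready'
      })
      .catch(err => {
        // rejects with Error('Timeout on awaitFileSize') if the deadline passes first
      });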