WIP removing dependency on localdb claim table

jessop 2019-04-01 10:23:14 -04:00
parent 13cee683c3
commit cf24a39d0b
22 changed files with 439 additions and 295 deletions


@ -7,8 +7,7 @@ import Img from 'react-image';
const AssetPreview = ({ defaultThumbnail, claimData }) => {
const {name, fileExt, contentType, thumbnail, title, blocked, transactionTime = 0} = claimData;
const showUrl = createCanonicalLink({asset: {...claimData}});
console.log(transactionTime)
const embedUrl = `${showUrl}.${fileExt}`;
const embedUrl = `${showUrl}.${fileExt}?thumbnail=true`;
const ago = Date.now() / 1000 - transactionTime;
const dayInSeconds = 60 * 60 * 24;
const monthInSeconds = dayInSeconds * 30;
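
The hunk cuts off right after the age math. For reference, a minimal sketch of how `ago`, `dayInSeconds`, and `monthInSeconds` would typically feed a coarse time-ago label — the real continuation is truncated here, so the function below is an assumption, not the component's code:

// Hedged sketch only; the component's actual label logic is not shown in this hunk.
const getTimeAgoLabel = (ago, dayInSeconds, monthInSeconds) => {
  if (ago < dayInSeconds) return 'today';
  if (ago < monthInSeconds) return `${Math.floor(ago / dayInSeconds)} days ago`;
  return `${Math.floor(ago / monthInSeconds)} months ago`;
};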

package-lock.json

@ -3319,6 +3319,11 @@
"resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.0.tgz",
"integrity": "sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk="
},
"clone": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz",
"integrity": "sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18="
},
"clone-deep": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-2.0.2.tgz",
@ -8917,6 +8922,15 @@
"resolved": "https://registry.npmjs.org/nocache/-/nocache-2.0.0.tgz",
"integrity": "sha1-ICtIAhoMTL3i34DeFaF0Q8i0OYA="
},
"node-cache": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/node-cache/-/node-cache-4.2.0.tgz",
"integrity": "sha512-obRu6/f7S024ysheAjoYFEEBqqDWv4LOMNJEuO8vMeEw2AT4z+NCzO4hlc2lhI4vATzbCQv6kke9FVdx0RbCOw==",
"requires": {
"clone": "2.x",
"lodash": "4.x"
}
},
"node-fetch": {
"version": "2.1.2",
"resolved": "http://registry.npmjs.org/node-fetch/-/node-fetch-2.1.2.tgz",


@ -57,6 +57,7 @@
"mime-types": "^2.1.21",
"module-alias": "^2.1.0",
"mysql2": "^1.6.4",
"node-cache": "^4.2.0",
"npm": "^6.3.0",
"passport": "^0.4.0",
"passport-local": "^1.0.0",


@ -13,6 +13,9 @@ const getterMethods = {
return mime.extension(this.content_type) ? mime.extension(this.content_type) : 'jpg';
}
},
generated_outpoint() {
return `${this.transaction_hash_id}:${this.vout}`;
},
};
export default (sequelize, { BOOLEAN, DATE, DECIMAL, ENUM, INTEGER, STRING, TEXT }) =>
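
The new `generated_outpoint` getter composes the `txid:vout` outpoint format the SDK's file_list uses. A minimal usage sketch, assuming a claim instance resolved through the chainquery queries this commit also touches:

// Hedged usage sketch: reading the virtual getter off a resolved chainquery claim.
const getOutpointForClaim = async claimId => {
  const claim = await chainquery.claim.queries.resolveClaim(claimId);
  return claim.generated_outpoint; // "<transaction_hash_id>:<vout>"
};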


@ -187,19 +187,18 @@ export default (db, table, sequelize) => ({
}
},
resolveClaim: async (name, claimId) => {
logger.debug(`Claim.resolveClaim: ${name} ${claimId}`);
resolveClaim: async claimId => {
logger.debug(`Claim.resolveClaim: ${claimId}`);
return table
.findAll({
where: { name, claim_id: claimId },
where: { claim_id: claimId },
})
.then(claimArray => {
if (claimArray.length === 0) {
return null;
} else if (claimArray.length !== 1) {
logger.warn(`more than one record matches ${name}#${claimId} in db.Claim`);
logger.warn(`more than one record matches ${claimId} in db.Claim`);
}
return claimArray[0];
});
},
@ -223,23 +222,22 @@ export default (db, table, sequelize) => ({
return claimArray[0];
});
},
getOutpoint: async (name, claimId) => {
logger.debug(`finding outpoint for ${name}#${claimId}`);
getOutpoint: async claimId => {
logger.debug(`finding outpoint for ${claimId}`);
return await table
.findAll({
where: { name, claim_id: claimId },
attributes: ['transaction_hash_id'],
where: { claim_id: claimId },
attributes: ['transaction_hash_id', 'vout'],
})
.then(result => {
if (result.length === 0) {
throw new Error(`no record found for ${name}#${claimId}`);
throw new Error(`no record found for ${claimId}`);
} else if (result.length !== 1) {
logger.warn(`more than one record matches ${name}#${claimId} in db.Claim`);
logger.warn(`more than one record matches ${claimId} in db.Claim`);
}
return result[0].transaction_hash_id;
return `${result[0].transaction_hash_id}:${result[0].vout}`;
});
},
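
With `getOutpoint` now keyed by `claimId` alone and returning the full `txid:vout` string, the result can feed an SDK lookup directly. A sketch under that assumption, using `getFileListFileByOutpoint` from `server/lbrynet` as elsewhere in this commit:

// Hedged sketch: the outpoint string now pairs directly with a file_list query.
const { getFileListFileByOutpoint } = require('server/lbrynet');
const findFileForClaim = async claimId => {
  const outpoint = await chainquery.claim.queries.getOutpoint(claimId);
  return getFileListFileByOutpoint(outpoint);
};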


@ -1,31 +1,37 @@
const logger = require('winston');
const db = require('server/models');
const chainquery = require('chainquery').default;
const { abandonClaim } = require('server/lbrynet');
const deleteFile = require('../publish/deleteFile.js');
const authenticateUser = require('../publish/authentication.js');
/*
route to abandon a claim through the daemon
DO AFTER THE REST WORKS
*/
const claimAbandon = async (req, res) => {
const {claimId} = req.body;
const {user} = req;
const { claimId } = req.body;
const { user } = req;
try {
// This must not depend on db.Claim
const [channel, claim] = await Promise.all([
authenticateUser(user.channelName, null, null, user),
db.Claim.findOne({where: {claimId}}),
chainquery.claim.queries.resolveClaim(claimId),
]);
if (!claim) throw new Error('That channel does not exist');
if (!channel.channelName) throw new Error('You don\'t own this channel');
if (!claim) throw new Error('That claim does not exist');
if (!channel.channelName) throw new Error("You don't own this channel");
await abandonClaim({claimId});
const file = await db.File.findOne({where: {claimId}});
await abandonClaim({ claimId });
// Add file_delete here.
// Using db.File just to get the path. Use file_list.
const file = await db.File.findOne({ where: { claimId } });
await Promise.all([
deleteFile(file.filePath),
db.File.destroy({where: {claimId}}),
db.Claim.destroy({where: {claimId}}),
db.File.destroy({ where: { claimId } }),
// Remove this
db.Claim.destroy({ where: { claimId } }),
]);
logger.debug(`Claim abandoned: ${claimId}`);
res.status(200).json({
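
The comments above point at replacing the `db.File` path lookup with the SDK's file_list. A minimal sketch of that direction, assuming `getOutpoint` and `getFileListFileByOutpoint` as used elsewhere in this commit — not this route's final code:

// Hedged sketch of the file_list-based path lookup the comments describe.
const { getFileListFileByOutpoint } = require('server/lbrynet');
const deleteAbandonedFile = async claimId => {
  const outpoint = await chainquery.claim.queries.getOutpoint(claimId);
  const [fileListFile] = await getFileListFileByOutpoint(outpoint);
  // download_path is the SDK field replacing db.File's filePath
  if (fileListFile) await deleteFile(fileListFile.download_path);
};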


@ -1,8 +1,7 @@
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
const getClaimData = require('server/utils/getClaimData');
const fetchClaimData = require('server/utils/fetchClaimData');
const chainquery = require('chainquery').default;
const db = require('server/models');
const logger = require('winston');
/*
route to return data for a claim
@ -22,7 +21,7 @@ const claimData = async ({ ip, originalUrl, body, params }, res) => {
res.status(200).json({
success: true,
data : await getClaimData(resolvedClaim),
data: await getClaimData(resolvedClaim),
});
} catch (error) {
handleErrorResponse(originalUrl, ip, error, res);


@ -7,6 +7,11 @@ const db = require('server/models');
const logger = require('winston');
const awaitFileSize = require('server/utils/awaitFileSize');
const isBot = require('isbot');
const publishCache = require('server/utils/publishCache');
const RETRY_MS = 250;
const TIMEOUT_MS = 15000;
const MIN_BYTES = 15000000;
/*
@ -15,20 +20,19 @@ const isBot = require('isbot');
*/
const claimGet = async ({ ip, originalUrl, params, headers }, res) => {
logger.debug(`claim/get params`, params);
const name = params.name;
const claimId = params.claimId;
try {
let claimInfo = await chainquery.claim.queries.resolveClaim(name, claimId).catch(() => {});
if (claimInfo) {
logger.info('claim/get: claim resolved in chainquery');
}
if (!claimInfo) {
claimInfo = await db.Claim.resolveClaim(name, claimId);
}
if (!claimInfo) {
let claimDataFromChainquery = await chainquery.claim.queries
.resolveClaim(claimId)
.catch(() => null);
if (!claimDataFromChainquery) {
throw new Error('claim/get: resolveClaim: No matching uri found in Claim table');
}
if (headers && headers['user-agent'] && isBot(headers['user-agent'])) {
let lbrynetResolveResult = await resolveUri(`${name}#${claimId}`);
const { message, completed } = lbrynetResolveResult;
@ -39,25 +43,36 @@ const claimGet = async ({ ip, originalUrl, params, headers }, res) => {
});
return true;
}
let lbrynetResult = await getClaim(`${name}#${claimId}`);
if (!lbrynetResult) {
throw new Error(`claim/get: getClaim Unable to Get ${name}#${claimId}`);
}
const claimData = await getClaimData(claimInfo);
if (!claimData) {
throw new Error('claim/get: getClaimData failed to get file blobs');
}
const fileReady = await awaitFileSize(lbrynetResult.outpoint, 10000000, 250, 10000);
const claimData = await getClaimData(claimDataFromChainquery);
const fileReady = await awaitFileSize(lbrynetResult.outpoint, MIN_BYTES, RETRY_MS, TIMEOUT_MS);
if (fileReady !== 'ready') {
throw new Error('claim/get: failed to get file after 15 seconds');
}
const fileData = await createFileRecordDataAfterGet(claimData, lbrynetResult);
const fileData = await createFileRecordDataAfterGet(claimData, lbrynetResult).catch(() => null);
if (!fileData) {
throw new Error('claim/get: createFileRecordDataAfterGet failed to create file in time');
logger.info(
'claim/get: createFileRecordDataAfterGet failed to create file dimensions in time'
);
}
const upsertCriteria = { name, claimId };
await db.upsert(db.File, fileData, upsertCriteria, 'File');
const upsertResult = await db
.upsert(db.File, fileData, upsertCriteria, 'File')
.catch(() => null);
if (!upsertResult) {
logger.info('claim/get: DB file upsert failed');
}
const { message, completed } = lbrynetResult;
res.status(200).json({
success: true,
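
`awaitFileSize` itself isn't shown in this commit; the constants imply a poll-until-size-or-timeout contract — retry every RETRY_MS until the download reaches MIN_BYTES or TIMEOUT_MS elapses, resolving 'ready' on success. A sketch of that assumed contract, not the real module:

// Hedged sketch of the assumed awaitFileSize contract (not the actual module).
const { getFileListFileByOutpoint } = require('server/lbrynet');
const awaitFileSizeSketch = async (outpoint, minBytes, retryMs, timeoutMs) => {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    const [file] = (await getFileListFileByOutpoint(outpoint)) || [];
    // completed/written_bytes are assumed sdk file_list fields
    if (file && (file.completed || file.written_bytes >= minBytes)) return 'ready';
    await new Promise(resolve => setTimeout(resolve, retryMs));
  }
  return 'timeout';
};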


@ -1,6 +1,7 @@
const db = require('server/models');
const chainquery = require('chainquery').default;
const logger = require('winston');
const publishCache = require('server/utils/publishCache');
const { handleErrorResponse } = require('server/controllers/utils/errorHandlers.js');
const getClaimId = require('server/controllers/utils/getClaimId.js');
@ -24,8 +25,17 @@ const claimLongId = ({ ip, originalUrl, body, params }, res) => {
getClaimId(channelName, channelClaimId, claimName, claimId)
.then(fullClaimId => {
claimId = fullClaimId;
return chainquery.claim.queries.getOutpoint(claimName, fullClaimId).catch(() => {});
if (!fullClaimId) {
throw new Error('Unable to get fullClaimId');
}
return chainquery.claim.queries.getOutpoint(fullClaimId).catch(() => {
logger.debug(`failed to get outpoint from chainquery given ${claimName} and ${fullClaimId}`);
});
})
// Remove this, replace with file_list
// In the event that we need the longId of a claim just published
// check to see if shortClaimId matches cache, then verify
// Should we also verify
.then(outpointResult => {
if (!outpointResult) {
return db.Claim.getOutpoint(claimName, claimId);
@ -52,10 +62,10 @@ const claimLongId = ({ ip, originalUrl, body, params }, res) => {
});
}
if (error === BLOCKED_CLAIM) {
return res.status(410).json({
return res.status(451).json({
success: false,
message:
'In response to a complaint we received under the US Digital Millennium Copyright Act, we have blocked access to this content from our applications. For more details, see https://lbry.com/faq/dmca',
'In response to a complaint we received under the US Digital Millennium Copyright Act, we have blocked access to this content from our applications. For more details, see https://lbry.io/faq/dmca',
});
}
handleErrorResponse(originalUrl, ip, error, res);
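
The comments above describe the intended fallback: when chainquery misses a just-published claim, consult the publish cache instead of db.Claim. A minimal sketch under that reading (the verification step is left open, as in the trailing comment):

// Hedged sketch of the publishCache fallback the comments describe.
const getOutpointWithCacheFallback = async claimId => {
  const outpoint = await chainquery.claim.queries.getOutpoint(claimId).catch(() => null);
  if (outpoint) return outpoint;
  const cached = publishCache.get(claimId); // recent publish not yet in chainquery
  return cached && cached.outpoint;
};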


@ -8,7 +8,7 @@ const {
const { sendGATimingEvent } = require('server/utils/googleAnalytics.js');
const isApprovedChannel = require('@globalutils/isApprovedChannel');
const {
publishing: { publishOnlyApproved, approvedChannels },
publishing: { publishOnlyApproved, approvedChannels, thumbnailChannel, thumbnailChannelId },
} = require('@config/siteConfig');
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
@ -24,7 +24,7 @@ const authenticateUser = require('./authentication.js');
const chainquery = require('chainquery').default;
const createCanonicalLink = require('@globalutils/createCanonicalLink');
const publishCache = require('server/utils/publishCache');
const CLAIM_TAKEN = 'CLAIM_TAKEN';
const UNAPPROVED_CHANNEL = 'UNAPPROVED_CHANNEL';
@ -69,6 +69,7 @@ const claimPublish = ({ body, files, headers, ip, originalUrl, user, tor }, res)
thumbnailFileType,
title,
claimData,
thumbData,
claimId;
// record the start time of the request
gaStartTime = Date.now();
@ -133,20 +134,27 @@ const claimPublish = ({ body, files, headers, ip, originalUrl, user, tor }, res)
};
throw error;
}
let promises = [];
promises.push(publish(publishParams, fileName, fileType, filePath));
// publish the thumbnail, if one exists
if (thumbnailPublishParams) {
publish(thumbnailPublishParams, thumbnailFileName, thumbnailFileType);
promises.push(publish(thumbnailPublishParams, thumbnailFileName, thumbnailFileType));
}
// publish the asset
return publish(publishParams, fileName, fileType, filePath);
return Promise.all(promises);
})
.then(publishResults => {
logger.info('Publish success >', publishResults);
claimData = publishResults;
logger.debug('Publish success >', publishResults[0]);
if (publishResults[1]) {
logger.debug('Thumb Publish success >', publishResults[1]);
thumbData = publishResults[1];
}
claimData = publishResults[0];
({ claimId } = claimData);
if (channelName) {
logger.info(`api/claim/publish: claimData.certificateId ${claimData.certificateId}`);
logger.verbose(`api/claim/publish: claimData.certificateId ${claimData.certificateId}`);
return chainquery.claim.queries.getShortClaimIdFromLongClaimId(
claimData.certificateId,
channelName
@ -167,6 +175,23 @@ const claimPublish = ({ body, files, headers, ip, originalUrl, user, tor }, res)
canonicalUrl = createCanonicalLink({ asset: { ...claimData, shortId } });
}
// make sure we can look up the claimId until chainquery has it
let canonicalThumbUrl;
if (thumbData) {
canonicalThumbUrl = createCanonicalLink({
asset: {
channelName: thumbnailChannel,
channelShortId: thumbnailChannelId,
name: thumbData.name,
},
});
logger.verbose('canonicalThumbUrl', canonicalThumbUrl);
publishCache.set(canonicalThumbUrl, thumbData.claimId);
publishCache.set(thumbData.claimId, thumbData);
}
publishCache.set(canonicalUrl, claimData.claimId);
publishCache.set(claimData.claimId, claimData);
res.status(200).json({
success: true,
message: 'publish completed successfully',
@ -180,7 +205,6 @@ const claimPublish = ({ body, files, headers, ip, originalUrl, user, tor }, res)
claimData,
},
});
// record the publish end time and send to google analytics
sendGATimingEvent('end-to-end', 'publish', fileType, gaStartTime, Date.now());
})
.catch(error => {


@ -1,8 +1,10 @@
const logger = require('winston');
const db = require('../../../../models');
const { publishClaim } = require('../../../../lbrynet');
const { createFileRecordDataAfterPublish } = require('../../../../models/utils/createFileRecordData.js');
const { createClaimRecordDataAfterPublish } = require('../../../../models/utils/createClaimRecordData.js');
const { createFileRecordDataAfterPublish } = require('server/models/utils/createFileRecordData.js');
const {
createClaimRecordDataAfterPublish,
} = require('server/models/utils/createClaimRecordData.js');
const deleteFile = require('./deleteFile.js');
const publish = async (publishParams, fileName, fileType) => {
@ -29,30 +31,39 @@ const publish = async (publishParams, fileName, fileType) => {
const certificateId = channel ? channel.channelClaimId : null;
const channelName = channel ? channel.channelName : null;
const claimRecord = await createClaimRecordDataAfterPublish(certificateId, channelName, fileName, fileType, publishParams, publishResults);
const {claimId} = claimRecord;
const upsertCriteria = {name: publishParams.name, claimId};
const claimRecord = await createClaimRecordDataAfterPublish(
certificateId,
channelName,
fileName,
fileType,
publishParams,
publishResults
);
const { claimId } = claimRecord;
const upsertCriteria = { name: publishParams.name, claimId };
if (newFile) {
// this is the problem
//
fileRecord = await createFileRecordDataAfterPublish(fileName, fileType, publishParams, publishResults);
fileRecord = await createFileRecordDataAfterPublish(
fileName,
fileType,
publishParams,
publishResults
);
} else {
fileRecord = await db.File.findOne({where: {claimId}}).then(result => result.dataValues);
fileRecord = await db.File.findOne({ where: { claimId } }).then(result => result.dataValues);
}
// TODO: refactor when SDK has dimension info
const [file, claim] = await Promise.all([
db.upsert(db.File, fileRecord, upsertCriteria, 'File'),
db.upsert(db.Claim, claimRecord, upsertCriteria, 'Claim'),
]);
logger.info(`File and Claim records successfully created (${publishParams.name})`);
logger.debug(`File and Claim records successfully created (${publishParams.name})`);
// Remove this
await Promise.all([file.setClaim(claim), claim.setFile(file)]);
logger.debug(`File and Claim records successfully associated (${publishParams.name})`);
await Promise.all([
file.setClaim(claim),
claim.setFile(file),
]);
logger.info(`File and Claim records successfully associated (${publishParams.name})`);
return Object.assign({}, claimRecord, {outpoint});
return Object.assign({}, claimRecord, { outpoint });
} catch (err) {
// parse daemon response when err is a string
// this needs work
@ -61,7 +72,8 @@ const publish = async (publishParams, fileName, fileType) => {
if (publishParams.file_path) {
await deleteFile(publishParams.file_path);
}
const message = error.error && error.error.message ? error.error.message : 'Unknown publish error';
const message =
error.error && error.error.message ? error.error.message : 'Unknown publish error';
return {
error: true,
message,


@ -1,5 +1,4 @@
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
const db = require('../../../../models');
const chainquery = require('chainquery').default;
/*
@ -9,14 +8,15 @@ const chainquery = require('chainquery').default;
*/
const claimShortId = async ({ ip, originalUrl, body, params }, res) => {
// TODO: use new sdk partialId features when available
try {
let shortId = await chainquery.claim.queries.getShortClaimIdFromLongClaimId(params.longId, params.name).catch(() => {});
let shortId = await chainquery.claim.queries
.getShortClaimIdFromLongClaimId(params.longId, params.name)
.catch(() => {
return params.longId;
});
if (!shortId) {
shortId = await db.Claim.getShortClaimIdFromLongClaimId(params.longId, params.name);
}
res.status(200).json({success: true, data: shortId});
res.status(200).json({ success: true, data: shortId });
} catch (error) {
handleErrorResponse(originalUrl, ip, error, res);
}


@ -14,6 +14,8 @@ const authenticateUser = require('../publish/authentication.js');
const createThumbnailPublishParams = require('../publish/createThumbnailPublishParams.js');
const chainquery = require('chainquery').default;
const createCanonicalLink = require('@globalutils/createCanonicalLink');
const { getFileListFileByOutpoint } = require('server/lbrynet');
const publishCache = require('server/utils/publishCache');
/*
route to update a claim through the daemon
@ -69,6 +71,7 @@ const claimUpdate = ({ body, files, headers, ip, originalUrl, user, tor }, res)
licenseUrl,
name,
nsfw,
outpoint,
thumbnailFileName,
thumbnailFilePath,
thumbnailFileType,
@ -105,31 +108,34 @@ const claimUpdate = ({ body, files, headers, ip, originalUrl, user, tor }, res)
}
// check channel authorization
//
// Assume that if this is an update,
// chainquery probably has the claim in mempool
// so no cache check needed
authenticateUser(channelName, channelId, channelPassword, user)
.then(({ channelName, channelClaimId }) => {
if (!channelId) {
channelId = channelClaimId;
}
return chainquery.claim.queries
.resolveClaimInChannel(name, channelClaimId)
.then(claim => claim.dataValues);
return chainquery.claim.queries.resolveClaimInChannel(name, channelClaimId).then(claim => {
outpoint = claim.generated_outpoint;
return claim.dataValues;
});
})
.then(claim => {
claimRecord = claim;
if (claimRecord.content_type === 'video/mp4' && files.file) {
thumbnailUpdate = true;
}
if (!files.file || thumbnailUpdate) {
return Promise.all([
db.File.findOne({ where: { name, claimId: claim.claim_id } }),
resolveUri(`${claim.name}#${claim.claim_id}`),
]);
// return Promise.all([
// db.File.findOne({ where: { name, claimId: claim.claim_id } }),
return getFileListFileByOutpoint(outpoint);
// ]);
}
return [null, null];
})
.then(([fileResult, resolution]) => {
}) // remove fileResult
.then(fileListResult => {
logger.verbose('fileListResult', fileListResult);
metadata = Object.assign(
{},
{
@ -151,7 +157,6 @@ const claimUpdate = ({ body, files, headers, ip, originalUrl, user, tor }, res)
channel_id: channelId,
metadata,
};
if (files.file) {
if (thumbnailUpdate) {
// publish new thumbnail
@ -172,8 +177,8 @@ const claimUpdate = ({ body, files, headers, ip, originalUrl, user, tor }, res)
publishParams['file_path'] = filePath;
}
} else {
fileName = fileResult.fileName;
fileType = fileResult.fileType;
fileName = fileListResult.file_name;
fileType = fileListResult.mime_type;
publishParams['thumbnail'] = claimRecord.thumbnail_url;
}
@ -205,6 +210,8 @@ const claimUpdate = ({ body, files, headers, ip, originalUrl, user, tor }, res)
} else {
canonicalUrl = createCanonicalLink({ asset: { ...publishResult, shortId } });
}
publishCache.set(canonicalUrl, publishResult.claimId);
publishCache.set(publishResult.claimId, publishResult);
if (publishResult.error) {
res.status(400).json({


@ -4,6 +4,7 @@ const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
const { getFileListFileByOutpoint } = require('server/lbrynet');
const chainquery = require('chainquery').default;
const publishCache = require('server/utils/publishCache');
/*
@ -11,31 +12,29 @@ const chainquery = require('chainquery').default;
*/
const fileAvailability = ({ ip, originalUrl, params }, res) => {
const fileAvailability = async ({ ip, originalUrl, params }, res) => {
const name = params.name;
const claimId = params.claimId;
let outpoint;
logger.debug(`fileAvailability params: name:${name} claimId:${claimId}`);
// TODO: we probably eventually want to check the publishCache for the claimId too,
// and ship the outpoint to file_list.
return chainquery.claim.queries
.resolveClaim(name, claimId)
.then(result => {
return `${result.dataValues.transaction_hash_id}:${result.dataValues.vout}`;
})
.then(outpoint => {
logger.debug(`fileAvailability: outpoint: ${outpoint}`);
return getFileListFileByOutpoint(outpoint);
})
.then(result => {
if (result && result[0]) {
return res.status(200).json({ success: true, data: true });
} else {
res.status(200).json({ success: true, data: false });
}
})
.catch(error => {
handleErrorResponse(originalUrl, ip, error, res);
});
try {
if (publishCache.get(claimId)) {
return res.status(200).json({ success: true, data: true });
} else {
outpoint = (await chainquery.claim.queries.resolveClaim(claimId)).generated_outpoint;
}
logger.debug(`fileAvailability: outpoint: ${outpoint}`);
let fileData = await getFileListFileByOutpoint(outpoint);
if (fileData && fileData[0]) {
return res.status(200).json({ success: true, data: true });
} else {
res.status(200).json({ success: true, data: false });
}
} catch (error) {
handleErrorResponse(originalUrl, ip, error, res);
}
};
module.exports = fileAvailability;


@ -1,6 +1,7 @@
const logger = require('winston');
const db = require('../../../models');
const getClaimId = require('../../utils/getClaimId');
const publishCache = require('server/utils/publishCache');
const {
details: { host, title: siteTitle },


@ -1,13 +1,16 @@
const logger = require('winston');
const db = require('../../../models');
const db = require('server/models');
const chainquery = require('chainquery').default;
const isApprovedChannel = require('../../../../utils/isApprovedChannel');
const isApprovedChannel = require('@globalutils/isApprovedChannel');
const { getFileListFileByOutpoint, getClaim } = require('server/lbrynet');
const getClaimId = require('../../utils/getClaimId.js');
const { handleErrorResponse } = require('../../utils/errorHandlers.js');
const awaitFileSize = require('server/utils/awaitFileSize');
const serveFile = require('./serveFile.js');
const parseQueryString = require('server/utils/parseQuerystring');
const publishCache = require('server/utils/publishCache');
const isBot = require('isbot');
const NO_CHANNEL = 'NO_CHANNEL';
const NO_CLAIM = 'NO_CLAIM';
@ -15,118 +18,111 @@ const BLOCKED_CLAIM = 'BLOCKED_CLAIM';
const NO_FILE = 'NO_FILE';
const CONTENT_UNAVAILABLE = 'CONTENT_UNAVAILABLE';
const RETRY_MS = 250;
const TIMEOUT_MS = 15000;
const MIN_BYTES = 15000000;
const {
publishing: { serveOnlyApproved, approvedChannels },
} = require('@config/siteConfig');
const getClaimIdAndServeAsset = (
const getClaimIdAndServeAsset = async (
channelName,
channelClaimId,
claimName,
claimId,
partialClaimId,
originalUrl,
ip,
res,
headers
) => {
getClaimId(channelName, channelClaimId, claimName, claimId)
.then(fullClaimId => {
claimId = fullClaimId;
return chainquery.claim.queries.resolveClaim(claimName, fullClaimId).catch(() => {});
})
.then(claim => {
if (!claim) {
logger.debug('Full claim id:', claimId);
return db.Claim.findOne({
where: {
name: claimName,
claimId,
},
});
let outpoint;
let channelId;
let cqResult;
let claimId = '';
try {
const queryObject = parseQueryString(originalUrl);
claimId = await getClaimId(channelName, channelClaimId, claimName, partialClaimId);
if (publishCache.get(claimId)) {
outpoint = publishCache.get(claimId).outpoint;
channelId = publishCache.get(claimId).certificateId;
} else {
cqResult = await chainquery.claim.queries.resolveClaim(claimId);
if (!cqResult || !cqResult.dataValues) {
throw new Error(NO_CLAIM);
}
outpoint = cqResult.generated_outpoint;
channelId = channelClaimId || cqResult.dataValues.publisher_id;
}
if (serveOnlyApproved && !isApprovedChannel({ longId: channelId }, approvedChannels)) {
throw new Error(CONTENT_UNAVAILABLE);
}
// This throws "BLOCKED_CLAIM" on error
await db.Blocked.isNotBlocked(outpoint);
return claim;
})
.then(claim => {
let claimDataValues = claim.dataValues;
if (
serveOnlyApproved &&
!isApprovedChannel(
{ longId: claimDataValues.publisher_id || claimDataValues.certificateId },
approvedChannels
)
) {
throw new Error(CONTENT_UNAVAILABLE);
let fileListResult = await getFileListFileByOutpoint(outpoint);
if (fileListResult && fileListResult[0]) {
serveFile(fileListResult[0], res, originalUrl);
} else if (!isBot(headers['user-agent'])) {
let lbrynetResult = await getClaim(`${claimName}#${claimId}`);
if (!lbrynetResult || !lbrynetResult.claim_id) {
throw new Error('LBRYNET_NO_GET');
}
let outpoint =
claimDataValues.outpoint ||
`${claimDataValues.transaction_hash_id}:${claimDataValues.vout}`;
logger.debug('Outpoint:', outpoint);
return db.Blocked.isNotBlocked(outpoint).then(() => {
// If content was found, is approved, and not blocked - log a view.
if (headers && headers['user-agent'] && /LBRY/.test(headers['user-agent']) === false) {
db.Views.create({
time: Date.now(),
isChannel: false,
claimId: claimDataValues.claim_id || claimDataValues.claimId,
publisherId: claimDataValues.publisher_id || claimDataValues.certificateId,
ip,
});
}
let fileReady = await awaitFileSize(lbrynetResult.outpoint, MIN_BYTES, RETRY_MS, TIMEOUT_MS);
if (fileReady !== 'ready') {
throw new Error('claim/get: failed to get file after 15 seconds');
}
serveFile(lbrynetResult, res, originalUrl);
}
if (
// log a view only for non-LBRY-app requests that aren't thumbnail fetches
(headers && headers['user-agent'] && /LBRY/.test(headers['user-agent']) === false) &&
(!queryObject || !queryObject.hasOwnProperty('thumbnail'))
) {
db.Views.create({
time: Date.now(),
isChannel: false,
claimId: claimId,
publisherId: channelId,
ip,
});
})
.then(() => {
return db.File.findOne({
where: {
claimId,
name: claimName,
},
}
} catch (error) {
if (error === NO_CLAIM) {
logger.debug('no claim found');
return res.status(404).json({
success: false,
message: 'No matching claim id could be found for that url',
});
})
.then(fileRecord => {
if (!fileRecord) {
throw NO_FILE;
}
serveFile(fileRecord.dataValues, res, originalUrl);
})
.catch(error => {
if (error === NO_CLAIM) {
logger.debug('no claim found');
return res.status(404).json({
success: false,
message: 'No matching claim id could be found for that url',
});
}
if (error === NO_CHANNEL) {
logger.debug('no channel found');
return res.status(404).json({
success: false,
message: 'No matching channel id could be found for that url',
});
}
if (error === CONTENT_UNAVAILABLE) {
logger.debug('unapproved channel');
return res.status(400).json({
success: false,
message: 'This content is unavailable',
});
}
if (error === BLOCKED_CLAIM) {
logger.debug('claim was blocked');
return res.status(451).json({
success: false,
message:
'In response to a complaint we received under the US Digital Millennium Copyright Act, we have blocked access to this content from our applications. For more details, see https://lbry.com/faq/dmca',
});
}
if (error === NO_FILE) {
logger.debug('no file available');
return res.status(307).redirect(`/api/claim/get/${claimName}/${claimId}`);
}
handleErrorResponse(originalUrl, ip, error, res);
});
}
if (error === NO_CHANNEL) {
logger.debug('no channel found');
return res.status(404).json({
success: false,
message: 'No matching channel id could be found for that url',
});
}
if (error === CONTENT_UNAVAILABLE) {
logger.debug('unapproved channel');
return res.status(400).json({
success: false,
message: 'This content is unavailable',
});
}
if (error === BLOCKED_CLAIM) {
logger.debug('claim was blocked');
return res.status(451).json({
success: false,
message:
'In response to a complaint we received under the US Digital Millennium Copyright Act, we have blocked access to this content from our applications. For more details, see https://lbry.io/faq/dmca',
});
}
if (error === NO_FILE) {
logger.debug('no file available');
return res.status(307).redirect(`/api/claim/get/${claimName}/${claimId}`);
}
handleErrorResponse(originalUrl, ip, error, res);
}
};
module.exports = getClaimIdAndServeAsset;


@ -1,26 +1,18 @@
const logger = require('winston');
const transformImage = require('./transformImage');
const parseQueryString = require('server/utils/parseQuerystring');
const isValidQueryObject = require('server/utils/isValidQueryObj');
const {
serving: { dynamicFileSizing },
} = require('@config/siteConfig');
const { enabled: dynamicEnabled } = dynamicFileSizing;
const serveFile = async ({ filePath, fileType }, res, originalUrl) => {
const queryObject = {};
// TODO: replace quick/dirty try catch with better practice
try {
originalUrl
.split('?')[1]
.split('&')
.map(pair => {
if (pair.includes('=')) {
let parr = pair.split('=');
queryObject[parr[0]] = parr[1];
} else queryObject[pair] = true;
});
} catch (e) {}
const serveFile = async (
{ download_path: filePath, mime_type: fileType, total_bytes: totalBytes },
res,
originalUrl
) => {
const queryObject = parseQueryString(originalUrl) || {};
if (!fileType) {
logger.error(`no fileType provided for ${filePath}`);


@ -1,23 +1,25 @@
const logger = require('winston');
const db = require('../../models');
const chainquery = require('chainquery').default;
const publishCache = require('server/utils/publishCache');
const createCanonicalLink = require('@globalutils/createCanonicalLink');
const getClaimIdByChannel = async (channelName, channelClaimId, claimName) => {
logger.debug(`getClaimIdByChannel(${channelName}, ${channelClaimId}, ${claimName})`);
const canonicalUrl = createCanonicalLink({
asset: { channelName, channelShortId: channelClaimId, name: claimName },
});
let channelId = await chainquery.claim.queries.getLongClaimId(channelName, channelClaimId);
if (channelId === null) {
channelId = await db.Certificate.getLongChannelId(channelName, channelClaimId);
}
let claimId = await chainquery.claim.queries.getClaimIdByLongChannelId(channelId, claimName);
if (claimId === null) {
claimId = db.Claim.getClaimIdByLongChannelId(channelId, claimName);
}
const claimId = publishCache.get(canonicalUrl)
? publishCache.get(canonicalUrl)
: await chainquery.claim.queries.getClaimIdByLongChannelId(channelId, claimName);
// TODO: Revisit with sdk-provided partialIds
logger.debug(`getClaimIdByChannel returns`, claimId);
return claimId;
};
@ -26,12 +28,10 @@ const getClaimId = async (channelName, channelClaimId, name, claimId) => {
if (channelName) {
return getClaimIdByChannel(channelName, channelClaimId, name);
} else {
let claimIdResult = await chainquery.claim.queries.getLongClaimId(name, claimId);
if (!claimIdResult) {
claimIdResult = await db.Claim.getLongClaimId(name, claimId);
}
const canonicalUrl = createCanonicalLink({ asset: { name: name, claimId } });
let claimIdResult = publishCache.get(canonicalUrl)
? publishCache.get(canonicalUrl)
: await chainquery.claim.queries.getLongClaimId(name, claimId);
return claimIdResult;
}
};
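
Both lookups now consult the publish cache first, keyed by the canonical URL that claim/publish stored. A sketch of the two key shapes involved (the literal values are hypothetical):

// Hedged sketch: the two canonical-URL cache keys this module checks first.
const byChannelKey = createCanonicalLink({
  asset: { channelName: '@chan', channelShortId: 'abc', name: 'example' }, // hypothetical values
});
const byClaimKey = createCanonicalLink({ asset: { name: 'example', claimId: 'def' } });
// publishCache.get(key) returns the long claimId stored at publish time, else undefined.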


@ -1,25 +1,24 @@
const chainquery = require('chainquery').default;
const db = require('server/models');
const { getFileListFileByOutpoint } = require('server/lbrynet');
const publishCache = require('server/utils/publishCache');
const logger = require('winston');
const fetchClaimData = async params => {
let { claimId, claimName: name } = params;
if (claimId === 'none') claimId = null;
let { claimId } = params;
logger.debug('fetchClaimData params:', params);
const [cq, local] = await Promise.all([
chainquery.claim.queries.resolveClaim(name, claimId).catch(() => {}),
db.Claim.resolveClaim(name, claimId).catch(() => {}),
]);
// Todo: don't use localdb to get post publish content
if (!cq && !local) {
return null;
if (claimId === 'none') {
claimId = null;
}
if (cq && cq.name === name && !local) {
return cq;
const publishData = publishCache.get(claimId);
const outpoint = publishData && publishData.outpoint;
if (outpoint) {
return getFileListFileByOutpoint(outpoint);
} else {
return chainquery.claim.queries.resolveClaim(claimId).catch(() => null);
}
if (local && local.name === name && !cq) {
return local;
}
return local.updatedAt > cq.modified_at ? local : cq;
};
module.exports = fetchClaimData;


@ -3,31 +3,39 @@ const {
assetDefaults: { thumbnail },
} = require('@config/siteConfig');
const chainquery = require('chainquery').default;
// const { getClaim } = require('server/lbrynet');
const mime = require('mime-types');
const { isBlocked } = require('./blockList');
const publishCache = require('server/utils/publishCache');
const logger = require('winston');
module.exports = async (data, chName = null, chShortId = null) => {
// TODO: Refactor fetching the channel name out; requires invasive changes.
const dataVals = data.dataValues ? data.dataValues : data;
const txid = dataVals.transaction_hash_id || dataVals.txid;
let nout;
if (typeof dataVals.vout === 'number') {
nout = dataVals.vout;
let dataFromFileList, dataFromChainquery, outpoint, certificateId;
if (data && data.dataValues) {
dataFromChainquery = data.dataValues;
outpoint = data.generated_outpoint;
certificateId = dataFromChainquery.publisher_id;
} else if (data && data[0] && data[0].outpoint) {
dataFromFileList = data[0];
logger.debug('USE CACHE: claimid:', dataFromFileList.claim_id);
outpoint = dataFromFileList.outpoint;
let publishResult = dataFromFileList.claim_id && publishCache.get(dataFromFileList.claim_id);
logger.debug(`getClaimData: publishResult:`, publishResult);
certificateId = publishResult && publishResult.certificateId;
} else {
nout = dataVals.nout;
throw new Error(`NO DATA, CLYDE`);
}
const outpoint = `${txid}:${nout}`;
const certificateId = dataVals.publisher_id || dataVals.certificateId;
const fileExt = data.generated_extension || dataVals.fileExt;
let channelShortId = chShortId;
let channelName = chName;
// TODO: Factor blocked out
let blocked;
let blocked = false;
if (isBlocked(outpoint)) {
logger.debug('blocking content');
blocked = true;
}
@ -40,27 +48,51 @@ module.exports = async (data, chName = null, chShortId = null) => {
.getShortClaimIdFromLongClaimId(certificateId, channelName)
.catch(() => null);
}
// Find a solution for the legacy application/octet-stream file extensions
return {
name: dataVals.name,
title: dataVals.title,
certificateId,
channelName,
channelShortId,
contentType: dataVals.content_type || data.contentType,
claimId: dataVals.claim_id || data.claimId,
fileExt: fileExt,
description: dataVals.description,
nsfw: dataVals.is_nsfw,
thumbnail: dataVals.thumbnail_url || data.thumbnail || thumbnail,
outpoint,
host,
pending: Boolean(dataVals.height === 0),
blocked: blocked,
license: dataVals.license,
licenseUrl: dataVals.license_url,
transactionTime: dataVals.transaction_time,
};
if (dataFromFileList && dataFromFileList.outpoint) {
// file_list values due to recent publish
console.log('ClaimName', dataFromFileList.claim_name);
return {
name: dataFromFileList.claim_name,
title: dataFromFileList.metadata.title,
certificateId,
channelName,
channelShortId,
contentType: dataFromFileList.mime_type || dataFromFileList.media_type,
claimId: dataFromFileList.claim_id,
fileExt: mime.extension(dataFromFileList.mime_type),
description: dataFromFileList.metadata.description,
nsfw: dataFromFileList.metadata.nsfw,
thumbnail: dataFromFileList.metadata.thumbnail,
outpoint,
host,
pending: false,
blocked: blocked,
license: dataFromFileList.metadata.license,
licenseUrl: dataFromFileList.metadata.license_url,
transactionTime: 0,
};
} else {
// chainquery result values
console.log('ClaimName', dataFromChainquery.name);
return {
name: dataFromChainquery.name,
title: dataFromChainquery.title,
certificateId,
channelName,
channelShortId,
contentType: dataFromChainquery.content_type,
claimId: dataFromChainquery.claim_id,
fileExt: data.generated_extension,
description: dataFromChainquery.description,
nsfw: dataFromChainquery.is_nsfw,
thumbnail: dataFromChainquery.thumbnail_url,
outpoint,
host,
pending: Boolean(dataFromChainquery.height === 0),
blocked: blocked,
license: dataFromChainquery.license,
licenseUrl: dataFromChainquery.license_url,
transactionTime: dataFromChainquery.transaction_time,
};
}
};


@ -0,0 +1,16 @@
// TODO: replace quick/dirty try catch with better practice
module.exports = originalUrl => {
  // queryObject must be per-call; a module-level object would leak state between requests
  const queryObject = {};
  try {
    originalUrl
      .split('?')[1]
      .split('&')
      .forEach(pair => {
        if (pair.includes('=')) {
          let parr = pair.split('=');
          queryObject[parr[0]] = parr[1];
        } else queryObject[pair] = true;
      });
    return queryObject;
  } catch (e) {}
};
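
The TODO asks for something better than the quick/dirty try/catch; one option is Node's built-in URLSearchParams — a standard-library swap, not anything this commit ships. A sketch preserving the same bare-key-to-true behavior:

// Hedged alternative sketch using Node's URLSearchParams.
const { URLSearchParams } = require('url');
module.exports = originalUrl => {
  const queryString = (originalUrl || '').split('?')[1];
  if (!queryString) return undefined; // callers already handle a missing result
  const queryObject = {};
  for (const [key, value] of new URLSearchParams(queryString)) {
    queryObject[key] = value === '' ? true : value; // bare keys map to true
  }
  return queryObject;
};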


@ -0,0 +1,21 @@
const NodeCache = require('node-cache');
const CACHE_TIMEOUT_IN_SECONDS = 10;
const publishCache = new NodeCache({ stdTTL: CACHE_TIMEOUT_IN_SECONDS });
/*
This module is used for tracking recent publishes for
the brief time before they show up in chainquery.
It will be used in Publish, Update, Availability and other situations.
On publish, we'll publishCache.set(claimId, publishData), which includes the
outpoint, and publishCache.set(canonicalUrl, claimId).
On view, we'll publishCache.get(claimId) and use the
outpoint to do an sdk file_list query.
Entries will be removed when chainquery returns a matching value.
_decide whether we're cloning objects or getting references: useClones: true (default)
*/
module.exports = publishCache;
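
For reference, the read/write pattern the comment describes, as wired up in this commit's publish and serve controllers:

// Hedged usage sketch. Write path (claim/publish), with canonicalUrl/claimData
// coming from a just-completed publish:
//   publishCache.set(canonicalUrl, claimData.claimId); // canonical URL -> long claimId
//   publishCache.set(claimData.claimId, claimData);    // claimId -> publish data (incl. outpoint)
// Read path (serving/availability): prefer a recent publish over chainquery.
const { getFileListFileByOutpoint } = require('server/lbrynet');
const getRecentPublishFile = async claimId => {
  const cached = publishCache.get(claimId);
  if (!cached) return null;
  const [file] = await getFileListFileByOutpoint(cached.outpoint); // sdk file_list
  return file || null;
};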