Add chainquery dependencies for Spee.ch, does not include migrations #593
6 changed files with 76 additions and 35 deletions
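
At a high level, the PR threads a chainquery read path through the app: each query module is a factory taking `(db, table, sequelize)`, and consumers call the result as `chainquery.claim.queries.*`. A minimal wiring sketch, assuming the package index composes tables roughly like this (the index itself is not in the diff, so these names are illustrative):

```js
// Illustrative sketch only — the chainquery index file is not part of this diff.
// Each query module is a factory over (db, table, sequelize), as in the first
// two hunks below, and gets mounted under chainquery.<table>.queries.
const buildChainquery = (db, sequelize, claimQueries) => ({
  claim: {
    queries: claimQueries(db, db.Claim, sequelize),
  },
});
```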
@@ -835,6 +835,21 @@ const isShortClaimId = (claimId) => {
 
 var claimQueries = (db, table, sequelize) => ({
 
+  getClaimChannelName: async (publisher_id) => {
+    return await table.findAll({
+      where     : { claim_id: publisher_id },
+      attributes: ['name'],
+    }).then(result => {
+      if (result.length === 0) {
+        throw new Error(`no record found for ${publisher_id}`);
+      } else if (result.length !== 1) {
+        logger$1.warn(`more than one record matches ${publisher_id} in db.Claim`);
+      }
+
+      return result[0].name;
+    });
+  },
+
   getShortClaimIdFromLongClaimId: async (claimId, claimName) => {
     logger$1.debug(`claim.getShortClaimIdFromLongClaimId for ${claimName}#${claimId}`);
     return await table.findAll({
@@ -34,6 +34,21 @@ const isShortClaimId = (claimId) => {
 
 export default (db, table, sequelize) => ({
 
+  getClaimChannelName: async (publisher_id) => {
+    return await table.findAll({
+      where     : { claim_id: publisher_id },
+      attributes: ['name'],
+    }).then(result => {
+      if (result.length === 0) {
+        throw new Error(`no record found for ${publisher_id}`);
+      } else if (result.length !== 1) {
+        logger.warn(`more than one record matches ${publisher_id} in db.Claim`);
+      }
+
+      return result[0].name;
+    });
+  },
+
   getShortClaimIdFromLongClaimId: async (claimId, claimName) => {
     logger.debug(`claim.getShortClaimIdFromLongClaimId for ${claimName}#${claimId}`);
     return await table.findAll({
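
These first two hunks appear to be the same change twice over: the second is the ES-module source (`export default`, `logger`), the first its checked-in build output (hence the bundler-renamed `logger$1`). One contract worth flagging: `getClaimChannelName` throws on a miss rather than returning `undefined`, so any caller that treats a missing channel as normal needs a `.catch`. A minimal caller sketch (the wrapper name is mine, not the PR's):

```js
// Minimal caller sketch. getClaimChannelName rejects when no row matches
// claim_id, so map "no record found" to null where a miss is acceptable.
const chainquery = require('chainquery');

const safeChannelName = (certificateId) =>
  chainquery.claim.queries
    .getClaimChannelName(certificateId)
    .catch(() => null);
```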
@@ -7,7 +7,7 @@ const getChannelClaims = async (channelName, channelShortId, page) => {
   const channelId = await chainquery.claim.queries.getLongClaimId(channelName, channelShortId);
   const channelClaims = await chainquery.claim.queries.getAllChannelClaims(channelId);
 
-  const processedChannelClaims = channelClaims.map((claim) => getClaimData(claim));
+  const processedChannelClaims = await Promise.all(channelClaims.map((claim) => getClaimData(claim)));
 
   return returnPaginatedChannelClaims(channelName, channelId, processedChannelClaims, page);
 };
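
The `Promise.all` here is load-bearing: `getClaimData` becomes async in this PR, so the `.map` produces an array of promises, and `await` on a bare array is a no-op. A self-contained illustration:

```js
// Why Promise.all: `await` does not resolve promises nested inside an array.
const double = async (n) => n * 2;

(async () => {
  const wrong = await [1, 2, 3].map(double);              // three pending Promises
  const right = await Promise.all([1, 2, 3].map(double)); // [2, 4, 6]
  console.log(wrong, right);
})();
```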
@@ -9,34 +9,32 @@ const db = require('../../../../models');
 
 */
 
-const claimData = ({ ip, originalUrl, body, params }, res) => {
+const claimData = async ({ ip, originalUrl, body, params }, res) => {
   const claimName = params.claimName;
   let claimId = params.claimId;
   if (claimId === 'none') claimId = null;
-  chainquery.claim.queries.resolveClaim(claimName, claimId).catch(() => {})
-    .then(claimInfo => {
-      if (!claimInfo) {
-        // Not found remote, try local
-        return db.Claim.resolveClaim(claimName, claimId)
-      }
-      return claimInfo
-    })
-    .then(claimInfo => {
-      if (!claimInfo) {
-        return res.status(404).json({
-          success: false,
-          message: 'No claim could be found',
-        });
-      }
-
-      res.status(200).json({
-        success: true,
-        data   : getClaimData(claimInfo),
-      });
-    })
-    .catch(error => {
-      handleErrorResponse(originalUrl, ip, error, res);
-    });
+  try {
+    let resolvedClaim = await chainquery.claim.queries.resolveClaim(claimName, claimId).catch(() => {});
+
+    if (!resolvedClaim) {
+      // Not found remote, try local
+      resolvedClaim = await db.Claim.resolveClaim(claimName, claimId);
+    }
+
+    if (!resolvedClaim) {
+      return res.status(404).json({
+        success: false,
+        message: 'No claim could be found',
+      });
+    }
+
+    res.status(200).json({
+      success: true,
+      data   : await getClaimData(resolvedClaim),
+    });
+  } catch (error) {
+    handleErrorResponse(originalUrl, ip, error, res);
+  }
 };
 
 module.exports = claimData;
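
Per the review discussion at the bottom, the local `db.Claim` lookup is a deliberate fallback rather than leftover code. The remote-first/local-second shape could be factored into a small helper; a sketch under that assumption (`resolveWithFallback` is hypothetical, not in the PR):

```js
// Hypothetical helper, not part of this PR: chainquery first, local second.
const resolveWithFallback = async (claimName, claimId) => {
  const remote = await chainquery.claim.queries
    .resolveClaim(claimName, claimId)
    .catch(() => null); // swallow remote errors so local can still answer
  if (remote) return remote;
  return db.Claim.resolveClaim(claimName, claimId);
};
```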
@@ -31,7 +31,7 @@ const claimGet = async ({ ip, originalUrl, params }, res) => {
     throw new Error(`Unable to Get ${name}#${claimId}`);
   }
 
-  let fileData = await createFileRecordDataAfterGet(getClaimData(claimData), lbrynetResult);
+  let fileData = await createFileRecordDataAfterGet(await getClaimData(claimData), lbrynetResult);
   const upsertCriteria = { name, claimId };
   await db.upsert(db.File, fileData, upsertCriteria, 'File');
 
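
The inner `await` matters for the same reason as the `Promise.all` above: `getClaimData` now returns a Promise, and without it `createFileRecordDataAfterGet` would receive a pending Promise rather than the claim fields.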
@@ -1,13 +1,26 @@
 const { details: { host } } = require('@config/siteConfig');
+const chainquery = require('chainquery');
 
-module.exports = (data) => ({
-  name       : data.name,
-  title      : data.title,
-  contentType: data.content_type || data.contentType,
-  claimId    : data.claim_id || data.claimId,
-  fileExt    : data.generated_extension || data.fileExt,
-  description: data.description,
-  thumbnail  : data.generated_thumbnail || data.thumbnail,
-  outpoint   : data.transaction_hash_id || data.outpoint,
-  host,
-})
+module.exports = async (data) => {
+  // TODO: Refactor fetching the channel name out; requires invasive changes.
+  const certificateId = data.publisher_id || data.certificateId;
+  let channelName = data.channelName;
+
+  if (certificateId && !channelName) {
+    channelName = await chainquery.claim.queries.getClaimChannelName(certificateId).catch(() => {});
+  }
+
+  return ({
+    name       : data.name,
+    title      : data.title,
+    certificateId,
+    channelName,
+    contentType: data.content_type || data.contentType,
+    claimId    : data.claim_id || data.claimId,
+    fileExt    : data.generated_extension || data.fileExt,
+    description: data.description,
+    thumbnail  : data.generated_thumbnail || data.thumbnail,
+    outpoint   : data.transaction_hash_id || data.outpoint,
+    host,
+  })
+}
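
Since `getClaimData` is now async, every call site has to await it before using the result. An illustrative call, where `claimRecord` stands in for a row from chainquery or the local `db.Claim` table:

```js
// Illustrative only: getClaimData now resolves asynchronously because it
// may round-trip to chainquery for the channel name.
const getClaimData = require('./getClaimData'); // path illustrative

const logClaim = async (claimRecord) => {
  const data = await getClaimData(claimRecord);
  console.log(`${data.channelName || 'anonymous'} → ${data.name}#${data.claimId}`);
};
```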
The local db.Claim fallback is also, unfortunately, required since we don't wait for `chainquery` to have the claim before rendering the content page. We have a number of nuances that require the fallback and prevent us from fully dropping tables.

I like the backup for now. Good for resilience for the initial cutover.