Move channels and claims to chainquery

parent a25e736ee1
commit c9fdc9c3cf

49 changed files with 10772 additions and 202 deletions
.gitignore (vendored, 3 changed lines)

@@ -5,6 +5,7 @@
 node_modules
 client/build
+server/chainquery/build
 client_custom/build
 client_custom/scss
@@ -22,4 +23,4 @@ public/bundle/bundle.js.map
 public/bundle/Lekton-*
 public/bundle/style.css
-uploads
+uploads
@@ -7,7 +7,7 @@ import { selectSiteHost } from '../selectors/site';
 function * retrieveFile (action) {
   const name = action.data.name;
-  const claimId = action.data.claimId;
+  const claimId = action.data.claim_id || action.data.claimId;
   const host = yield select(selectSiteHost);
   // see if the file is available
   let isAvailable;
package-lock.json (generated, 19 changed lines)

@@ -908,6 +908,12 @@
   "integrity": "sha512-EIjmpvnHj+T4nMcKwHwxZKUfDmphIKJc2qnEMhSoOvr1lYEQpuRKRz8orWr//krYIIArS/KGGLfL2YGVUYXmIA==",
   "dev": true
 },
+"@types/estree": {
+  "version": "0.0.39",
+  "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.39.tgz",
+  "integrity": "sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw==",
+  "dev": true
+},
 "@types/geojson": {
   "version": "1.0.6",
   "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-1.0.6.tgz",
@@ -12030,6 +12036,16 @@
   "inherits": "2.0.3"
   }
 },
+"rollup": {
+  "version": "0.66.2",
+  "resolved": "https://registry.npmjs.org/rollup/-/rollup-0.66.2.tgz",
+  "integrity": "sha512-+rOLjWO170M3Y2jyyGU4ZJuTu1T1KuKNyH+RszHRzQdsuI5TulRbkSM4vlaMnwcxHm4XdgBNZ1mmNzhQIImbiQ==",
+  "dev": true,
+  "requires": {
+    "@types/estree": "0.0.39",
+    "@types/node": "10.9.4"
+  }
+},
 "run-async": {
   "version": "2.3.0",
   "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.3.0.tgz",
@@ -14152,6 +14168,9 @@
   }
   }
 },
+"winston-slack-webhook": {
+  "version": "github:billbitt/winston-slack-webhook#ce99792bdf6473a9da69c91772eb4ecec4979b6c"
+},
 "wkx": {
   "version": "0.4.5",
   "resolved": "https://registry.npmjs.org/wkx/-/wkx-0.4.5.tgz",
@@ -15,8 +15,10 @@
   "prestart": "builder run bundle",
   "start": "node server.js",
   "start:build": "builder run start",
+  "chainquery:build": "rollup ./server/chainquery/index.js --file ./server/chainquery/bundle.js --format cjs",
   "devtools:server": "ndb server.js",
-  "devtools:chainquery": "ndb ./server/chainquery/index.debug.js",
+  "devtools:chainquery": "npm run devtools:chainquery:build && ndb ./server/chainquery/bundle.debug.js",
+  "devtools:chainquery:build": "rollup ./server/chainquery/index.debug.js --file ./server/chainquery/bundle.debug.js --format cjs",
   "test": "mocha --recursive",
   "test:no-lbc": "npm test -- --grep @usesLbc --invert",
   "test:server": "mocha --recursive './server/**/*.test.js'",
@@ -115,6 +117,7 @@
   "nodemon": "^1.17.5",
   "redux-devtools": "^3.4.1",
   "regenerator-transform": "^0.13.0",
+  "rollup": "^0.66.2",
   "sass-loader": "^7.1.0",
   "sequelize-cli": "^4.0.0",
   "style-loader": "^0.21.0",
server/chainquery/bundle.debug.js (new file, 1079 lines)
File diff suppressed because it is too large

server/chainquery/bundle.js (new file, 1075 lines)
File diff suppressed because it is too large
@@ -1,9 +1,6 @@
 console.log('Loading `chainquery`, please wait...')

-require('@babel/polyfill');
-require('@babel/register');
-
-const chainquery = require('./');
+import chainquery from './index'

 global.chainquery = chainquery.default ? chainquery.default : chainquery;
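A sketch of how this debug entry point is presumably used, inferred from the package.json scripts above rather than spelled out in the commit: npm run devtools:chainquery bundles it with rollup and opens the bundle under ndb, where the global.chainquery assignment makes the models reachable from the debugger console, for example:

    // typed into the ndb console once "Loading `chainquery`" finishes (illustrative only)
    chainquery.claim.queries.getCurrentHeight().then(height => console.log(height));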
@@ -3,17 +3,63 @@ const logger = require('winston');

 import abnormalClaimTable from './tables/abnormalClaimTable';
 import addressTable from './tables/addressTable';
-import applicationStatusTable from './tables/applicationStatusTable';
 import blockTable from './tables/blockTable';
 import claimTable from './tables/claimTable';
-import gorpMigrationsTable from './tables/gorpMigrationsTable';
 import inputTable from './tables/inputTable';
-import jobStatusTable from './tables/jobStatusTable';
 import outputTable from './tables/outputTable';
 import supportTable from './tables/supportTable';
 import transactionAddressTable from './tables/transactionAddressTable';
 import transactionTable from './tables/transactionTable';

+import abnormalClaimQueries from './queries/abnormalClaimQueries';
+import addressQueries from './queries/addressQueries';
+import blockQueries from './queries/blockQueries';
+import claimQueries from './queries/claimQueries';
+import inputQueries from './queries/inputQueries';
+import outputQueries from './queries/outputQueries';
+import supportQueries from './queries/supportQueries';
+import transactionAddressQueries from './queries/transactionAddressQueries';
+import transactionQueries from './queries/transactionQueries';
+
+const DATABASE_STRUCTURE = {
+  'abnormal_claim': {
+    table: abnormalClaimTable,
+    queries: abnormalClaimQueries,
+  },
+  'address': {
+    table: addressTable,
+    queries: addressQueries,
+  },
+  'block': {
+    table: blockTable,
+    queries: blockQueries,
+  },
+  'claim': {
+    table: claimTable,
+    queries: claimQueries,
+  },
+  'input': {
+    table: inputTable,
+    queries: inputQueries,
+  },
+  'output': {
+    table: outputTable,
+    queries: outputQueries,
+  },
+  'support': {
+    table: supportTable,
+    queries: supportQueries,
+  },
+  'transaction_address': {
+    table: transactionAddressTable,
+    queries: transactionAddressQueries,
+  },
+  'transaction': {
+    table: transactionTable,
+    queries: transactionQueries,
+  },
+};
+
 const {
   host,
   port,
@@ -45,22 +91,19 @@ const sequelize = new Sequelize(database, username, password, {
 });

 const db = {};
-db.abnormal_claim = sequelize.import('abnormal_claim', abnormalClaimTable.createModel);
-db.application_status = sequelize.import('application_status', applicationStatusTable.createModel);
-db.address = sequelize.import('address', addressTable.createModel);
-db.block = sequelize.import('block', blockTable.createModel);
-db.claim = sequelize.import('claim', claimTable.createModel);
-db.gorp_migrations = sequelize.import('gorp_migrations', gorpMigrationsTable.createModel);
-db.input = sequelize.import('input', inputTable.createModel);
-db.job_status = sequelize.import('job_status', jobStatusTable.createModel);
-db.output = sequelize.import('output', outputTable.createModel);
-db.support = sequelize.import('support', supportTable.createModel);
-db.transaction_address = sequelize.import('transaction_address', transactionAddressTable.createModel);
-db.transaction = sequelize.import('transaction', transactionTable.createModel);
+const DATABASE_STRUCTURE_KEYS = Object.keys(DATABASE_STRUCTURE);
+
+for(let i = 0; i < DATABASE_STRUCTURE_KEYS.length; i++) {
+  let dbKey = DATABASE_STRUCTURE_KEYS[i];
+  let currentData = DATABASE_STRUCTURE[dbKey];
+
+  db[dbKey] = currentData.table.createModel(sequelize, Sequelize);
+  db[dbKey].queries = currentData.queries(db, db[dbKey]);
+}

 // run model.association for each model in the db object that has an association
 logger.info('associating chainquery db models...');
-Object.keys(db).forEach(modelName => {
+DATABASE_STRUCTURE_KEYS.forEach(modelName => {
   if (db[modelName].associate) {
     logger.info('Associating chainquery model:', modelName);
     db[modelName].associate(db);
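For orientation, a minimal sketch of how the db object assembled by this loop gets consumed elsewhere in the commit; the claim name and id below are placeholders, not values from the diff:

    const chainquery = require('chainquery'); // resolved via the new webpack alias added later in this commit
    chainquery.claim.queries
      .resolveClaim('example-name', '0123456789abcdef0123456789abcdef01234567') // placeholder values
      .then(claim => console.log(claim ? claim.claim_id : 'no matching claim'));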
@@ -59,7 +59,7 @@ export default (sequelize, {
   },
   {
     freezeTableName: true,
-    getterMethods,
+    //getterMethods,
     timestamps: false, // don't use default timestamps columns
   }
 );
@@ -31,7 +31,7 @@ export default (sequelize, {
   },
   {
     freezeTableName: true,
-    getterMethods,
+    //getterMethods,
     timestamps: false, // don't use default timestamps columns
   }
 );
@@ -1,33 +0,0 @@
const getterMethods = {
  // Add as needed, prefix all methods with `generated`
}

export default (sequelize, {
  STRING, BOOLEAN, INTEGER, TEXT, DECIMAL
}) => sequelize.define(
  'application_status',
  {
    id: {
      primaryKey: true,
      type: INTEGER,
      set() { },
    },
    app_version: {
      type: INTEGER,
      set() { },
    },
    data_version: {
      type: INTEGER,
      set() { },
    },
    api_version: {
      type: INTEGER,
      set() { },
    },
  },
  {
    freezeTableName: true,
    getterMethods,
    timestamps: false, // don't use default timestamps columns
  }
);
@@ -91,7 +91,7 @@ export default (sequelize, {
   },
   {
     freezeTableName: true,
-    getterMethods,
+    //getterMethods,
     timestamps: false, // don't use default timestamps columns
   }
 );
@@ -3,7 +3,7 @@ const logger = require('winston');
 const {
   assetDefaults: { thumbnail: defaultThumbnail },
   details: { host }
-} = require('../../../config/siteConfig'); // TODO: Change back to '@config/siteConfig' when done testing
+} = require('../../config/siteConfig'); // TODO: Fix paths for rollup

 const getterMethods = {
   generated_extension() {
@@ -1,25 +0,0 @@
const getterMethods = {
  // Add as needed, prefix all methods with `generated`
}

export default (sequelize, {
  STRING, BOOLEAN, INTEGER, TEXT, DECIMAL
}) => sequelize.define(
  'gorp_migrations',
  {
    id: {
      primaryKey: true,
      type: INTEGER,
      set() { },
    },
    applied_at: {
      type: INTEGER,
      set() { },
    },
  },
  {
    freezeTableName: true,
    getterMethods,
    timestamps: false, // don't use default timestamps columns
  }
);
@@ -71,7 +71,7 @@ export default (sequelize, {
   },
   {
     freezeTableName: true,
-    getterMethods,
+    //getterMethods,
     timestamps: false, // don't use default timestamps columns
   }
 );
@@ -1,33 +0,0 @@
const getterMethods = {
  // Add as needed, prefix all methods with `generated`
}

export default (sequelize, {
  STRING, BOOLEAN, INTEGER, TEXT, DECIMAL
}) => sequelize.define(
  'job_status',
  {
    job_name: {
      primaryKey: true,
      type: STRING,
      set() { },
    },
    last_sync: {
      type: INTEGER,
      set() { },
    },
    is_success: {
      type: INTEGER,
      set() { },
    },
    error_message: {
      type: TEXT,
      set() { },
    }
  },
  {
    freezeTableName: true,
    getterMethods,
    timestamps: false, // don't use default timestamps columns
  }
);
@@ -71,7 +71,7 @@ export default (sequelize, {
   },
   {
     freezeTableName: true,
-    getterMethods,
+    //getterMethods,
     timestamps: false, // don't use default timestamps columns
   }
 );
@@ -43,7 +43,7 @@ export default (sequelize, {
   },
   {
     freezeTableName: true,
-    getterMethods,
+    //getterMethods,
     timestamps: false, // don't use default timestamps columns
   }
 );
@@ -12,7 +12,7 @@ export default (sequelize, {
     type: INTEGER,
     set() { },
   },
-  addess_id: {
+  address_id: {
     primaryKey: true,
     type: INTEGER,
     set() { },
@@ -28,7 +28,7 @@ export default (sequelize, {
   },
   {
     freezeTableName: true,
-    getterMethods,
+    //getterMethods,
     timestamps: false, // don't use default timestamps columns
   }
 );
@@ -67,7 +67,7 @@ export default (sequelize, {
   },
   {
     freezeTableName: true,
-    getterMethods,
+    //getterMethods,
     timestamps: false, // don't use default timestamps columns
   }
 );
server/chainquery/queries/abnormalClaimQueries.js (new file, 3 lines)

@@ -0,0 +1,3 @@
export default (db, table) => ({
  example: () => table.findAll(),
})
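Each of these query modules is a factory of the same shape: it receives the full db object plus its own model and returns the methods that index.js (above) attaches as that model's queries. A minimal sketch of that wiring, mirroring the loop shown earlier:

    // what the index.js loop effectively does for this table:
    // db.abnormal_claim.queries = abnormalClaimQueries(db, db.abnormal_claim);
    // db.abnormal_claim.queries.example().then(rows => console.log(rows.length));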
server/chainquery/queries/addressQueries.js (new file, 3 lines)

@@ -0,0 +1,3 @@
export default (db, table) => ({
  example: () => table.findAll(),
})
server/chainquery/queries/blockQueries.js (new file, 3 lines)

@@ -0,0 +1,3 @@
export default (db, table) => ({
  example: () => table.findAll(),
})
server/chainquery/queries/claimQueries.js (new file, 184 lines)

@@ -0,0 +1,184 @@
const logger = require('winston');

const returnShortId = (claimsArray, longId) => {
  let claimIndex;
  let shortId = longId.substring(0, 1); // default short id is the first letter
  let shortIdLength = 0;
  // find the index of this claim id
  claimIndex = claimsArray.findIndex(element => {
    return element.claim_id === longId;
  });
  if (claimIndex < 0) {
    throw new Error('claim id not found in claims list');
  }
  // get an array of all claims with lower height
  let possibleMatches = claimsArray.slice(0, claimIndex);
  // remove certificates with the same prefixes until none are left.
  while (possibleMatches.length > 0) {
    shortIdLength += 1;
    shortId = longId.substring(0, shortIdLength);
    possibleMatches = possibleMatches.filter(element => {
      return (element.claim_id && (element.claim_id.substring(0, shortIdLength) === shortId));
    });
  }
  return shortId;
};

const isLongClaimId = (claimId) => {
  return (claimId && (claimId.length === 40));
}

const isShortClaimId = (claimId) => {
  return (claimId && (claimId.length < 40));
}

export default (db, table) => ({

  getShortClaimIdFromLongClaimId: async (claimId, claimName) => {
    logger.debug(`claim.getShortClaimIdFromLongClaimId for ${claimName}#${claimId}`);
    return await table.findAll({
      where: { name: claimName },
      order: [['height', 'ASC']],
    }).then(result => {
      if(result.length === 0) {
        throw new Error('No claim(s) found with that claim name');
      }

      return returnShortId(result, claimId);
    });
  },

  getAllChannelClaims: async (channelClaimId) => {
    logger.debug(`claim.getAllChannelClaims for ${channelClaimId}`);
    return await table.findAll({
      where: { publisher_id: channelClaimId },
      order: [['height', 'DESC']],
      raw  : true, // returns an array of only data, not an array of instances
    })
      .then(channelClaimsArray => {
        if(channelClaimsArray.length === 0) {
          return null;
        }

        return channelClaimsArray;
      })
  },

  getClaimIdByLongChannelId: async (channelClaimId, claimName) => {
    logger.debug(`finding claim id for claim ${claimName} from channel ${channelClaimId}`);
    return await table.findAll({
      where: { name: claimName, publisher_id: channelClaimId },
      order: [['id', 'ASC']],
    })
      .then(result => {
        switch (result.length) {
          case 0:
            return null;
          case 1:
            return result[0].claim_id;
          default:
            // Does this actually happen??? (from converted code)
            logger.warn(`${result.length} records found for "${claimName}" in channel "${channelClaimId}"`);
            return result[0].claim_id;
        }
      });
  },

  validateLongClaimId: async (name, claimId) => {
    return await table.findOne({
      where: {
        name,
        claim_id: claimId,
      },
    }).then(result => {
      if (!result) {
        return false;
      }
      return claimId;
    });
  },

  getLongClaimIdFromShortClaimId: async (name, shortId) => {
    return await table.findAll({
      where: {
        name,
        claim_id: {
          [sequelize.Op.like]: `${shortId}%`,
        }},
      order: [['height', 'ASC']],
    })
      .then(result => {
        if(result.length === 0) {
          return null;
        }

        return result[0].claim_id;
      });
  },

  getTopFreeClaimIdByClaimName: async (name) => {
    return await table.findAll({
      // TODO: Limit 1
      where: { name },
      order: [['effective_amount', 'DESC'], ['height', 'ASC']],
    }).then(result => {
      if(result.length === 0) {
        return null;
      }
      return result[0].claim_id;
    })
  },

  getLongClaimId: async (claimName, claimId) => {
    // TODO: Add failure case
    logger.debug(`getLongClaimId(${claimName}, ${claimId})`);
    if (isLongClaimId(claimId)) {
      return table.queries.validateLongClaimId(claimName, claimId);
    } else if (isShortClaimId(claimId)) {
      return table.queries.getLongClaimIdFromShortClaimId(claimName, claimId);
    } else {
      return table.queries.getTopFreeClaimIdByClaimName(claimName);
    }
  },

  resolveClaim: async (name, claimId) => {
    logger.debug(`Claim.resolveClaim: ${name} ${claimId}`);
    return table.findAll({
      where: { name, claim_id: claimId },
    }).then(claimArray => {
      if(claimArray.length === 0) {
        return null;
      } else if(claimArray.length !== 1) {
        logger.warn(`more than one record matches ${name}#${claimId} in db.Claim`);
      }

      return claimArray[0];
    });
  },

  getOutpoint: async (name, claimId) => {
    logger.debug(`finding outpoint for ${name}#${claimId}`);

    return await table.findAll({
      where     : { name, claim_id: claimId },
      attributes: ['transaction_hash_id'],
    }).then(result => {
      if(result.length === 0) {
        throw new Error(`no record found for ${name}#${claimId}`);
      } else if(result.length !== 1) {
        logger.warn(`more than one record matches ${name}#${claimId} in db.Claim`);
      }

      return result[0].transaction_hash_id;
    });
  },

  getCurrentHeight: async () => {
    return await table
      .max('height')
      .then(result => {
        return (result || 100000);
      });
  },

})
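For a concrete sense of returnShortId above, a small worked example with made-up claim ids (illustrative only, not data from the commit): the short id grows one character at a time until no earlier claim shares the prefix.

    // claims ordered by height, oldest first (fabricated ids)
    const claims = [
      { claim_id: 'abcdef01' }, // earlier claim sharing the 'ab' prefix
      { claim_id: 'abxyz999' }, // the claim we want a short id for
    ];
    // 'a' and 'ab' still collide with the earlier claim, 'abx' does not,
    // so: returnShortId(claims, 'abxyz999') === 'abx'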
server/chainquery/queries/inputQueries.js (new file, 3 lines)

@@ -0,0 +1,3 @@
export default (db, table) => ({
  example: () => table.findAll(),
})
server/chainquery/queries/outputQueries.js (new file, 3 lines)

@@ -0,0 +1,3 @@
export default (db, table) => ({
  example: () => table.findAll(),
})
server/chainquery/queries/supportQueries.js (new file, 3 lines)

@@ -0,0 +1,3 @@
export default (db, table) => ({
  example: () => table.findAll(),
})
server/chainquery/queries/transactionAddressQueries.js (new file, 3 lines)

@@ -0,0 +1,3 @@
export default (db, table) => ({
  example: () => table.findAll(),
})
server/chainquery/queries/transactionQueries.js (new file, 3 lines)

@@ -0,0 +1,3 @@
export default (db, table) => ({
  example: () => table.findAll(),
})
@@ -7,5 +7,5 @@ export default {

   associate(db) {
     // associate
-  }
+  },
 }
@@ -7,5 +7,5 @@ export default {

   associate(db) {
     // associate
-  }
+  },
 }
@@ -1,11 +0,0 @@
import ApplicationStatusModel from '../models/ApplicationStatusModel';

export default {
  createModel(...args) {
    return ApplicationStatusModel(...args);
  },

  associate(db) {
    // associate
  }
}
@@ -7,5 +7,5 @@ export default {

   associate(db) {
     // associate
-  }
+  },
 }
@@ -7,5 +7,5 @@ export default {

   associate(db) {
     // associate
-  }
+  },
 }
@@ -1,11 +0,0 @@
import GorpMigrationsModel from '../models/GorpMigrationsModel';

export default {
  createModel(...args) {
    return GorpMigrationsModel(...args);
  },

  associate(db) {
    // associate
  }
}
@@ -7,5 +7,5 @@ export default {

   associate(db) {
     // associate
-  }
+  },
 }
@@ -1,11 +0,0 @@
import JobStatusModel from '../models/JobStatusModel';

export default {
  createModel(...args) {
    return JobStatusModel(...args);
  },

  associate(db) {
    // associate
  }
}
@@ -7,5 +7,5 @@ export default {

   associate(db) {
     // associate
-  }
+  },
 }
@@ -7,5 +7,5 @@ export default {

   associate(db) {
     // associate
-  }
+  },
 }
@@ -7,5 +7,5 @@ export default {

   associate(db) {
     // associate
-  }
+  },
 }
@@ -7,5 +7,5 @@ export default {

   associate(db) {
     // associate
-  }
+  },
 }
@@ -1,28 +1,15 @@
-const db = require('../../../../models');
+const chainquery = require('chainquery');
+const getClaimData = require('server/utils/getClaimData');
 const { returnPaginatedChannelClaims } = require('./channelPagination.js');

-const getChannelClaims = (channelName, channelClaimId, page) => {
-  return new Promise((resolve, reject) => {
-    let longChannelClaimId;
-    // 1. get the long channel Id (make sure channel exists)
-    db.Certificate
-      .getLongChannelId(channelName, channelClaimId)
-      .then(result => {
-        longChannelClaimId = result;
-        return db
-          .Claim
-          .getAllChannelClaims(longChannelClaimId);
-      })
-      .then(channelClaimsArray => {
-        // 3. format the data for the view, including pagination
-        let paginatedChannelViewData = returnPaginatedChannelClaims(channelName, longChannelClaimId, channelClaimsArray, page);
-        // 4. return all the channel information and contents
-        resolve(paginatedChannelViewData);
-      })
-      .catch(error => {
-        reject(error);
-      });
-  });
+const getChannelClaims = async (channelName, channelShortId, page) => {
+  const channelId = await chainquery.claim.queries.getLongClaimId(channelName, channelShortId);
+  const channelClaims = await chainquery.claim.queries.getAllChannelClaims(channelId);
+
+  const processedChannelClaims = channelClaims.map((claim) => getClaimData(claim));
+
+  return returnPaginatedChannelClaims(channelName, channelId, processedChannelClaims, page);
 };

 module.exports = getChannelClaims;
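A rough sketch of how this rewritten helper would be called from a channel route; the channel name, short id, and page number are placeholders, and the shape of the returned view data comes from channelPagination.js, which is not part of this diff:

    getChannelClaims('@example-channel', 'a1b2', 1) // placeholder name, short id, page
      .then(paginatedChannelViewData => console.log(paginatedChannelViewData))
      .catch(error => console.log(error));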
@@ -1,18 +1,18 @@
-const db = require('../../../../models');
+const chainquery = require('chainquery');
 const { publishing: { primaryClaimAddress, additionalClaimAddresses } } = require('@config/siteConfig');
 const Sequelize = require('sequelize');
 const Op = Sequelize.Op;

-const claimAvailability = (name) => {
+const claimAvailability = async (name) => {
   const claimAddresses = additionalClaimAddresses || [];
   claimAddresses.push(primaryClaimAddress);
   // find any records where the name is used
-  return db.Claim
+  return await chainquery.claim
     .findAll({
-      attributes: ['address'],
+      attributes: ['claim_address'],
       where : {
         name,
-        address: {
+        claim_address: {
           [Op.or]: claimAddresses,
         },
       },
@@ -1,4 +1,6 @@
 const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
+const getClaimData = require('server/utils/getClaimData');
+const chainquery = require('chainquery');
 const db = require('../../../../models');

 /*
@@ -11,7 +13,14 @@ const claimData = ({ ip, originalUrl, body, params }, res) => {
   const claimName = params.claimName;
   let claimId = params.claimId;
   if (claimId === 'none') claimId = null;
-  db.Claim.resolveClaim(claimName, claimId)
+  chainquery.claim.queries.resolveClaim(claimName, claimId)
+    .then(claimInfo => {
+      if (!claimInfo) {
+        // Not found remote, try local
+        return db.Claim.resolveClaim(claimName, claimId)
+      }
+      return claimInfo
+    })
     .then(claimInfo => {
       if (!claimInfo) {
         return res.status(404).json({
@@ -21,7 +30,7 @@ const claimData = ({ ip, originalUrl, body, params }, res) => {
       }
       res.status(200).json({
         success: true,
-        data : claimInfo,
+        data : getClaimData(claimInfo),
       });
     })
     .catch(error => {
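The chain above does a chainquery-first lookup with a fallback to the local Claim table before formatting the response. The same flow written with async/await, as a sketch only (reusing the chainquery, db, and getClaimData requires added at the top of this file, not code from the commit):

    const resolveWithFallback = async (claimName, claimId) => {
      const remoteClaim = await chainquery.claim.queries.resolveClaim(claimName, claimId);
      const claimInfo = remoteClaim || await db.Claim.resolveClaim(claimName, claimId); // local fallback
      return claimInfo ? getClaimData(claimInfo) : null; // null becomes the 404 branch above
    };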
@@ -1,6 +1,8 @@
 const { getClaim } = require('../../../../lbrynet');
 const { createFileRecordDataAfterGet } = require('../../../../models/utils/createFileRecordData.js');
 const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
+const getClaimData = require('server/utils/getClaimData');
+const chainquery = require('chainquery');
 const db = require('../../../../models');

 /*
@@ -15,21 +17,37 @@ const claimGet = ({ ip, originalUrl, params }, res) => {
   let resolveResult;
   let getResult;

-  db.Claim.resolveClaim(name, claimId)
+  chainquery.claim.queries.resolveClaim(name, claimId)
+    .then(result => {
+      if (!result) {
+        // could not find remote, return false to try local
+        return false;
+      }
+      return resolveResult = result;
+    })
+    .then(result => {
+      if (result === false) {
+        // Could not find remote, try local
+        return db.Claim.resolveClaim(name, claimId);
+      }
+      return result;
+    })
     .then(result => {
       if (!result) {
         throw new Error('No matching uri found in Claim table');
       }
-      resolveResult = result;
-      return getClaim(`${name}#${claimId}`);
+      return resolveResult = result;
     })
+    .then(result => getClaim(`${name}#${claimId}`))
     .then(result => {
       if (!result) {
         throw new Error(`Unable to Get ${name}#${claimId}`);
       }
       getResult = result;
       if (result.completed) {
-        return createFileRecordDataAfterGet(resolveResult, getResult)
+        return createFileRecordDataAfterGet(getClaimData(resolveResult), getResult)
           .then(fileData => {
             const upsertCriteria = {name, claimId};
             return db.upsert(db.File, fileData, upsertCriteria, 'File');
@@ -1,4 +1,5 @@
 const db = require('../../../../models');
+const chainquery = require('chainquery');

 const { handleErrorResponse } = require('../../../utils/errorHandlers.js');

@@ -22,7 +23,13 @@ const claimLongId = ({ ip, originalUrl, body, params }, res) => {
   getClaimId(channelName, channelClaimId, claimName, claimId)
     .then(fullClaimId => {
       claimId = fullClaimId;
-      return db.Claim.getOutpoint(claimName, fullClaimId);
+      return chainquery.claim.queries.getOutpoint(claimName, fullClaimId);
+    })
+    .then(outpointResult => {
+      if (!outpointResult) {
+        return db.Claim.getOutpoint(claimName, fullClaimId);
+      }
+      return outpointResult;
     })
     .then(outpoint => {
       return db.Blocked.isNotBlocked(outpoint);
server/utils/getClaimData.js (new file, 13 lines)

@@ -0,0 +1,13 @@
const { details: { host } } = require('@config/siteConfig');

module.exports = (data) => ({
  name: data.name,
  title: data.title,
  contentType: data.content_type || data.contentType,
  claimId: data.claim_id || data.claimId,
  fileExt: data.generated_extension || data.fileExt,
  description: data.description,
  thumbnail: data.generated_thumbnail || data.thumbnail,
  outpoint: data.transaction_hash_id || data.outpoint,
  host,
})
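This helper normalizes either a chainquery row (snake_case columns) or a local model record (camelCase fields) into the single shape used by the routes above. A quick illustration with fabricated values:

    const getClaimData = require('server/utils/getClaimData'); // via the 'server' alias added below
    getClaimData({
      name: 'example',                 // fabricated chainquery-style row
      claim_id: 'abc123',
      content_type: 'image/jpeg',
      generated_extension: 'jpeg',
      transaction_hash_id: 'deadbeef',
    });
    // -> { name: 'example', claimId: 'abc123', contentType: 'image/jpeg',
    //      fileExt: 'jpeg', outpoint: 'deadbeef', host, ... }
    // fields omitted from the input (title, description, thumbnail) come back undefined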
@@ -36,9 +36,13 @@ const addAlliasesForSCSS = (aliasObject) => { // scss

 module.exports = () => {
   let moduleAliases = {};

+  moduleAliases['chainquery'] = resolve('./server/chainquery/bundle');
+  moduleAliases['server'] = resolve('./server');
+
   // aliases for configs
-  moduleAliases['@config'] = resolve(`config`);
-  moduleAliases['@devConfig'] = resolve(`devConfig`);
+  moduleAliases['@config'] = resolve('config');
+  moduleAliases['@devConfig'] = resolve('devConfig');

   // create specific aliases for locally defined components in the following folders
   moduleAliases = addAliasesForCustomComponentFolder('containers', moduleAliases);
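With these aliases in place, the requires used throughout the server-side changes above resolve roughly as follows, assuming resolve() is rooted at the project directory:

    require('chainquery');                 // -> ./server/chainquery/bundle.js (the rollup output)
    require('server/utils/getClaimData');  // -> ./server/utils/getClaimData.js
    require('@config/siteConfig');         // -> ./config/siteConfig.js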