Merge pull request #158 from lbryio/chainquery-db-connection
use DB connector instead of web API
commit 34df58e967
9 changed files with 130 additions and 125 deletions
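At a glance, the change swaps the importer's HTTP round trip to Chainquery's public SQL endpoint (the removed rp(`https://chainquery.lbry.com/api/sql?query=`...) call) for a direct MySQL connection to the Chainquery database. Below is a minimal sketch of that direction only, using the connection values from chainquery-config.json.example; the real importer keeps one long-lived connection and a larger query (both shown in the diff further down), and the fetchClaimsDirect helper here is purely illustrative.

const mysql = require('mysql');

// Open a connection with the settings documented in chainquery-config.json.example.
// The PR caches a single connection (getChainqueryConnection in the importer hunk);
// this helper opens and closes one per call to stay self-contained.
function fetchClaimsDirect (lastID, limit, callback) {
  const connection = mysql.createConnection({
    host    : 'chainquery.lbry.com',
    user    : 'lighthouse',
    password: '',
    database: 'chainquery',
  });
  connection.connect();
  // Same claim table the importer queries; the column list is trimmed for brevity.
  connection.query(
    'SELECT c.id, c.claim_id, c.name, c.value_as_json FROM claim c WHERE c.id > ? ORDER BY c.id LIMIT ?',
    [lastID, limit],
    function (err, results) {
      connection.end();
      callback(err, results);
    }
  );
}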
.gitignore (vendored, 1 change)

@@ -9,3 +9,4 @@ npm-debug.log
 claimTrieCache.json
 syncState.json
 yarn-error.log
+chainquery-config.json
chainquery-config.json.example (new file, 6 additions)

@@ -0,0 +1,6 @@
+{
+  "host": "chainquery.lbry.com",
+  "user": "lighthouse",
+  "password": "",
+  "db": "chainquery"
+}
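The example file documents the four connection settings the importer expects; the real chainquery-config.json is gitignored (see the .gitignore hunk above) and imported directly by the importer below. As a hedged sketch, one way such a file could be loaded and sanity-checked before being handed to mysql.createConnection; only the key names come from the example file, the loadChainqueryConfig helper itself is hypothetical.

const fs = require('fs');

// Hypothetical helper: read the copied config and fail fast if a key is missing.
function loadChainqueryConfig (path) {
  const config = JSON.parse(fs.readFileSync(path, 'utf8'));
  ['host', 'user', 'password', 'db'].forEach(function (key) {
    if (!(key in config)) {
      throw new Error('chainquery config is missing "' + key + '"');
    }
  });
  return config;
}

// e.g. loadChainqueryConfig('chainquery-config.json')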
Deleted file (67 lines)

@@ -1,67 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-import json, os
-from bitcoinrpc.authproxy import AuthServiceProxy
-from lbryschema.decode import smart_decode
-from flask import Flask, url_for
-app = Flask(__name__)
-
-def get_lbrycrdd_connection_details(wallet_conf):
-    settings = {"username": "lbry",
-                "password": "lbry",
-                "rpc_port": 9245}
-    if wallet_conf and os.path.exists(wallet_conf):
-        with open(wallet_conf, "r") as conf:
-            conf_lines = conf.readlines()
-        for l in conf_lines:
-            if l.startswith("rpcuser="):
-                settings["username"] = l[8:].rstrip('\n')
-            if l.startswith("rpcpassword="):
-                settings["password"] = l[12:].rstrip('\n')
-            if l.startswith("rpcport="):
-                settings["rpc_port"] = int(l[8:].rstrip('\n'))
-    rpc_user = settings["username"]
-    rpc_pass = settings["password"]
-    rpc_port = settings["rpc_port"]
-    rpc_url = "127.0.0.1"
-    return "http://%s:%s@%s:%i" % (rpc_user, rpc_pass, rpc_url, rpc_port)
-
-
-@app.errorhandler(500)
-def internal_error(error):
-
-    return 'error when decoding claims'
-
-
-@app.route('/claim_decode/<txid>/<nout>')
-def api_decode(txid, nout):
-    connection_string = get_lbrycrdd_connection_details(os.path.expanduser("~")+"/.lbrycrd/lbrycrd.conf")
-    rpc = AuthServiceProxy(connection_string)
-    result = rpc.getclaimsfortx(txid)
-    claim = None
-    for claim_out in result:
-        if claim_out['nOut'] == int(nout):
-            claim = claim_out
-            break
-    if claim:
-        converted = ''.join([chr(ord(i)) for i in claim['value']])
-        decoded = smart_decode(converted)
-        claim['value'] = decoded.claim_dict
-        return json.dumps(claim)
-
-
-@app.route('/claim_decodeinv/<claimid>')
-def api_decodebyclaim(claimid):
-    connection_string = get_lbrycrdd_connection_details(os.path.expanduser("~")+"/.lbrycrd/lbrycrd.conf")
-    rpc = AuthServiceProxy(connection_string)
-    claim = rpc.getvalueforname(claimid)
-    if claim:
-        converted = ''.join([chr(ord(i)) for i in claim['value']])
-        decoded = smart_decode(converted)
-        claim['value'] = decoded.claim_dict
-        return json.dumps(claim)
-
-if __name__ == '__main__':
-    app.run(host='127.0.0.1')
Deleted file (3 lines)

@@ -1,3 +0,0 @@
-git+https://github.com/lbryio/lbryschema.git#egg=lbryschema
-python-bitcoinrpc==0.1
-flask
package-lock.json (generated, 28 changes)

@@ -2814,6 +2814,11 @@
       "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz",
       "integrity": "sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ=="
     },
+    "bignumber.js": {
+      "version": "7.2.1",
+      "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz",
+      "integrity": "sha512-S4XzBk5sMB+Rcb/LNcpzXr57VRTxgAvaAEDAl1AwRx27j00hT84O6OkteE7u8UB3NuaaygCRrEpqox4uDOrbdQ=="
+    },
     "binary-extensions": {
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.0.0.tgz",
@@ -7122,6 +7127,24 @@
       "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.6.tgz",
       "integrity": "sha1-SJYrGeFp/R38JAs/HnMXYnu8R9s="
     },
+    "mysql": {
+      "version": "2.17.1",
+      "resolved": "https://registry.npmjs.org/mysql/-/mysql-2.17.1.tgz",
+      "integrity": "sha512-7vMqHQ673SAk5C8fOzTG2LpPcf3bNt0oL3sFpxPEEFp1mdlDcrLK0On7z8ZYKaaHrHwNcQ/MTUz7/oobZ2OyyA==",
+      "requires": {
+        "bignumber.js": "7.2.1",
+        "readable-stream": "2.3.6",
+        "safe-buffer": "5.1.2",
+        "sqlstring": "2.3.1"
+      },
+      "dependencies": {
+        "safe-buffer": {
+          "version": "5.1.2",
+          "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
+          "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="
+        }
+      }
+    },
     "nan": {
       "version": "2.14.0",
       "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz",
@@ -10984,6 +11007,11 @@
       "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
       "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw="
     },
+    "sqlstring": {
+      "version": "2.3.1",
+      "resolved": "https://registry.npmjs.org/sqlstring/-/sqlstring-2.3.1.tgz",
+      "integrity": "sha1-R1OT/56RR5rqYtyvDKPRSYOn+0A="
+    },
     "sshpk": {
       "version": "1.16.1",
       "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz",
@@ -56,6 +56,7 @@
     "koa-logger": "^2.0.0",
     "koa-router": "^7.0.0",
     "limited-request-queue": "^3.0.4",
+    "mysql": "^2.17.1",
     "node-slack": "^0.0.7",
     "oas": "^0.8.15",
     "ora": "^1.3.0",
@@ -48,7 +48,7 @@ function getResults (input) {
       'bool': {
         'must': {
           'query_string': {
-            'fields': ['channel_id'],
+            'fields': ['channel_claim_id'],
             'query' : getEscapedQuery(input.channel_id.trim()),
           },
         },
@@ -6,14 +6,14 @@ import helmet from 'koa-helmet';
 import routing from './routes/';
 import { port } from './config';
 import winston from 'winston';
-import slack from 'node-slack';
+import Slack from 'node-slack';
 require('winston-daily-rotate-file');

 // Setup logging
 winston.remove(winston.transports.Console);
 winston.add(winston.transports.Console, { colorize: true, timestamp: true, prettyPrint: true });
-var slackAPIKey = process.env.SLACK_HOOK_URL;
-var mySlack = new slack(slackAPIKey, {});
+const slackAPIKey = process.env.SLACK_HOOK_URL;
+const mySlack = new Slack(slackAPIKey, {});
 // Create Koa Application
 const app = new Koa();

@@ -13,25 +13,35 @@ import fs from 'fs';
 import fileExists from 'file-exists';
 import * as util from './util';
 import {logErrorToSlack} from '../../index';
+import mysql from 'mysql';
+import chainqueryConfig from '../../../chainquery-config.json';

-const elasticsearchloglevel = 'info';
+let connection = null;
+
+const esLogLevel = 'info';
 const MaxClaimsToProcessPerIteration = 100000;
 const BatchSize = 5000;
-const loggerStream = winstonStream(winston, elasticsearchloglevel);
+const loggerStream = winstonStream(winston, esLogLevel);
 const eclient = new elasticsearch.Client({
   host: 'http://localhost:9200',

   log: {
-    level : elasticsearchloglevel,
+    level : esLogLevel,
     type : 'stream',
     stream: loggerStream,
   },
 });

 const queue = new ElasticQueue({elastic: eclient});
+queue.on('drain', function () {
+  console.log('elasticsearch queue is drained');
+});

 // Check that our syncState file exist.
 fileExists(path.join(appRoot.path, 'syncState.json'), (err, exists) => {
-  if (err) { throw err }
+  if (err) {
+    throw err;
+  }
   if (!exists) {
     fs.writeFileSync(path.join(appRoot.path, 'syncState.json'), '{}');
   }
@@ -56,16 +66,16 @@ export async function claimSync () {
     let lastID = syncState.LastID;
     let iteration = 0;
     while (!finished) {
-      let claimsResponse = await getClaimsSince(syncState.LastSyncTime, lastID, BatchSize);
-      let claims = JSON.parse(claimsResponse).data;
+      let claims = await getClaimsSince(syncState.LastSyncTime, lastID, BatchSize);
       status.info = 'addingClaimsToElastic';
       for (let claim of claims) {
         if (claim.value === null) {
           console.log(claim);
-          await logErrorToSlack('Failed to process claim ' + claim.claimId + ' due to missing value');
+          // await logErrorToSlack('Failed to process claim ' + claim.claimId + ' due to missing value');
+          console.error('Failed to process claim ' + claim.claimId + ' due to missing value');
           continue;
         }
-        claim.value = JSON.parse(claim.value).Claim;
+        claim.value = claim.value.Claim;
         if (claim.name && claim.value) {
           claim.suggest_name = {
             input : '' + claim.name + '',
@@ -83,7 +93,7 @@ export async function claimSync () {
       finished = claims.length < BatchSize || (iteration * BatchSize + BatchSize >= MaxClaimsToProcessPerIteration);
       iteration++;
     }
-    deleteBlockedClaims();
+    await deleteBlockedClaims();
     // If not finished, store last id to run again later where we left off, otherwise update last sync time.
     if (iteration * BatchSize + BatchSize >= MaxClaimsToProcessPerIteration) {
       syncState.LastID = lastID;
@@ -95,12 +105,12 @@ export async function claimSync () {
     status.info = 'upToDate';
     status.syncState = syncState;
     await sleep(600000);
-    claimSync();
+    await claimSync();
   } catch (err) {
     await logErrorToSlack(err);
     status.err = err;
     await sleep(600000);
-    claimSync();
+    await claimSync();
   }
 }
@@ -119,26 +129,22 @@ async function deleteBlockedClaims () {
   winston.log('info', '[Importer] Done processing blocked claims!');
 }

-async function deleteFromElastic (claimid) {
-  return new Promise(async (resolve, reject) => {
+function deleteFromElastic (claimid) {
   queue.push({
     index: 'claims',
     type : 'claim',
     id   : claimid,
     body : {},
   });
-  });
 }

-async function pushElastic (claim) {
-  return new Promise(async (resolve, reject) => {
+function pushElastic (claim) {
   queue.push({
     index: 'claims',
     type : 'claim',
     id   : claim.claimId,
     body : claim,
   });
-  });
 }

 function getJSON (path) {
@@ -153,6 +159,7 @@ function getJSON (path) {
     });
   });
 }

 function saveJSON (path, obj) {
   return new Promise((resolve, reject) => {
     jsonfile.writeFile(path, obj, function (err, jsoncontent) {
@@ -183,34 +190,66 @@ function getBlockedOutpoints () {
   });
 }

+function getChainqueryConnection () {
+  if (connection === null) {
+    connection = mysql.createConnection({
+      host    : chainqueryConfig.host,
+      user    : chainqueryConfig.user,
+      password: chainqueryConfig.password,
+      database: chainqueryConfig.db,
+    });
+    connection.connect();
+  }
+  return connection;
+}
+
 function getClaimsSince (time, lastID, MaxClaimsInCall) {
   return new Promise((resolve, reject) => {
-    let query = `` +
-    `SELECT ` +
-    `c.id, ` +
-    `c.name,` +
-    `p.name as channel,` +
-    `p.claim_id as channel_id,` +
-    `c.bid_state,` +
-    `c.effective_amount,` +
-    `COALESCE(p.effective_amount,1) as certificate_amount,` +
-    `c.claim_id as claimId,` +
-    `c.value_as_json as value ` +
-    `FROM claim c ` +
-    `LEFT JOIN claim p on p.claim_id = c.publisher_id ` +
-    `WHERE c.id >` + lastID + ` ` +
-    `AND c.modified_at >='` + time + `' ` +
-    `ORDER BY c.id ` +
-    `LIMIT ` + MaxClaimsInCall;
+    let query = `SELECT c.id,
+    c.name,
+    p.name as channel,
+    p.claim_id as channel_id,
+    c.bid_state,
+    c.effective_amount,
+    COALESCE(p.effective_amount,1) as certificate_amount,
+    c.claim_id as claimId,
+    c.value_as_json as value
+    FROM claim c LEFT JOIN claim p
+    on p.claim_id = c.publisher_id
+    WHERE c.id >${lastID} AND
+    c.modified_at >='${time}'
+    ORDER BY c.id LIMIT ${MaxClaimsInCall}`;
     // Outputs full query to console for copy/paste into chainquery (debugging)
     console.log(query);
-    rp(`https://chainquery.lbry.com/api/sql?query=` + query)
-      .then(function (htmlString) {
-        resolve(htmlString);
-      })
-      .catch(function (err) {
+    getChainqueryConnection().query(query, function (err, results, fields) {
+      if (err) {
+        console.error(err);
         logErrorToSlack('[Importer] Error getting updated claims. ' + err);
-        reject(err);
+        return reject(err);
+      }
+      let claims = [];
+      for (let i = 0; i < results.length; i++) {
+        let r = results[i];
+        let value = null;
+        try {
+          value = JSON.parse(r.value);
+        } catch (e) {
+          console.error(e);
+          console.error(r.value);
+        }
+        claims.push({
+          id                : r.id,
+          name              : r.name,
+          channel           : r.channel,
+          channel_claim_id  : r.channel_id,
+          bid_state         : r.bid_state,
+          effective_amount  : r.effective_amount,
+          certificate_amount: r.certificate_amount,
+          claimId           : r.claimId,
+          value             : value,
+        });
+      }
+      resolve(claims);
     });
   });
 }
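One design note on the rewritten getClaimsSince: lastID, time, and MaxClaimsInCall are interpolated straight into the SQL template literal. They come from the importer's own sync state rather than user input, but the mysql driver also supports ? placeholders, which lets the driver do the escaping. The variant below is illustrative only and not part of this PR; getClaimsSinceParameterized is a hypothetical name, while getChainqueryConnection is the helper added in the hunk above.

// Illustrative variant: same query issued with "?" placeholders instead of
// template-literal interpolation; the driver escapes the substituted values.
function getClaimsSinceParameterized (time, lastID, maxClaims) {
  return new Promise((resolve, reject) => {
    const sql =
      'SELECT c.id, c.name, p.name AS channel, p.claim_id AS channel_id, ' +
      'c.bid_state, c.effective_amount, ' +
      'COALESCE(p.effective_amount, 1) AS certificate_amount, ' +
      'c.claim_id AS claimId, c.value_as_json AS value ' +
      'FROM claim c LEFT JOIN claim p ON p.claim_id = c.publisher_id ' +
      'WHERE c.id > ? AND c.modified_at >= ? ' +
      'ORDER BY c.id LIMIT ?';
    getChainqueryConnection().query(sql, [lastID, time, maxClaims], (err, results) => {
      if (err) { return reject(err); }
      resolve(results);
    });
  });
}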