From ba258cae0345682eb61acf5940c7a999a3598279 Mon Sep 17 00:00:00 2001
From: Niko Storni
Date: Mon, 8 Jul 2019 20:54:01 +0200
Subject: [PATCH 01/11] replace API with db connector

---
 chainquery-config.json.example   |  6 +++
 decoder/decoder.py               | 67 --------------------
 decoder/requirements.txt         |  3 --
 package-lock.json                | 28 +++++++++++++
 package.json                     |  1 +
 server/utils/chainquery/index.js | 50 ++++++++++++------------
 6 files changed, 60 insertions(+), 95 deletions(-)
 create mode 100644 chainquery-config.json.example
 delete mode 100644 decoder/decoder.py
 delete mode 100644 decoder/requirements.txt

diff --git a/chainquery-config.json.example b/chainquery-config.json.example
new file mode 100644
index 0000000..253ecdc
--- /dev/null
+++ b/chainquery-config.json.example
@@ -0,0 +1,6 @@
+{
+  "host": "chainquery.lbry.com",
+  "user": "lighthouse",
+  "password": "",
+  "db": "chainquery"
+}
diff --git a/decoder/decoder.py b/decoder/decoder.py
deleted file mode 100644
index 49716ec..0000000
--- a/decoder/decoder.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-import json, os
-from bitcoinrpc.authproxy import AuthServiceProxy
-from lbryschema.decode import smart_decode
-from flask import Flask, url_for
-app = Flask(__name__)
-
-def get_lbrycrdd_connection_details(wallet_conf):
-    settings = {"username": "lbry",
-                "password": "lbry",
-                "rpc_port": 9245}
-    if wallet_conf and os.path.exists(wallet_conf):
-        with open(wallet_conf, "r") as conf:
-            conf_lines = conf.readlines()
-        for l in conf_lines:
-            if l.startswith("rpcuser="):
-                settings["username"] = l[8:].rstrip('\n')
-            if l.startswith("rpcpassword="):
-                settings["password"] = l[12:].rstrip('\n')
-            if l.startswith("rpcport="):
-                settings["rpc_port"] = int(l[8:].rstrip('\n'))
-    rpc_user = settings["username"]
-    rpc_pass = settings["password"]
-    rpc_port = settings["rpc_port"]
-    rpc_url = "127.0.0.1"
-    return "http://%s:%s@%s:%i" % (rpc_user, rpc_pass, rpc_url, rpc_port)
-
-@app.errorhandler(500)
-def internal_error(error):
-
-    return 'error when decoding claims'
-
-
-@app.route('/claim_decode/<txid>/<nout>/')
-def api_decode(txid, nout):
-    connection_string = get_lbrycrdd_connection_details(os.path.expanduser("~")+"/.lbrycrd/lbrycrd.conf")
-    rpc = AuthServiceProxy(connection_string)
-    result = rpc.getclaimsfortx(txid)
-    claim = None
-    for claim_out in result:
-        if claim_out['nOut'] == int(nout):
-            claim = claim_out
-            break
-    if claim:
-        converted = ''.join([chr(ord(i)) for i in claim['value']])
-        decoded = smart_decode(converted)
-        claim['value'] = decoded.claim_dict
-    return json.dumps(claim)
-
-
-@app.route('/claim_decodeinv/<claimid>/')
-def api_decodebyclaim(claimid):
-    connection_string = get_lbrycrdd_connection_details(os.path.expanduser("~")+"/.lbrycrd/lbrycrd.conf")
-    rpc = AuthServiceProxy(connection_string)
-    claim = rpc.getvalueforname(claimid)
-    if claim:
-        converted = ''.join([chr(ord(i)) for i in claim['value']])
-        decoded = smart_decode(converted)
-        claim['value'] = decoded.claim_dict
-    return json.dumps(claim)
-
-if __name__ == '__main__':
-    app.run(host='127.0.0.1')
-
-
-
diff --git a/decoder/requirements.txt b/decoder/requirements.txt
deleted file mode 100644
index 116e61b..0000000
--- a/decoder/requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-git+https://github.com/lbryio/lbryschema.git#egg=lbryschema
-python-bitcoinrpc==0.1
-flask
\ No newline at end of file
diff --git a/package-lock.json b/package-lock.json
index c83f795..dda837f 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -2814,6 +2814,11 @@
       "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz",
       "integrity": "sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ=="
     },
+    "bignumber.js": {
+      "version": "7.2.1",
+      "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz",
+      "integrity": "sha512-S4XzBk5sMB+Rcb/LNcpzXr57VRTxgAvaAEDAl1AwRx27j00hT84O6OkteE7u8UB3NuaaygCRrEpqox4uDOrbdQ=="
+    },
     "binary-extensions": {
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.0.0.tgz",
@@ -7122,6 +7127,24 @@
       "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.6.tgz",
      "integrity": "sha1-SJYrGeFp/R38JAs/HnMXYnu8R9s="
     },
+    "mysql": {
+      "version": "2.17.1",
+      "resolved": "https://registry.npmjs.org/mysql/-/mysql-2.17.1.tgz",
+      "integrity": "sha512-7vMqHQ673SAk5C8fOzTG2LpPcf3bNt0oL3sFpxPEEFp1mdlDcrLK0On7z8ZYKaaHrHwNcQ/MTUz7/oobZ2OyyA==",
+      "requires": {
+        "bignumber.js": "7.2.1",
+        "readable-stream": "2.3.6",
+        "safe-buffer": "5.1.2",
+        "sqlstring": "2.3.1"
+      },
+      "dependencies": {
+        "safe-buffer": {
+          "version": "5.1.2",
+          "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
+          "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="
+        }
+      }
+    },
     "nan": {
       "version": "2.14.0",
       "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz",
@@ -10984,6 +11007,11 @@
       "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
       "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw="
     },
+    "sqlstring": {
+      "version": "2.3.1",
+      "resolved": "https://registry.npmjs.org/sqlstring/-/sqlstring-2.3.1.tgz",
+      "integrity": "sha1-R1OT/56RR5rqYtyvDKPRSYOn+0A="
+    },
     "sshpk": {
       "version": "1.16.1",
       "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz",
diff --git a/package.json b/package.json
index b0d4b31..8d12626 100644
--- a/package.json
+++ b/package.json
@@ -56,6 +56,7 @@
     "koa-logger": "^2.0.0",
     "koa-router": "^7.0.0",
     "limited-request-queue": "^3.0.4",
+    "mysql": "^2.17.1",
     "node-slack": "^0.0.7",
     "oas": "^0.8.15",
     "ora": "^1.3.0",
diff --git a/server/utils/chainquery/index.js b/server/utils/chainquery/index.js
index 74f6820..1c79dbf 100644
--- a/server/utils/chainquery/index.js
+++ b/server/utils/chainquery/index.js
@@ -13,6 +13,7 @@ import fs from 'fs';
 import fileExists from 'file-exists';
 import * as util from './util';
 import {logErrorToSlack} from '../../index';
+import mysql from 'mysql';
 
 const elasticsearchloglevel = 'info';
 const MaxClaimsToProcessPerIteration = 100000;
@@ -31,7 +32,9 @@ const queue = new ElasticQueue({elastic: eclient});
 
 // Check that our syncState file exist.
 fileExists(path.join(appRoot.path, 'syncState.json'), (err, exists) => {
-  if (err) { throw err }
+  if (err) {
+    throw err;
+  }
   if (!exists) {
     fs.writeFileSync(path.join(appRoot.path, 'syncState.json'), '{}');
   }
@@ -153,6 +156,7 @@ function getJSON (path) {
     });
   });
 }
+
 function saveJSON (path, obj) {
   return new Promise((resolve, reject) => {
     jsonfile.writeFile(path, obj, function (err, jsoncontent) {
@@ -183,34 +187,30 @@ function getBlockedOutpoints () {
   });
 }
 
+let connection = null;
+const chainqueryConfig = require('../../../chainquery-config.json');
+
 function getClaimsSince (time, lastID, MaxClaimsInCall) {
+  if (connection === null) {
+    connection = mysql.createConnection({
+      host    : chainqueryConfig.host,
+      user    : chainqueryConfig.user,
+      password: chainqueryConfig.password,
+      database: chainqueryConfig.db,
+    });
+    connection.connect();
+  }
+
   return new Promise((resolve, reject) => {
-    let query = `` +
-      `SELECT ` +
-      `c.id, ` +
-      `c.name,` +
-      `p.name as channel,` +
-      `p.claim_id as channel_id,` +
-      `c.bid_state,` +
-      `c.effective_amount,` +
-      `COALESCE(p.effective_amount,1) as certificate_amount,` +
-      `c.claim_id as claimId,` +
-      `c.value_as_json as value ` +
-      `FROM claim c ` +
-      `LEFT JOIN claim p on p.claim_id = c.publisher_id ` +
-      `WHERE c.id >` + lastID + ` ` +
-      `AND c.modified_at >='` + time + `' ` +
-      `ORDER BY c.id ` +
-      `LIMIT ` + MaxClaimsInCall;
+    let query = `SELECT c.id, c.name,p.name as channel, p.claim_id as channel_id, c.bid_state,c.effective_amount,COALESCE(p.effective_amount,1) as certificate_amount,c.claim_id as claimId,c.value_as_json as value FROM claim c LEFT JOIN claim p on p.claim_id = c.publisher_id WHERE c.id >${lastID} AND c.modified_at >='${time}' ORDER BY c.id LIMIT ${MaxClaimsInCall}`;
     // Outputs full query to console for copy/paste into chainquery (debugging)
     console.log(query);
-    rp(`https://chainquery.lbry.com/api/sql?query=` + query)
-      .then(function (htmlString) {
-        resolve(htmlString);
-      })
-      .catch(function (err) {
+    connection.query(query, function (err, results, fields) {
+      if (err) {
         logErrorToSlack('[Importer] Error getting updated claims. ' + err);
-        reject(err);
-      });
+        return reject(err);
+      }
+      resolve(results);
+    });
   });
 }

From 469ac7329a923ec8075b3b6ce2f0debecab96b33 Mon Sep 17 00:00:00 2001
From: Niko Storni
Date: Mon, 8 Jul 2019 21:03:12 +0200
Subject: [PATCH 02/11] add debugging

---
 .gitignore                       | 1 +
 server/index.js                  | 4 ++--
 server/utils/chainquery/index.js | 1 +
 3 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/.gitignore b/.gitignore
index 9116e04..f186c44 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,3 +9,4 @@ npm-debug.log
 claimTrieCache.json
 syncState.json
 yarn-error.log
+chainquery-config.json
diff --git a/server/index.js b/server/index.js
index a110709..8100b74 100644
--- a/server/index.js
+++ b/server/index.js
@@ -12,8 +12,8 @@ require('winston-daily-rotate-file');
 // Setup logging
 winston.remove(winston.transports.Console);
 winston.add(winston.transports.Console, { colorize: true, timestamp: true, prettyPrint: true });
-var slackAPIKey = process.env.SLACK_HOOK_URL;
-var mySlack = new slack(slackAPIKey, {});
+let slackAPIKey = process.env.SLACK_HOOK_URL;
+let mySlack = new slack(slackAPIKey, {});
 
 // Create Koa Application
 const app = new Koa();
diff --git a/server/utils/chainquery/index.js b/server/utils/chainquery/index.js
index 1c79dbf..51531e6 100644
--- a/server/utils/chainquery/index.js
+++ b/server/utils/chainquery/index.js
@@ -207,6 +207,7 @@ function getClaimsSince (time, lastID, MaxClaimsInCall) {
     console.log(query);
     connection.query(query, function (err, results, fields) {
       if (err) {
+        console.error(err);
         logErrorToSlack('[Importer] Error getting updated claims. ' + err);
         return reject(err);
       }

From 02f322059d3e1a4d87e537531c3b175d1c324230 Mon Sep 17 00:00:00 2001
From: Niko Storni
Date: Mon, 8 Jul 2019 21:08:47 +0200
Subject: [PATCH 03/11] parse json

---
 server/utils/chainquery/index.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/server/utils/chainquery/index.js b/server/utils/chainquery/index.js
index 51531e6..e775970 100644
--- a/server/utils/chainquery/index.js
+++ b/server/utils/chainquery/index.js
@@ -211,7 +211,7 @@ function getClaimsSince (time, lastID, MaxClaimsInCall) {
         logErrorToSlack('[Importer] Error getting updated claims. ' + err);
         return reject(err);
       }
-      resolve(results);
+      resolve(JSON.parse(results));
     });
   });
 }

From c485b89972a6d44046e43e71e25707ce39da8ffe Mon Sep 17 00:00:00 2001
From: Niko Storni
Date: Mon, 8 Jul 2019 21:16:54 +0200
Subject: [PATCH 04/11] construct object from mysql

---
 server/utils/chainquery/index.js | 19 ++++++++++++++++---
 1 file changed, 16 insertions(+), 3 deletions(-)

diff --git a/server/utils/chainquery/index.js b/server/utils/chainquery/index.js
index e775970..ba6e707 100644
--- a/server/utils/chainquery/index.js
+++ b/server/utils/chainquery/index.js
@@ -59,8 +59,7 @@ export async function claimSync () {
   let lastID = syncState.LastID;
   let iteration = 0;
   while (!finished) {
-    let claimsResponse = await getClaimsSince(syncState.LastSyncTime, lastID, BatchSize);
-    let claims = JSON.parse(claimsResponse).data;
+    let claims = await getClaimsSince(syncState.LastSyncTime, lastID, BatchSize);
     status.info = 'addingClaimsToElastic';
     for (let claim of claims) {
       if (claim.value === null) {
@@ -211,7 +210,21 @@ function getClaimsSince (time, lastID, MaxClaimsInCall) {
         logErrorToSlack('[Importer] Error getting updated claims. ' + err);
         return reject(err);
       }
-      resolve(JSON.parse(results));
+      let claims = [];
+      for (let i = 0; i < results.length; i++) {
+        let r = results[i];
+        claims.push({
+          id                : r.id,
+          name              : r.name,
+          channel           : r.channel,
+          bid_state         : r.bid_state,
+          effective_amount  : r.effective_amount,
+          certificate_amount: r.certificate_amount,
+          claimId           : r.claimId,
+          value             : r.value,
+        });
+      }
+      resolve(claims);
     });
   });
 }

From 79b9eda56877552571e4f4638e1a918c2b4e4314 Mon Sep 17 00:00:00 2001
From: Niko Storni
Date: Mon, 8 Jul 2019 21:25:19 +0200
Subject: [PATCH 05/11] don't spam slack

---
 server/utils/chainquery/index.js | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/server/utils/chainquery/index.js b/server/utils/chainquery/index.js
index ba6e707..d15f4b2 100644
--- a/server/utils/chainquery/index.js
+++ b/server/utils/chainquery/index.js
@@ -64,7 +64,8 @@ export async function claimSync () {
     for (let claim of claims) {
       if (claim.value === null) {
         console.log(claim);
-        await logErrorToSlack('Failed to process claim ' + claim.claimId + ' due to missing value');
+        // await logErrorToSlack('Failed to process claim ' + claim.claimId + ' due to missing value');
+        console.error('Failed to process claim ' + claim.claimId + ' due to missing value');
         continue;
       }
       claim.value = JSON.parse(claim.value).Claim;

From 42f2a6386c1675518cc383178e6568667cc512b3 Mon Sep 17 00:00:00 2001
From: Niko Storni
Date: Wed, 2 Oct 2019 18:04:17 +0200
Subject: [PATCH 06/11] possibly improve memory management

---
 server/utils/chainquery/index.js | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/server/utils/chainquery/index.js b/server/utils/chainquery/index.js
index d15f4b2..04d4383 100644
--- a/server/utils/chainquery/index.js
+++ b/server/utils/chainquery/index.js
@@ -76,9 +76,9 @@ export async function claimSync () {
         };
       }
       if (claim.bid_state === 'Spent' || claim.bid_state === 'Expired') {
-        deleteFromElastic(claim.claimId);
+        await deleteFromElastic(claim.claimId);
       } else {
-        pushElastic(claim);
+        await pushElastic(claim);
       }
       lastID = claim.id;
     }
@@ -86,7 +86,7 @@ export async function claimSync () {
     finished = claims.length < BatchSize || (iteration * BatchSize + BatchSize >= MaxClaimsToProcessPerIteration);
     iteration++;
   }
-  deleteBlockedClaims();
+  await deleteBlockedClaims();
   // If not finished, store last id to run again later where we left off, otherwise update last sync time.
   if (iteration * BatchSize + BatchSize >= MaxClaimsToProcessPerIteration) {
     syncState.LastID = lastID;
@@ -98,12 +98,12 @@ export async function claimSync () {
     status.info = 'upToDate';
     status.syncState = syncState;
     await sleep(600000);
-    claimSync();
+    await claimSync();
   } catch (err) {
     await logErrorToSlack(err);
     status.err = err;
     await sleep(600000);
-    claimSync();
+    await claimSync();
   }
 }
 

From 7bdf527ec932908564e9842bb47cebc675c83dbb Mon Sep 17 00:00:00 2001
From: Niko Storni
Date: Wed, 2 Oct 2019 18:15:24 +0200
Subject: [PATCH 07/11] feedback on drained queue

---
 server/utils/chainquery/index.js | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/server/utils/chainquery/index.js b/server/utils/chainquery/index.js
index 04d4383..340c129 100644
--- a/server/utils/chainquery/index.js
+++ b/server/utils/chainquery/index.js
@@ -28,7 +28,11 @@ const eclient = new elasticsearch.Client({
     stream: loggerStream,
   },
 });
+
 const queue = new ElasticQueue({elastic: eclient});
+queue.on('drain', function () {
+  console.log('elasticsearch queue is drained');
+});
 
 // Check that our syncState file exist.
 fileExists(path.join(appRoot.path, 'syncState.json'), (err, exists) => {

From 99342580461d3af26b3648ece7d8b648e2964758 Mon Sep 17 00:00:00 2001
From: Niko Storni
Date: Wed, 2 Oct 2019 18:43:12 +0200
Subject: [PATCH 08/11] remove unused promises

---
 server/utils/chainquery/index.js | 32 ++++++++++++++------------------
 1 file changed, 14 insertions(+), 18 deletions(-)

diff --git a/server/utils/chainquery/index.js b/server/utils/chainquery/index.js
index 340c129..0c53065 100644
--- a/server/utils/chainquery/index.js
+++ b/server/utils/chainquery/index.js
@@ -80,9 +80,9 @@ export async function claimSync () {
         };
       }
       if (claim.bid_state === 'Spent' || claim.bid_state === 'Expired') {
-        await deleteFromElastic(claim.claimId);
+        deleteFromElastic(claim.claimId);
       } else {
-        await pushElastic(claim);
+        pushElastic(claim);
       }
       lastID = claim.id;
     }
@@ -126,25 +126,21 @@ async function deleteBlockedClaims () {
   winston.log('info', '[Importer] Done processing blocked claims!');
 }
 
-async function deleteFromElastic (claimid) {
-  return new Promise(async (resolve, reject) => {
-    queue.push({
-      index: 'claims',
-      type : 'claim',
-      id   : claimid,
-      body : {},
-    });
+function deleteFromElastic (claimid) {
+  queue.push({
+    index: 'claims',
+    type : 'claim',
+    id   : claimid,
+    body : {},
   });
 }
 
-async function pushElastic (claim) {
-  return new Promise(async (resolve, reject) => {
-    queue.push({
-      index: 'claims',
-      type : 'claim',
-      id   : claim.claimId,
-      body : claim,
-    });
+function pushElastic (claim) {
+  queue.push({
+    index: 'claims',
+    type : 'claim',
+    id   : claim.claimId,
+    body : claim,
   });
 }
 

From e7cf20eda428f95293cf1cd60cab212322a6f769 Mon Sep 17 00:00:00 2001
From: Niko Storni
Date: Tue, 15 Oct 2019 00:04:47 +0200
Subject: [PATCH 09/11] address review comments

---
 server/index.js                  |  6 +++---
 server/utils/chainquery/index.js | 19 +++++++++++--------
 2 files changed, 14 insertions(+), 11 deletions(-)

diff --git a/server/index.js b/server/index.js
index 8100b74..2004396 100644
--- a/server/index.js
+++ b/server/index.js
@@ -6,14 +6,14 @@ import helmet from 'koa-helmet';
 import routing from './routes/';
 import { port } from './config';
 import winston from 'winston';
-import slack from 'node-slack';
+import Slack from 'node-slack';
 require('winston-daily-rotate-file');
 
 // Setup logging
 winston.remove(winston.transports.Console);
 winston.add(winston.transports.Console, { colorize: true, timestamp: true, prettyPrint: true });
-let slackAPIKey = process.env.SLACK_HOOK_URL;
-let mySlack = new slack(slackAPIKey, {});
+const slackAPIKey = process.env.SLACK_HOOK_URL;
+const mySlack = new Slack(slackAPIKey, {});
 
 // Create Koa Application
 const app = new Koa();
diff --git a/server/utils/chainquery/index.js b/server/utils/chainquery/index.js
index 0c53065..8aaef24 100644
--- a/server/utils/chainquery/index.js
+++ b/server/utils/chainquery/index.js
@@ -14,16 +14,19 @@ import fileExists from 'file-exists';
 import * as util from './util';
 import {logErrorToSlack} from '../../index';
 import mysql from 'mysql';
+import chainqueryConfig from '../../../chainquery-config.json';
 
-const elasticsearchloglevel = 'info';
+let connection = null;
+
+const esLogLevel = 'info';
 const MaxClaimsToProcessPerIteration = 100000;
 const BatchSize = 5000;
-const loggerStream = winstonStream(winston, elasticsearchloglevel);
+const loggerStream = winstonStream(winston, esLogLevel);
 
 const eclient = new elasticsearch.Client({
   host: 'http://localhost:9200',
   log: {
-    level : elasticsearchloglevel,
+    level : esLogLevel,
     type : 'stream',
     stream: loggerStream,
   },
@@ -187,10 +190,7 @@ function getBlockedOutpoints () {
   });
 }
 
-let connection = null;
-const chainqueryConfig = require('../../../chainquery-config.json');
-
-function getClaimsSince (time, lastID, MaxClaimsInCall) {
+function getChainqueryConnection () {
   if (connection === null) {
     connection = mysql.createConnection({
       host    : chainqueryConfig.host,
@@ -200,12 +200,15 @@ function getClaimsSince (time, lastID, MaxClaimsInCall) {
     });
     connection.connect();
   }
+  return connection;
+}
 
+function getClaimsSince (time, lastID, MaxClaimsInCall) {
   return new Promise((resolve, reject) => {
     let query = `SELECT c.id, c.name,p.name as channel, p.claim_id as channel_id, c.bid_state,c.effective_amount,COALESCE(p.effective_amount,1) as certificate_amount,c.claim_id as claimId,c.value_as_json as value FROM claim c LEFT JOIN claim p on p.claim_id = c.publisher_id WHERE c.id >${lastID} AND c.modified_at >='${time}' ORDER BY c.id LIMIT ${MaxClaimsInCall}`;
     // Outputs full query to console for copy/paste into chainquery (debugging)
     console.log(query);
-    connection.query(query, function (err, results, fields) {
+    getChainqueryConnection().query(query, function (err, results, fields) {
       if (err) {
         console.error(err);
         logErrorToSlack('[Importer] Error getting updated claims. ' + err);

From f1ad94d29d12d386bde37a3477bde1484d860dd5 Mon Sep 17 00:00:00 2001
From: Niko Storni
Date: Wed, 30 Oct 2019 17:57:40 +0100
Subject: [PATCH 10/11] fix es storage object

---
 server/utils/chainquery/index.js | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/server/utils/chainquery/index.js b/server/utils/chainquery/index.js
index 8aaef24..08f989d 100644
--- a/server/utils/chainquery/index.js
+++ b/server/utils/chainquery/index.js
@@ -75,7 +75,7 @@ export async function claimSync () {
         console.error('Failed to process claim ' + claim.claimId + ' due to missing value');
         continue;
       }
-      claim.value = JSON.parse(claim.value).Claim;
+      claim.value = claim.value.Claim;
       if (claim.name && claim.value) {
         claim.suggest_name = {
           input : '' + claim.name + '',
@@ -217,6 +217,13 @@ function getClaimsSince (time, lastID, MaxClaimsInCall) {
       let claims = [];
      for (let i = 0; i < results.length; i++) {
        let r = results[i];
+        let value = null;
+        try {
+          value = JSON.parse(r.value);
+        } catch (e) {
+          console.error(e);
+          console.error(r.value);
+        }
         claims.push({
           id                : r.id,
           name              : r.name,
@@ -225,7 +232,7 @@ function getClaimsSince (time, lastID, MaxClaimsInCall) {
           effective_amount  : r.effective_amount,
           certificate_amount: r.certificate_amount,
           claimId           : r.claimId,
-          value             : r.value,
+          value             : value,
         });
       }
       resolve(claims);

From f4115b0edc1934f95c4be675c21af02731c96eee Mon Sep 17 00:00:00 2001
From: Niko Storni
Date: Thu, 31 Oct 2019 17:42:56 +0100
Subject: [PATCH 11/11] adjust channel ID lookup

index channel claim id for claims
---
 server/controllers/lighthouse.js |  2 +-
 server/utils/chainquery/index.js | 16 +++++++++++++++-
 2 files changed, 16 insertions(+), 2 deletions(-)

diff --git a/server/controllers/lighthouse.js b/server/controllers/lighthouse.js
index 506e583..97c2262 100644
--- a/server/controllers/lighthouse.js
+++ b/server/controllers/lighthouse.js
@@ -48,7 +48,7 @@ function getResults (input) {
       'bool': {
         'must': {
           'query_string': {
-            'fields': ['channel_id'],
+            'fields': ['channel_claim_id'],
             'query' : getEscapedQuery(input.channel_id.trim()),
           },
         },
diff --git a/server/utils/chainquery/index.js b/server/utils/chainquery/index.js
index 08f989d..8e9e485 100644
--- a/server/utils/chainquery/index.js
+++ b/server/utils/chainquery/index.js
@@ -205,7 +205,20 @@ function getChainqueryConnection () {
 
 function getClaimsSince (time, lastID, MaxClaimsInCall) {
   return new Promise((resolve, reject) => {
-    let query = `SELECT c.id, c.name,p.name as channel, p.claim_id as channel_id, c.bid_state,c.effective_amount,COALESCE(p.effective_amount,1) as certificate_amount,c.claim_id as claimId,c.value_as_json as value FROM claim c LEFT JOIN claim p on p.claim_id = c.publisher_id WHERE c.id >${lastID} AND c.modified_at >='${time}' ORDER BY c.id LIMIT ${MaxClaimsInCall}`;
+    let query = `SELECT c.id,
+    c.name,
+    p.name as channel,
+    p.claim_id as channel_id,
+    c.bid_state,
+    c.effective_amount,
+    COALESCE(p.effective_amount,1) as certificate_amount,
+    c.claim_id as claimId,
+    c.value_as_json as value
+    FROM claim c LEFT JOIN claim p
+    on p.claim_id = c.publisher_id
+    WHERE c.id >${lastID} AND
+    c.modified_at >='${time}'
+    ORDER BY c.id LIMIT ${MaxClaimsInCall}`;
     // Outputs full query to console for copy/paste into chainquery (debugging)
     console.log(query);
     getChainqueryConnection().query(query, function (err, results, fields) {
@@ -228,6 +241,7 @@ function getClaimsSince (time, lastID, MaxClaimsInCall) {
           id                : r.id,
           name              : r.name,
           channel           : r.channel,
+          channel_claim_id  : r.channel_id,
           bid_state         : r.bid_state,
           effective_amount  : r.effective_amount,
           certificate_amount: r.certificate_amount,