Merge pull request #77 from lbryio/spent_claims

Spent claims
This commit is contained in:
Mark 2018-05-27 09:50:52 -04:00 committed by GitHub
commit 8c674b2e1f
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
2 changed files with 39 additions and 33 deletions

View file

@ -5,6 +5,6 @@ then
exit 1; exit 1;
else else
echo "Index did not exist, creating..." ; echo "Index did not exist, creating..." ;
curl -X PUT http://localhost:9200/claims -d '{ "settings" : { "number_of_shards" : 1 }, "mappings" : { "claim" : { "properties" : { "value" : { "type" : "nested" }, "suggest_name": { "type": "completion" }, "suggest_desc": { "type": "completion" } } } } }'; curl -X PUT http://localhost:9200/claims -H '"Content-Type: application/json"'-d '{ "settings" : { "number_of_shards" : 1 }, "mappings" : { "claim" : { "properties" : { "value" : { "type" : "nested" }, "suggest_name": { "type": "completion" }, "suggest_desc": { "type": "completion" } } } } }';
exit 0; exit 0;
fi fi

View file

@ -13,19 +13,20 @@ import fs from 'fs';
import fileExists from 'file-exists'; import fileExists from 'file-exists';
import * as util from '../../utils/importer/util'; import * as util from '../../utils/importer/util';
const loggerStream = winstonStream(winston, 'info'); const elasticsearchloglevel = 'info';
const loggerStream = winstonStream(winston, elasticsearchloglevel);
const eclient = new elasticsearch.Client({ const eclient = new elasticsearch.Client({
host: 'http://localhost:9200', host: 'http://localhost:9200',
log: { log: {
level : 'info', level : elasticsearchloglevel,
type : 'stream', type : 'stream',
stream: loggerStream, stream: loggerStream,
}, },
}); });
const queue = new ElasticQueue({elastic: eclient}); const queue = new ElasticQueue({elastic: eclient});
// Check that our cache file exist. // Check that our syncState file exist.
fileExists(path.join(appRoot.path, 'syncState.json'), (err, exists) => { fileExists(path.join(appRoot.path, 'syncState.json'), (err, exists) => {
if (err) { throw err } if (err) { throw err }
if (!exists) { if (!exists) {
@ -47,38 +48,20 @@ export async function claimSync () {
status.info = 'addingClaimsToElastic'; status.info = 'addingClaimsToElastic';
for (let claim of claims) { for (let claim of claims) {
claim.value = JSON.parse(claim.value).Claim; claim.value = JSON.parse(claim.value).Claim;
// console.log('NEW: ' + JSON.stringify(claim));
// console.log('--------------------------------------------');
// var imprtr = require('../importer');
// let oldvalue = await imprtr.getValue(claim.transaction_by_hash_id, claim.vout);
// oldvalue = JSON.parse(oldvalue);
// console.log('OLD: ' + JSON.stringify(oldvalue));
// console.log('--------------------------------------------');
if (claim.name && claim.value) { if (claim.name && claim.value) {
claim.suggest_name = { claim.suggest_name = {
input : claim.name, input : '' + claim.name + '',
weight: 30, weight: '30',
}; };
} }
if (claim.bid_state === 'Spent' || claim.bid_state === 'Expired') {
deleteFromElastic(claim.claimId);
} else {
pushElastic(claim); pushElastic(claim);
} }
}
winston.log('info', '[Importer] Pushed ' + claims.length + ' claims to elastic search'); winston.log('info', '[Importer] Pushed ' + claims.length + ' claims to elastic search');
winston.log('info', '[Importer] Removing blocked claims from search!'); deleteBlockedClaims();
let blockedOutputsResponse = await getBlockedOutpoints();
let outpointlist = JSON.parse(blockedOutputsResponse);
for (let outpoint of outpointlist.data.outpoints) {
let claimid = util.OutpointToClaimId(outpoint);
console.log('Deleting ClaimId: ' + claimid);
eclient.delete({
index: 'claims',
type : 'claim',
id : claimid,
}, function (error, response) {
if (error) {
winston.log(error, response);
}
});
}
syncState.LastSyncTime = new Date().toISOString().slice(0, 19).replace('T', ' '); syncState.LastSyncTime = new Date().toISOString().slice(0, 19).replace('T', ' ');
await saveJSON(path.join(appRoot.path, 'syncState.json'), syncState); await saveJSON(path.join(appRoot.path, 'syncState.json'), syncState);
status.info = 'upToDate'; status.info = 'upToDate';
@ -96,8 +79,29 @@ export function getStats () {
return status; return status;
} }
// Fetches the list of blocked outpoints, converts each one to its claim id,
// and queues that claim for removal from the Elasticsearch 'claims' index.
async function deleteBlockedClaims () {
  winston.log('info', '[Importer] Removing blocked claims from search!');
  const rawOutpoints = await getBlockedOutpoints();
  const blockedList = JSON.parse(rawOutpoints);
  for (const outpoint of blockedList.data.outpoints) {
    // Fire-and-forget: the deletion is queued, not awaited.
    deleteFromElastic(util.OutpointToClaimId(outpoint));
  }
  winston.log('info', '[Importer] Done processing blocked claims!');
}
// Queues a delete of the given claim id from the 'claims' Elasticsearch index.
// The op shape mirrors pushElastic; `body` is empty for a delete —
// TODO confirm how ElasticQueue distinguishes delete ops (verify against its API).
async function deleteFromElastic (claimid) {
  // BUGFIX: the original wrapped this in `new Promise(async (resolve, reject) => …)`
  // but never called resolve/reject, so callers received a forever-pending promise
  // (and the async-executor anti-pattern swallows any thrown error). `async` alone
  // preserves the Promise-returning interface; callers treat this as fire-and-forget.
  queue.push({
    index: 'claims',
    type : 'claim',
    id   : claimid,
    body : {},
  });
}
async function pushElastic (claim) { async function pushElastic (claim) {
console.log('Pushing To Elastic Search claimId:' + claim.claimId);
return new Promise(async(resolve, reject) => { return new Promise(async(resolve, reject) => {
queue.push({ queue.push({
index: 'claims', index: 'claims',
@ -142,6 +146,7 @@ function getBlockedOutpoints () {
resolve(htmlString); resolve(htmlString);
}) })
.catch(function (err) { .catch(function (err) {
winston.log('error', '[Importer] Error getting blocked outpoints. ' + err);
reject(err); reject(err);
}); });
}); });
@ -167,6 +172,7 @@ function getClaimsSince (time) {
resolve(htmlString); resolve(htmlString);
}) })
.catch(function (err) { .catch(function (err) {
winston.log('error', '[Importer] Error getting updated claims. ' + err);
reject(err); reject(err);
}); });
}); });