Apidocs fix #60

Merged
tiger5226 merged 2 commits from apidocs_fix into master 2018-04-08 09:16:37 +02:00
10 changed files with 98 additions and 424 deletions

View file

@@ -3,11 +3,11 @@
"version": "0.0.1",
"description": "Lighthouse - Next-gen search api for LBRY",
"title": "Lighthouse API DOCS",
"url" : "http://localhost",
"sampleUrl": "http://localhost",
"url" : "http://lighthouse.lbry.io",
"sampleUrl": "http://lighthouse.lbry.io",
"json_body": true,
"template": {
"withCompare": true,
"withGenerator": true
}
}
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@@ -1,21 +1 @@
define({
"name": "Lighthouse",
"version": "0.0.1",
"description": "Lighthouse - Next-gen search api for LBRY",
"title": "Lighthouse API DOCS",
"url": "http://localhost",
"sampleUrl": "http://localhost",
"json_body": true,
"template": {
"withCompare": true,
"withGenerator": true
},
"defaultVersion": "0.0.0",
"apidoc": "0.3.0",
"generator": {
"name": "apidoc",
"time": "2017-09-28T20:35:05.906Z",
"url": "http://apidocjs.com",
"version": "0.17.6"
}
});
define({ "name": "Lighthouse", "version": "0.0.1", "description": "Lighthouse - Next-gen search api for LBRY", "title": "Lighthouse API DOCS", "url": "http://lighthouse.lbry.io", "sampleUrl": "http://lighthouse.lbry.io", "json_body": true, "template": { "withCompare": true, "withGenerator": true }, "defaultVersion": "0.0.0", "apidoc": "0.3.0", "generator": { "name": "apidoc", "time": "2018-04-08T06:08:09.948Z", "url": "http://apidocjs.com", "version": "0.17.6" } });

View file

@@ -1,21 +1 @@
{
"name": "Lighthouse",
"version": "0.0.1",
"description": "Lighthouse - Next-gen search api for LBRY",
"title": "Lighthouse API DOCS",
"url": "http://localhost",
"sampleUrl": "http://localhost",
"json_body": true,
"template": {
"withCompare": true,
"withGenerator": true
},
"defaultVersion": "0.0.0",
"apidoc": "0.3.0",
"generator": {
"name": "apidoc",
"time": "2017-09-28T20:35:05.906Z",
"url": "http://apidocjs.com",
"version": "0.17.6"
}
}
{ "name": "Lighthouse", "version": "0.0.1", "description": "Lighthouse - Next-gen search api for LBRY", "title": "Lighthouse API DOCS", "url": "http://lighthouse.lbry.io", "sampleUrl": "http://lighthouse.lbry.io", "json_body": true, "template": { "withCompare": true, "withGenerator": true }, "defaultVersion": "0.0.0", "apidoc": "0.3.0", "generator": { "name": "apidoc", "time": "2018-04-08T06:08:09.948Z", "url": "http://apidocjs.com", "version": "0.17.6" } }

0
docs/utils/send_sample_request.js Normal file → Executable file
View file

View file

@@ -33,6 +33,7 @@
"gendoc": "apidoc -i server/ -o docs/"
},
"dependencies": {
"@koa/cors": "^2.2.1",
"app-root-path": "^2.0.1",
"babel-polyfill": "^6.5.0",
"bitcoin-promise": "filipnyquist/node-bitcoin-promise#1fbf1cb8913ca3542b66060d48ebea185661e0a7",

View file

@@ -1,4 +1,5 @@
import bodyParser from 'koa-bodyparser';
import cors from '@koa/cors';
import Koa from 'koa';
import logger from 'koa-logger';
import helmet from 'koa-helmet';
@@ -16,7 +17,8 @@ const app = new Koa();
app
.use(logger())
.use(bodyParser())
.use(helmet());
.use(helmet())
.use(cors());
routing(app);

View file

@@ -121,6 +121,24 @@ export async function sync () {
}
});
}
// Remove claims for blocked outpoints from the search index once a sync pass finishes.
winston.log('info', '[Importer] Removing blocked claims from search!');
var util = require('./util.js');
// Raw JSON response body from the lbry.io blocked-outpoints API (see getBlockedOutpoints below).
var blockedOutputsResponse = await getBlockedOutpoints();
var outpointlist = JSON.parse(blockedOutputsResponse);
for (let outpoint of outpointlist.data.outpoints) {
// Derive the hex claim id from the "txid:vout" outpoint string.
var claimid = util.OutpointToClaimId(outpoint);
console.log('Deleting ClaimId: ' + claimid);
// Fire-and-forget delete from Elasticsearch; the delete is not awaited and
// failures are only logged, never propagated.
eclient.delete({
index: 'claims',
type : 'claim',
id : claimid,
}, function (error, response) {
if (error) {
// NOTE(review): winston.log expects (level, message); passing only `error`
// here likely should be winston.log('error', error) — confirm.
winston.log(error);
}
});
}
// Done adding, update our claimTrie cache to latest and wait a bit...
await saveJSON(path.join(appRoot.path, 'claimTrieCache.json'), latestClaimTrie);
status.info = 'upToDate';
@@ -152,6 +170,18 @@ function getRemovedClaims (oldClaimTrie, newClaimTrie) {
});
}
/**
 * Fetch the list of blocked outpoints from the lbry.io API.
 *
 * @returns {Promise<string>} resolves with the raw JSON response body;
 *   rejects with the underlying request error.
 */
function getBlockedOutpoints () {
  // rp() already returns a promise — wrapping it in `new Promise` and
  // manually resolving/rejecting was the explicit promise-construction
  // anti-pattern. Returning the call directly preserves both the resolved
  // value and the rejection reason.
  return rp(`http://api.lbry.io/file/list_blocked`);
}
function getValue (tx, i) {
return new Promise((resolve, reject) => {
rp(`http://localhost:5000/claim_decode/${tx}/${i}`)

View file

@@ -0,0 +1,57 @@
// taken from https://github.com/crypto-browserify/buffer-reverse/blob/master/inplace.js
// This module already uses ES module syntax (`export`), so use a single ESM
// import instead of per-function CommonJS require() calls.
import crypto from 'crypto';

/**
 * Reverse a buffer in place.
 * @param {Buffer|Uint8Array} buffer - mutated in place.
 * @returns {Buffer|Uint8Array} the same buffer, reversed.
 */
function reverseInplace (buffer) {
  for (let i = 0, j = buffer.length - 1; i < j; ++i, --j) {
    const t = buffer[j];
    buffer[j] = buffer[i];
    buffer[i] = t;
  }
  return buffer;
}

// NOTE: the previous implementations passed a `secret` string as the second
// argument of crypto.createHash — that signature belongs to createHmac; for
// createHash the argument is silently ignored. It was removed so the code no
// longer implies keyed hashing. The 'binary' encoding arguments were also
// no-ops for Buffer inputs and are dropped.

/** RIPEMD-160 digest of a byte buffer. @returns {Buffer} 20-byte digest. */
function ripemd160 (bytearray) {
  return crypto.createHash('ripemd160').update(bytearray).digest();
}

/** SHA-256 digest of a byte buffer. @returns {Buffer} 32-byte digest. */
function sha256 (bytearray) {
  return crypto.createHash('sha256').update(bytearray).digest();
}

/**
 * Convert a "txid:vout" outpoint string into its claim id:
 * reverse(ripemd160(sha256(bigendian_txid || bigendian_vout))) as hex.
 *
 * @param {string} outpointstr - outpoint in "txid:vout" form, txid as a
 *   64-char little-endian hex string, vout a decimal uint32.
 * @returns {string} 40-character lowercase hex claim id.
 */
export function OutpointToClaimId (outpointstr) {
  const [txid, voutstr] = outpointstr.split(':');
  // vout arrives as a string; parse it explicitly instead of relying on
  // Buffer's implicit numeric coercion.
  const vout = Number.parseInt(voutstr, 10);
  // Assuming endianness is LittleEndian - JavaScript endianness depends on hardware.
  // Can check with os.endianness(). Possible values are "BE" or "LE" as of Node.js v0.10.0
  // convert txid to byte array then reverse bytes to get BigEndian
  const txidrevbytes = reverseInplace(Buffer.from(txid, 'hex'));
  // vout is a uint32 — use the unsigned writer so values >= 2^31 don't throw;
  // Buffer.alloc is fully overwritten below, so no allocUnsafe needed.
  const voutbytes = Buffer.alloc(4);
  voutbytes.writeUInt32BE(vout);
  // Merge arrays, then hash: SHA-256 followed by RIPEMD-160.
  let claimidbytes = Buffer.concat([txidrevbytes, voutbytes]);
  claimidbytes = sha256(claimidbytes);
  claimidbytes = ripemd160(claimidbytes);
  // Return to little endian and encode as a hex string.
  claimidbytes = reverseInplace(claimidbytes);
  return claimidbytes.toString('hex');
}