Moved stuff to official repo, time to get work going!
Moved stuff to official repo, time to get work going!
This commit is contained in:
parent
c4dd60dc4e
commit
4427226eef
8 changed files with 259 additions and 1 deletions
59
.gitignore
vendored
Normal file
59
.gitignore
vendored
Normal file
|
@ -0,0 +1,59 @@
|
|||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
|
||||
# Runtime data
|
||||
pids
|
||||
*.pid
|
||||
*.seed
|
||||
*.pid.lock
|
||||
|
||||
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||
lib-cov
|
||||
|
||||
# Coverage directory used by tools like istanbul
|
||||
coverage
|
||||
|
||||
# nyc test coverage
|
||||
.nyc_output
|
||||
|
||||
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
|
||||
.grunt
|
||||
|
||||
# Bower dependency directory (https://bower.io/)
|
||||
bower_components
|
||||
|
||||
# node-waf configuration
|
||||
.lock-wscript
|
||||
|
||||
# Compiled binary addons (http://nodejs.org/api/addons.html)
|
||||
build/Release
|
||||
|
||||
# Dependency directories
|
||||
node_modules/
|
||||
jspm_packages/
|
||||
|
||||
# Typescript v1 declaration files
|
||||
typings/
|
||||
|
||||
# Optional npm cache directory
|
||||
.npm
|
||||
|
||||
# Optional eslint cache
|
||||
.eslintcache
|
||||
|
||||
# Optional REPL history
|
||||
.node_repl_history
|
||||
|
||||
# Output of 'npm pack'
|
||||
*.tgz
|
||||
|
||||
# Yarn Integrity file
|
||||
.yarn-integrity
|
||||
|
||||
# dotenv environment variables file
|
||||
.env
|
||||
|
21
LICENSE
Normal file
21
LICENSE
Normal file
|
@ -0,0 +1,21 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2017 Fillerino
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
17
README.md
17
README.md
|
@ -1 +1,16 @@
|
|||
lighthouse
|
||||
# Lighthouse - A lightning fast search for the LBRY blockchain
|
||||
**Created by _filipnyquist_ <filip@lbry.io> and _billbitt_ <bill@lbry.io>**
|
||||
|
||||
## What is Lighthouse?
|
||||
>Lighthouse is a lightning-fast advanced search engine API for publications on the lbrycrd blockchain, with autocomplete capabilities.
|
||||
|
||||
## What does Lighthouse consist of?
|
||||
>1. Elasticsearch as a backend db server.
|
||||
>2. LBRYimport, an importer that imports the claims into the Elasticsearch database.
|
||||
>3. Lighthouse API server, which serves the API and does all calculations about what to send to the end user.
|
||||
|
||||
## DEVELOPMENT NOTES:
|
||||
> Stuff needed to be worked on(in order):
|
||||
>1. Importer needs to add the block that the claim was made in, as depth won't work in a plain non-updating-all-claims database.
|
||||
>2. Lighthouse API server needs to be rebuilt in node.js with Koa.JS. This API server will query the Elasticsearch autosuggest API; it will need some score balance between searching names, titles, and descriptions, and some logic to only send the standing claims to the clients. (Bill can help with this part, as this will take the longest.)
|
||||
>3. Ansible configuration and documentation
|
||||
|
|
1
importer/.gitignore
vendored
Normal file
1
importer/.gitignore
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
node_modules
|
1
importer/README.md
Normal file
1
importer/README.md
Normal file
|
@ -0,0 +1 @@
|
|||
## More documentation here soon...
|
61
importer/getClaims.js
Normal file
61
importer/getClaims.js
Normal file
|
@ -0,0 +1,61 @@
|
|||
'use strict'
|
||||
|
||||
const Promise = require('bluebird')
|
||||
const bitcoin = require('bitcoin-promise');
|
||||
const rp = require('request-promise');
|
||||
const request = require('request');
|
||||
|
||||
let client;
|
||||
|
||||
async function getClaims (height, gclient) {
|
||||
return new Promise(async (resolve, reject) => {
|
||||
try {
|
||||
client = gclient;
|
||||
let blockHash = await client.getBlockHash(height).then(blockHash => {return blockHash}).catch( err => reject(err));
|
||||
let block = await client.getBlock(blockHash).then(block => {return block}).catch( err => reject(err));
|
||||
let claims = await getClaimsForTxes(block.tx,height); // should return an array of claims, decoded if possible.
|
||||
resolve(claims);
|
||||
} catch (err) {
|
||||
return reject(err)
|
||||
}
|
||||
})
|
||||
}
|
||||
async function getClaimsForTxes(txes,height) {
|
||||
return new Promise(async (resolve, reject) => {
|
||||
try {
|
||||
let claimsArr = [];
|
||||
let i=0;
|
||||
for (let tx of txes) {
|
||||
let claims_tx = await client.getClaimsForTx(tx).then(claims => {return claims}).catch( err => reject(err));
|
||||
if(claims_tx != null){for (let claim of claims_tx) {
|
||||
claim['height'] = height;
|
||||
let d_claim = await getValue(tx,claim['nOut']);
|
||||
if(d_claim !== 'error when decoding claims' && claim['value']){
|
||||
claim['value'] = JSON.parse(d_claim);
|
||||
claimsArr.push(claim);
|
||||
}else{
|
||||
claim['value'] = { error: 'non_decodable' }
|
||||
claimsArr.push(claim);
|
||||
}
|
||||
}}
|
||||
}
|
||||
resolve(claimsArr);
|
||||
} catch (err) {
|
||||
return reject(err)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
async function getValue(tx, i){
|
||||
return new Promise(async (resolve, reject) => {
|
||||
rp(`http://localhost:5000/claim_decode/${tx}/${i}`)
|
||||
.then(function (htmlString) {
|
||||
resolve(htmlString);
|
||||
})
|
||||
.catch(function (err) {
|
||||
reject(err);
|
||||
});
|
||||
})
|
||||
}
|
||||
|
||||
module.exports = exports = getClaims
|
24
importer/package.json
Normal file
24
importer/package.json
Normal file
|
@ -0,0 +1,24 @@
|
|||
{
|
||||
"dependencies": {
|
||||
"bitcoin-promise": "filipnyquist/node-bitcoin-promise#c3eb4bea552a7a136a4a1405d831da3e92f2efea",
|
||||
"bluebird": "^3.5.0",
|
||||
"chalk": "^2.0.1",
|
||||
"elasticsearch": "^13.2.0",
|
||||
"jsonfile": "^3.0.1",
|
||||
"limited-request-queue": "^3.0.4",
|
||||
"ora": "^1.3.0",
|
||||
"request": "^2.81.0",
|
||||
"request-promise": "^4.2.1",
|
||||
"request-rate-limiter": "^1.0.2",
|
||||
"sleep": "^5.1.1",
|
||||
"throttled-queue": "^1.0.4",
|
||||
"unirest": "^0.5.1"
|
||||
},
|
||||
"name": "LBRYimporter",
|
||||
"version": "1.0.0",
|
||||
"description": "A LBRY sync tool, syncs claims from the chain into whatever you want :)",
|
||||
"main": "sync.js",
|
||||
"repository": "https://github.com/filipnyquist/lighthouse-sync.git",
|
||||
"author": "Fillerino <fillerix@fillerix.se>",
|
||||
"license": "MIT"
|
||||
}
|
76
importer/syncElastic.js
Normal file
76
importer/syncElastic.js
Normal file
|
@ -0,0 +1,76 @@
|
|||
const Promise = require('bluebird')
|
||||
const ora = require('ora');
|
||||
const chalk = require('chalk');
|
||||
const bitcoin = require('bitcoin-promise');
|
||||
const request = require('request');
|
||||
const sleep = require('sleep');
|
||||
var elasticsearch = require('elasticsearch');
|
||||
var eclient = new elasticsearch.Client({
|
||||
host: 'http://elastic:changeme@localhost:9200',
|
||||
log: 'info'
|
||||
});
|
||||
const client = new bitcoin.Client({
|
||||
host: 'localhost',
|
||||
port: 9245,
|
||||
user: 'lbry',
|
||||
pass: 'lbry',
|
||||
timeout: 30000
|
||||
});
|
||||
let claimsSynced=0;
|
||||
|
||||
async function sync (currentHeight) {
|
||||
try {
|
||||
let maxHeight = await client.getBlockCount().then(blockHash => {return blockHash}).catch( err => reject(err));
|
||||
if( currentHeight <= maxHeight ) {
|
||||
let claims = await require('./getClaims')(currentHeight, client);
|
||||
send(claims);
|
||||
claimsSynced += claims.length;
|
||||
spinner.color = 'green';
|
||||
spinner.text = `Current block: ${currentHeight}/${maxHeight} | TotalClaimsImported: ${claimsSynced} `
|
||||
sync(currentHeight+1);
|
||||
} else {
|
||||
process.exit(0);
|
||||
spinner.color = 'yellow';
|
||||
spinner.text = `Waiting for new blocks...`;
|
||||
sync(currentHeight);
|
||||
|
||||
}
|
||||
} catch (err) {
|
||||
spinner.color = 'red';
|
||||
spinner.text = ('Error with block: %s, %s', currentHeight, err);
|
||||
}
|
||||
}
|
||||
|
||||
function send(arr){ // Modular change output here :)
|
||||
arr.forEach(function(claim) {
|
||||
claim['id'] = claim['claimId'];
|
||||
//Check if our value is a object, else make it a object...
|
||||
claim['value'] = (typeof claim.value == "object" ? claim.value : JSON.parse(claim.value));
|
||||
//claim['value'] = JSON.stringify(claim['value']);
|
||||
console.log(claim.value.metadata);
|
||||
if(claim.name && claim.value){
|
||||
claim.suggest_name ={
|
||||
input: claim.name,
|
||||
weight: 20
|
||||
}
|
||||
if(claim.value.claimType == "streamType" && claim.value.stream.metadata && claim.value.stream.metadata.description){
|
||||
claim.suggest_desc ={
|
||||
input: claim.value.stream.metadata.description.split(" "),
|
||||
weight: 10
|
||||
}
|
||||
}
|
||||
}
|
||||
eclient.create({
|
||||
index: 'claims',
|
||||
type: 'claim',
|
||||
id: claim.claimId,
|
||||
body: claim
|
||||
}, function (error, response) {
|
||||
console.log(response);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
console.log(chalk.green.underline.bold('Running LBRYSync v0.0.1rc1'))
|
||||
const spinner = ora('Loading LBRYsync..').start();
|
||||
sync(0)// Block to start from... :)
|
Loading…
Reference in a new issue