Merge pull request #7 from lbryio/base-koa

Added the basic structure and fixed some issues!
Fillerino 2017-08-16 19:51:11 +02:00 committed by GitHub
commit 3037f3f4c4
22 changed files with 4022 additions and 247 deletions

4
.babelrc Normal file

@@ -0,0 +1,4 @@
{
"presets": ["env"],
"plugins": ["transform-async-to-generator","syntax-async-functions","add-module-exports"]
}

16
.editorconfig Normal file

@@ -0,0 +1,16 @@
# EditorConfig helps developers define and maintain consistent
# coding styles between different editors and IDEs
# http://editorconfig.org
root = true
[*]
indent_style = space
indent_size = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[*.md]
trim_trailing_whitespace = false

42
.eslintrc Normal file

@@ -0,0 +1,42 @@
{
"env": {
"browser": false,
"mocha": false,
"es6": true,
"node": true
},
"parser": "babel-eslint",
"extends": "standard",
"globals": {
"GENTLY": true
},
"rules": {
"no-multi-spaces": 0,
"new-cap": 0,
"prefer-promise-reject-errors":0,
"comma-dangle": [
"error",
"always-multiline"
],
"semi": [
"error",
"always",
{ "omitLastInOneLineBlock": true }
],
"key-spacing": [
"error",
{
"multiLine": {
"beforeColon": false,
"afterColon": true
},
"align": {
"beforeColon": false,
"afterColon": true,
"on": "colon",
"mode": "strict"
}
}
]
}
}

63
.gitignore vendored

@@ -1,59 +1,4 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# nyc test coverage
.nyc_output
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Typescript v1 declaration files
typings/
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
node_modules
.DS_Store
/dist
npm-debug.log

21
LICENSE

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2017 Fillerino
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

README.md

@@ -10,7 +10,31 @@
>3. Lighthouse API server, which serves the API and does all calculations about what to send to the end user.
## DEVELOPMENT NOTES:
> Stuff that needs to be worked on (in order):
>1. The importer needs to record the block in which each claim was made, since depth alone won't work in a plain, non-updating all-claims database.
>2. The Lighthouse API server needs to be rebuilt in Node.js with Koa.js. This API server will query the Elasticsearch autosuggest API; it will need some score balancing between searching names, titles, and descriptions, plus logic to only send standing claims to the clients. (Bill can help with this part, as it will take the longest; a rough sketch of such a query follows below.)
>3. Ansible configuration and documentation
> Stuff that needs to be worked on is tracked in the issues and on the project board.
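As a rough, hypothetical sketch of the score balancing described in point 2 (assuming the `claims` index and document layout created by the importer's `sync.js`, and the same `elasticsearch` client settings; the `title` field path is an assumption, since the importer only references `description`), a boosted multi_match query might look like this:
```
const elasticsearch = require('elasticsearch');

const eclient = new elasticsearch.Client({
  host: 'http://elastic:changeme@localhost:9200',
});

// Hypothetical score balancing: weight claim names over titles, and titles
// over descriptions. Field paths assume the documents indexed by sync.js.
function searchClaims (query) {
  return eclient.search({
    index: 'claims',
    body: {
      query: {
        multi_match: {
          query: query,
          fields: [
            'name^3',
            'value.stream.metadata.title^2',
            'value.stream.metadata.description',
          ],
        },
      },
    },
  });
}
```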
## Running
Install dependencies
```
yarn install --production=false
```
Start a local server
```
npm start
```
Run the tests
```
npm test
```
Build and run the production server
```
npm run prod
```
**Note:** Please make sure your Elasticsearch server is running before using `npm start` or `npm run prod`.
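A quick way to verify that (a minimal sketch, assuming the default local Elasticsearch setup used by the importer):
```
const elasticsearch = require('elasticsearch');

const eclient = new elasticsearch.Client({
  host: 'http://elastic:changeme@localhost:9200',
});

// ping() resolves if the cluster answers within the timeout and rejects otherwise.
eclient.ping({ requestTimeout: 3000 })
  .then(() => console.log('Elasticsearch is up'))
  .catch(() => console.error('Elasticsearch is unreachable'));
```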
## License
MIT © [LBRYio, Filip Nyquist, Bill Bittner](https://github.com/lbryio)

1
importer/.gitignore vendored

@@ -1 +0,0 @@
node_modules

1
importer/README.md

@@ -1 +0,0 @@
## More documentation here soon...

61
importer/getClaims.js

@@ -1,61 +0,0 @@
'use strict'
const Promise = require('bluebird')
const bitcoin = require('bitcoin-promise');
const rp = require('request-promise');
const request = require('request');
let client;
async function getClaims (height, gclient) {
return new Promise(async (resolve, reject) => {
try {
client = gclient;
let blockHash = await client.getBlockHash(height).then(blockHash => {return blockHash}).catch( err => reject(err));
let block = await client.getBlock(blockHash).then(block => {return block}).catch( err => reject(err));
let claims = await getClaimsForTxes(block.tx,height); // should return an array of claims, decoded if possible.
resolve(claims);
} catch (err) {
return reject(err)
}
})
}
async function getClaimsForTxes(txes,height) {
return new Promise(async (resolve, reject) => {
try {
let claimsArr = [];
let i=0;
for (let tx of txes) {
let claims_tx = await client.getClaimsForTx(tx).then(claims => {return claims}).catch( err => reject(err));
if(claims_tx != null){for (let claim of claims_tx) {
claim['height'] = height;
let d_claim = await getValue(tx,claim['nOut']);
if(d_claim !== 'error when decoding claims' && claim['value']){
claim['value'] = JSON.parse(d_claim);
claimsArr.push(claim);
}else{
claim['value'] = { error: 'non_decodable' }
claimsArr.push(claim);
}
}}
}
resolve(claimsArr);
} catch (err) {
return reject(err)
}
})
}
async function getValue(tx, i){
return new Promise(async (resolve, reject) => {
rp(`http://localhost:5000/claim_decode/${tx}/${i}`)
.then(function (htmlString) {
resolve(htmlString);
})
.catch(function (err) {
reject(err);
});
})
}
module.exports = exports = getClaims

24
importer/package.json

@@ -1,24 +0,0 @@
{
"dependencies": {
"bitcoin-promise": "filipnyquist/node-bitcoin-promise#c3eb4bea552a7a136a4a1405d831da3e92f2efea",
"bluebird": "^3.5.0",
"chalk": "^2.0.1",
"elasticsearch": "^13.2.0",
"jsonfile": "^3.0.1",
"limited-request-queue": "^3.0.4",
"ora": "^1.3.0",
"request": "^2.81.0",
"request-promise": "^4.2.1",
"request-rate-limiter": "^1.0.2",
"sleep": "^5.1.1",
"throttled-queue": "^1.0.4",
"unirest": "^0.5.1"
},
"name": "LBRYimporter",
"version": "1.0.0",
"description": "A LBRY sync tool, syncs claims from the chain into whatever you want :)",
"main": "sync.js",
"repository": "https://github.com/filipnyquist/lighthouse-sync.git",
"author": "Fillerino <fillerix@fillerix.se>",
"license": "MIT"
}

76
importer/sync.js

@@ -1,76 +0,0 @@
const Promise = require('bluebird')
const ora = require('ora');
const chalk = require('chalk');
const bitcoin = require('bitcoin-promise');
const request = require('request');
const sleep = require('sleep');
var elasticsearch = require('elasticsearch');
var eclient = new elasticsearch.Client({
host: 'http://elastic:changeme@localhost:9200',
log: 'info'
});
const client = new bitcoin.Client({
host: 'localhost',
port: 9245,
user: 'lbry',
pass: 'lbry',
timeout: 30000
});
let claimsSynced=0;
async function sync (currentHeight) {
try {
let maxHeight = await client.getBlockCount().then(blockHash => {return blockHash}).catch( err => reject(err));
if( currentHeight <= maxHeight ) {
let claims = await require('./getClaims')(currentHeight, client);
send(claims);
claimsSynced += claims.length;
spinner.color = 'green';
spinner.text = `Current block: ${currentHeight}/${maxHeight} | TotalClaimsImported: ${claimsSynced} `
sync(currentHeight+1);
} else {
process.exit(0);
spinner.color = 'yellow';
spinner.text = `Waiting for new blocks...`;
sync(currentHeight);
}
} catch (err) {
spinner.color = 'red';
spinner.text = ('Error with block: %s, %s', currentHeight, err);
}
}
function send(arr){ // Modular change output here :)
arr.forEach(function(claim) {
claim['id'] = claim['claimId'];
//Check if our value is a object, else make it a object...
claim['value'] = (typeof claim.value == "object" ? claim.value : JSON.parse(claim.value));
//claim['value'] = JSON.stringify(claim['value']);
console.log(claim.value.metadata);
if(claim.name && claim.value){
claim.suggest_name ={
input: claim.name,
weight: 20
}
if(claim.value.claimType == "streamType" && claim.value.stream.metadata && claim.value.stream.metadata.description){
claim.suggest_desc ={
input: claim.value.stream.metadata.description.split(" "),
weight: 10
}
}
}
eclient.create({
index: 'claims',
type: 'claim',
id: claim.claimId,
body: claim
}, function (error, response) {
console.log(response);
});
});
}
console.log(chalk.green.underline.bold('Running LBRYSync v0.0.1rc1'))
const spinner = ora('Loading LBRYsync..').start();
sync(0)// Block to start from... :)

65
package.json Normal file

@@ -0,0 +1,65 @@
{
"name": "lighthouse",
"description": "Lighthouse is a lightning-fast advanced search engine API for publications on the lbrycrd with autocomplete capabilities.",
"version": "0.0.1",
"author": "filipnyquist <filip@lbry.io> , billbitt <bill@lbry.io>",
"keywords": [
"lbry",
"search",
"koa",
"rest",
"api",
"async",
"es7"
],
"repository": {
"type": "git",
"url": "git+https://github.com/lbryio/lighthouse.git"
},
"license": "MIT",
"bugs": {
"url": "https://github.com/lbryio/lighthouse/issues"
},
"homepage": "https://github.com/lbryio/lighthouse#readme",
"main": "server/index.js",
"scripts": {
"start": "nodemon server/ --exec babel-node",
"build": "babel server -d dist",
"lint": "eslint ./server",
"test": "npm run lint && npm run mocha",
"prod": "npm run build && node dist/",
"mocha": "./node_modules/.bin/mocha --compilers js:babel-register --require babel-polyfill"
},
"dependencies": {
"babel-polyfill": "^6.5.0",
"glob": "^7.1.1",
"jsonwebtoken": "^7.2.1",
"koa": "^2.0.0-alpha.7",
"koa-bodyparser": "^3.0.0",
"koa-helmet": "^2.0.0",
"koa-jwt": "^2.1.0",
"koa-logger": "^2.0.0",
"koa-router": "^7.0.0",
"mongoose": "^4.4.3"
},
"devDependencies": {
"babel-cli": "^6.5.1",
"babel-eslint": "^7.1.1",
"babel-plugin-add-module-exports": "^0.2.1",
"babel-plugin-syntax-async-functions": "^6.5.0",
"babel-plugin-transform-async-to-generator": "^6.5.0",
"babel-preset-env": "^1.6.0",
"babel-register": "^6.3.13",
"chai": "^3.5.0",
"eslint": "^3.12.2",
"eslint-config-standard": "^10.2.1",
"eslint-plugin-import": "^2.2.0",
"eslint-plugin-node": "^5.1.1",
"eslint-plugin-promise": "^3.5.0",
"eslint-plugin-standard": "^3.0.1",
"mocha": "^3.2.0",
"nodemon": "^1.8.1",
"should": "^11.1.2",
"supertest": "^2.0.1"
}
}

2
server/config.js Normal file

@@ -0,0 +1,2 @@
export const port = process.env.PORT || 3000;
export const baseApi = 'api';

33
server/controllers/lighthouse.js Normal file

@@ -0,0 +1,33 @@
import 'babel-polyfill';
class LighthouseControllers {
/* eslint-disable no-param-reassign */
/**
* Search endpoint.
* @param {Object} ctx Koa context
*/
async search (ctx) {
ctx.body = 'Search...';
}
/**
* Info about the API.
* @param {Object} ctx Koa context
*/
async info (ctx) {
ctx.body = 'Info...';
}
/**
* Status of the API.
* @param {Object} ctx Koa context
*/
async status (ctx) {
ctx.body = 'Status...';
}
/* eslint-enable no-param-reassign */
}
export default new LighthouseControllers();
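The three handlers above are placeholders. As a hedged sketch of where `search` is headed once it is wired to Elasticsearch (`searchClaims` is a hypothetical helper standing in for the boosted query sketched in the README; it is not part of this commit):
```
// Hypothetical future shape of the search handler.
async function search (ctx) {
  const { s } = ctx.query; // e.g. GET /api/lighthouse/search?s=hello
  const results = await searchClaims(s); // assumed Elasticsearch query helper
  ctx.body = results.hits.hits; // raw matching claim documents
}
```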

21
server/index.js Normal file

@@ -0,0 +1,21 @@
import bodyParser from 'koa-bodyparser';
import Koa from 'koa';
import logger from 'koa-logger';
import helmet from 'koa-helmet';
import routing from './routes/';
import { port } from './config';
// Create Koa Application
const app = new Koa();
app
.use(logger())
.use(bodyParser())
.use(helmet());
routing(app);
// Start the application
app.listen(port, () => console.log(`✅ The server is running at http://localhost:${port}/`));
export default app;

13
server/routes/index.js Normal file

@@ -0,0 +1,13 @@
import routesLoader from '../utils/routesLoader';
export default function (app) {
routesLoader(`${__dirname}`).then((files) => {
files.forEach((route) => {
app
.use(route.routes())
.use(route.allowedMethods({
throw: true,
}));
});
});
}

21
server/routes/lighthouse.js Normal file

@@ -0,0 +1,21 @@
import 'babel-polyfill';
import Router from 'koa-router';
import { baseApi } from '../config';
import LighthouseControllers from '../controllers/lighthouse';
const api = 'lighthouse';
const router = new Router();
router.prefix(`/${baseApi}/${api}`);
// GET /api/lighthouse
router.get('/', LighthouseControllers.info);
// GET /api/lighthouse/search
router.get('/search', LighthouseControllers.search);
// GET /api/lighthouse/status
router.get('/status', LighthouseControllers.status);
export default router;
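With `baseApi = 'api'` from the config, this router mounts `GET /api/lighthouse`, `GET /api/lighthouse/search`, and `GET /api/lighthouse/status`. A minimal smoke test with `supertest` (already in the devDependencies) might look like:
```
import supertest from 'supertest';
import app from '../server/';

const request = supertest.agent(app.listen());

// The placeholder status handler should answer with HTTP 200.
request
  .get('/api/lighthouse/status')
  .expect(200, (err, res) => {
    if (err) throw err;
    console.log(res.text); // 'Status...'
  });
```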


@@ -0,0 +1,60 @@
'use strict';
const Promise = require('bluebird');
const rp = require('request-promise');
let client;
async function getClaims (height, gclient) {
return new Promise(async (resolve, reject) => {
try {
client = gclient;
let blockHash = await client.getBlockHash(height).then(blockHash => { return blockHash }).catch(err => reject(err));
let block = await client.getBlock(blockHash).then(block => { return block }).catch(err => reject(err));
let claims = await getClaimsForTxes(block.tx, height); // should return an array of claims, decoded if possible.
resolve(claims);
} catch (err) {
return reject(err);
}
});
}
async function getClaimsForTxes (txes, height) {
return new Promise(async (resolve, reject) => {
try {
let claimsArr = [];
for (let tx of txes) {
let claimsTx = await client.getClaimsForTx(tx).then(claims => { return claims }).catch(err => reject(err));
if (claimsTx != null) {
for (let claim of claimsTx) {
claim['height'] = height;
let dClaim = await getValue(tx, claim['nOut']);
if (dClaim !== 'error when decoding claims' && claim['value']) {
claim['value'] = JSON.parse(dClaim);
claimsArr.push(claim);
} else {
claim['value'] = { error: 'non_decodable' };
claimsArr.push(claim);
}
}
}
}
resolve(claimsArr);
} catch (err) {
return reject(err);
}
});
}
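// getValue asks a local claim-decoding service (assumed to be whatever daemon
// this URL targets on port 5000) to decode output `i` of transaction `tx`.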
async function getValue (tx, i) {
return new Promise(async (resolve, reject) => {
rp(`http://localhost:5000/claim_decode/${tx}/${i}`)
.then(function (htmlString) {
resolve(htmlString);
})
.catch(function (err) {
reject(err);
});
});
}
module.exports = exports = getClaims;


@@ -0,0 +1,65 @@
const bitcoin = require('bitcoin-promise');
var elasticsearch = require('elasticsearch');
var eclient = new elasticsearch.Client({
host: 'http://elastic:changeme@localhost:9200',
log : 'info',
});
const client = new bitcoin.Client({
host : 'localhost',
port : 9245,
user : 'lbry',
pass : 'lbry',
timeout: 30000,
});
let claimsSynced = 0; // total claims imported so far
async function sync (currentHeight) {
try {
let maxHeight = await client.getBlockCount().then(count => { return count }).catch(err => console.log(err));
if (currentHeight <= maxHeight) {
let claims = await require('./getClaims')(currentHeight, client);
send(claims);
claimsSynced += claims.length;
// currentHeight / maxHeight / claimsSynced
sync(currentHeight + 1);
} else {
process.exit(0);
// Waiting for new blocks logic here
sync(currentHeight); // eslint-disable-line no-unreachable
}
} catch (err) {
console.log(err); // don't swallow sync errors silently
}
}
function send (arr) { // Modular change output here :)
arr.forEach(function (claim) {
claim['id'] = claim['claimId'];
// Check if our value is a object, else make it a object...
claim['value'] = (typeof claim.value === 'object' ? claim.value : JSON.parse(claim.value));
// claim['value'] = JSON.stringify(claim['value']);
console.log(claim.value.metadata);
if (claim.name && claim.value) {
claim.suggest_name = {
input : claim.name,
weight: 20,
};
if (claim.value.claimType === 'streamType' && claim.value.stream.metadata && claim.value.stream.metadata.description) {
claim.suggest_desc = {
input : claim.value.stream.metadata.description.split(' '),
weight: 10,
};
}
}
eclient.create({
index: 'claims',
type : 'claim',
id : claim.claimId,
body : claim,
}, function (error, response) {
if (error) { console.log(error) }
console.log(response);
});
});
}
module.exports = exports = sync;
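Note that `suggest_name` and `suggest_desc` are completion-suggester inputs, which only work if the `claims` index maps those fields with type `completion`. No such mapping is created in this commit; a hedged sketch of what it might look like (assuming Elasticsearch 5.x and the `eclient` configured above):
```
// Hypothetical one-time index setup for the suggest fields used by send().
eclient.indices.create({
  index: 'claims',
  body: {
    mappings: {
      claim: {
        properties: {
          suggest_name: { type: 'completion' },
          suggest_desc: { type: 'completion' },
        },
      },
    },
  },
}, function (error, response) {
  if (error) { console.log(error); }
  console.log(response);
});
```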

19
server/utils/routesLoader.js Normal file

@@ -0,0 +1,19 @@
import glob from 'glob';
export default function (dirname) {
return new Promise((resolve, reject) => {
const routes = [];
glob(`${dirname}/*`, {
ignore: '**/index.js',
}, (err, files) => {
if (err) {
return reject(err);
}
files.forEach((file) => {
const route = require(file); // eslint-disable-line global-require, import/no-dynamic-require
routes.push(route);
});
return resolve(routes);
});
});
}
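`routesLoader` globs every file in the routes directory except `index.js` and hands each export to `app.use(route.routes())`, so each of those files must export a koa-router instance. A minimal sketch of a conforming route module (the `/ping` route is purely illustrative):
```
import Router from 'koa-router';

const router = new Router();

// Any handler works; routesLoader only requires that the default export
// is a koa-router instance providing .routes() and .allowedMethods().
router.get('/ping', async (ctx) => {
  ctx.body = 'pong';
});

export default router;
```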

128
test/api.test.js Normal file

@@ -0,0 +1,128 @@
/*
import app from '../server/'
import supertest from 'supertest'
import { expect, should } from 'chai'
const temp = {}
const request = supertest.agent(app.listen())
should()
describe('POST api/authenticate', () => {
it('should authenticate', (done) => {
request
.post('/api/authenticate')
.set('Accept', 'application/json')
.send({
password: 'password',
})
.expect(200, (err, res) => {
temp.token = res.body.token
done()
})
})
})
describe('POST /city', () => {
it('should add a city', (done) => {
request
.post('/api/cities')
.set('Accept', 'application/json')
.set('Authorization', `Bearer ${temp.token}`)
.set('Accept', 'application/json')
.send({
name: 'Bangkok',
totalPopulation: 8249117,
country: 'Thailand',
zipCode: 1200,
})
.expect(200, (err, res) => {
temp.idCity = res.body._id;
done()
})
})
})
describe('GET /cities', () => {
it('should get all cities', (done) => {
request
.get('/api/cities')
.set('Authorization', `Bearer ${temp.token}`)
.set('Accept', 'application/json')
.expect(200, (err, res) => {
expect(res.body.length).to.be.at.least(1);
done()
})
})
})
describe('GET /cities/:id', () => {
it('should get a city', (done) => {
request
.get(`/api/cities/${temp.idCity}`)
.set('Authorization', `Bearer ${temp.token}`)
.set('Accept', 'application/json')
.expect(200, (err, res) => {
res.body.name.should.equal('Bangkok')
res.body.totalPopulation.should.equal(8249117)
res.body.country.should.equal('Thailand')
res.body.zipCode.should.equal(1200)
res.body._id.should.equal(temp.idCity)
done()
})
})
})
describe('PUT /cities', () => {
it('should update a city', (done) => {
request
.put(`/api/cities/${temp.idCity}`)
.set('Authorization', `Bearer ${temp.token}`)
.set('Accept', 'application/json')
.send({
name: 'Chiang Mai',
totalPopulation: 148477,
country: 'Thailand',
zipCode: 50000,
})
.expect(200, (err, res) => {
temp.idCity = res.body._id;
done()
})
})
it('should get updated city', (done) => {
request
.get(`/api/cities/${temp.idCity}`)
.set('Authorization', `Bearer ${temp.token}`)
.set('Accept', 'application/json')
.expect(200, (err, res) => {
res.body.name.should.equal('Chiang Mai')
res.body.totalPopulation.should.equal(148477)
res.body.country.should.equal('Thailand')
res.body.zipCode.should.equal(50000)
res.body._id.should.equal(temp.idCity)
done()
})
})
})
describe('DELETE /cities', () => {
it('should delete a city', (done) => {
request
.delete(`/api/cities/${temp.idCity}`)
.set('Authorization', `Bearer ${temp.token}`)
.set('Accept', 'application/json')
.expect(200, (err, res) => {
done()
})
})
it('should get error', (done) => {
request
.get(`/api/cities/${temp.idCity}`)
.set('Accept', 'application/json')
.expect(404, () => {
done()
})
})
}) */

3501
yarn.lock Normal file

File diff suppressed because it is too large