Merge pull request #733 from lbryio/master

Cut staging from master
This commit is contained in:
Shawn K 2018-11-12 14:22:09 -06:00 committed by GitHub
commit 2f09713186
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
55 changed files with 5234 additions and 12952 deletions

View file

@ -3,3 +3,4 @@ node_modules/
public/bundle
server/render/build
test/
server/chainquery

View file

@ -1,4 +1,5 @@
{
"parser": "babel-eslint",
"extends": ["standard", "standard-jsx"],
"env": {
"es6": true,
@ -12,16 +13,14 @@
"rules": {
"no-multi-spaces": 0,
"new-cap": 0,
"prefer-promise-reject-errors":0,
"comma-dangle": [
"error",
"always-multiline"
],
"semi": [
"error",
"always",
{ "omitLastInOneLineBlock": true }
],
"prefer-promise-reject-errors": 0,
"no-unused-vars": 0,
"standard/object-curly-even-spacing": 0,
"handle-callback-err": 0,
"one-var": 0,
"object-curly-spacing": 0,
"comma-dangle": ["error", "always-multiline"],
"semi": ["error", "always", { "omitLastInOneLineBlock": true }],
"key-spacing": [
"error",
{

View file

@ -40,7 +40,7 @@ Spee.ch is a react web app that depends on MySQL for local content, and on two o
* [FFmpeg](https://www.ffmpeg.org/download.html)
* [Spee.ch] (below)
* [pm2] (optional) process manager such as pm2 to run speech server.js
* [http proxy server] caddy, nginx, traefik, etc to forward 443 to speech port 3000
* [http proxy server] caddy, nginx, traefik, etc to forward 80/443 to speech port 3000
#### Clone this repo
@ -67,22 +67,12 @@ $ npm install
#### Create the config files using the built-in CLI
_note: make sure lbrynet is running in the background before proceeding_
_note: If you opt to run a local chainquery, such as from [lbry-docker/chainquery](https://github.com/lbryio/lbry-docker/tree/master/chainquery) you will need to specify connection details at this time in:_ ~/spee.ch/docs/setup/conf/speech/chainqueryConfig.json
```
$ npm run configure
```
* _note: At the moment, you will have to copy chainqueryConfig.json from:_
```
~/spee.ch/docs/setup/conf/speech/chainqueryConfig.json
```
_to:_
```
~/spee.ch/site/config/chainqueryConfig.json
```
* _note: The domain name in this part must be prefixed with http:// or https://_
#### Build & start the app
_note: make sure lbrynet is running in the background before proceeding_

View file

@ -13,7 +13,7 @@ export function onHandleShowPageUri (params, url) {
data: {
...params,
url,
}
},
};
}
@ -23,7 +23,7 @@ export function onHandleShowHomepage (params, url) {
data: {
...params,
url,
}
},
};
}

View file

@ -1,6 +1,6 @@
import Request from '../utils/request';
export function getSpecialAssetClaims(host, name, page) {
export function getSpecialAssetClaims (host, name, page) {
if (!page) page = 1;
const url = `${host}/api/special/${name}/${page}`;
return Request(url);

View file

@ -1,6 +1,6 @@
import React from 'react';
import Row from '@components/Row';
import {Link} from "react-router-dom";
import {Link} from 'react-router-dom';
const AboutSpeechDetails = () => {
return (

View file

@ -20,6 +20,6 @@ const ChooseChannelPublishRadio = ({ publishInChannel, toggleAnonymousPublish })
</label>
</div>
);
}
};
export default ChooseChannelPublishRadio;

View file

@ -6,6 +6,6 @@ const DropzoneDropItDisplay = () => {
Drop it.
</div>
);
}
};
export default DropzoneDropItDisplay;

View file

@ -8,5 +8,5 @@ const Label = ({ value }) => {
{value}
</label>
);
}
};
export default Label;

View file

@ -1,6 +1,5 @@
import React from 'react';
import AssetPreview from '@components/AssetPreview';
import HorizontalQuadSplit from '@components/HorizontalQuadSplit';
import Row from '@components/Row';
import ButtonSecondary from '@components/ButtonSecondary';
import { createGroupedList } from '../../utils/createGroupedList.js';

View file

@ -75,7 +75,7 @@ class ChannelCreateForm extends React.Component {
value={'Create Channel'}
onClickHandler={this.handleSubmit}
/>
</form>
</form>
) : (
<div>
<span className={'text--small text--secondary'}>{status}</span>

View file

@ -64,7 +64,7 @@ class ChannelLoginForm extends React.Component {
value={'Authenticate'}
onClickHandler={this.loginToChannel}
/>
</form>
</form>
);
}
}

View file

@ -5,10 +5,10 @@ import View from './view';
const mapStateToProps = ({ show, publish }) => {
return {
file: publish.file,
isUpdate: publish.isUpdate,
file : publish.file,
isUpdate : publish.isUpdate,
hasChanged: publish.hasChanged,
asset: selectAsset(show),
asset : selectAsset(show),
};
};

View file

@ -1,7 +1,7 @@
import {connect} from 'react-redux';
import View from './view';
import {selectAsset} from "../../selectors/show";
import {buildURI} from "../../utils/buildURI";
import {selectAsset} from '../../selectors/show';
import {buildURI} from '../../utils/buildURI';
const mapStateToProps = props => {
const { show, publish } = props;
@ -11,10 +11,10 @@ const mapStateToProps = props => {
uri = `lbry://${buildURI(asset)}`;
}
return {
disabled: publish.disabled,
file: publish.file,
status: publish.status.status,
isUpdate: publish.isUpdate,
disabled : publish.disabled,
file : publish.file,
status : publish.status.status,
isUpdate : publish.isUpdate,
hasChanged: publish.hasChanged,
uri,
};

View file

@ -34,7 +34,7 @@ class PublishUrlInput extends React.Component {
value = this.cleanseInput(value);
this.updateAndValidateClaimInput(value);
}
updateAndValidateClaimInput(value) {
updateAndValidateClaimInput (value) {
if (value) {
this.props.validateClaim(value);
} else {

View file

@ -24,7 +24,7 @@ class SEO extends React.Component {
const canonicalLink = `${host}${createCanonicalLink({
asset: asset ? { ...asset.claimData, shortId: asset.shortId } : undefined,
channel,
page: pageUri,
page : pageUri,
})}`;
// render results
return (

View file

@ -14,20 +14,20 @@ class FaqPage extends React.Component {
</Row>
<Row>
<h3>What is spee.ch?</h3>
<p>Spee.ch is a media-hosting site that reads from and publishes content to the <a href="http://lbry.io/">LBRY blockchain</a>.</p>
<p>Spee.ch is a media-hosting site that reads from and publishes content to the <a href='http://lbry.io/'>LBRY blockchain</a>.</p>
</Row>
<Row>
<h3>OK But Why Should I Care?</h3>
<p>Spee.ch is a fast and easy way to host your images, videos, and other content. What makes this different from other similar sites is that Speech is hosted on the LBRY blockchain. That means it is impossible for your content to be censored via digital means. Even if we took down Speech today, all content would remain immutably stored on the LBRY blockchain.</p>
<p>Blockchain technology doesnt solve <a href="https://xkcd.com/538/">the 5 dollar wrench attack</a>, but it solves just about every other problem in media hosting and distribution.</p>
<p>Even better - you can host your own clone of Speech to get even more control over your content. <a href="https://github.com/lbryio/spee.ch/blob/master/README.md">CLICK HERE FOR INFO</a>.</p>
<p>Blockchain technology doesnt solve <a href='https://xkcd.com/538/'>the 5 dollar wrench attack</a>, but it solves just about every other problem in media hosting and distribution.</p>
<p>Even better - you can host your own clone of Speech to get even more control over your content. <a href='https://github.com/lbryio/spee.ch/blob/master/README.md'>CLICK HERE FOR INFO</a>.</p>
<p>Speech is just the beginning of what will soon be a vibrant ecosystem of LBRY-powered apps. Use LBRY and youre one step closer to true freedom.</p>
</Row>
<Row>
<h3>How to Use spee.ch</h3>
<p>Its easy. Drag the image or video file of your choice into the center of the spee.ch homepage.</p>
<p>Spee.ch is currently best suited for web optimized MP4 video and standard image filetypes (JPEG, GIF).</p>
<p>If you want to refer to a piece of content repeatedly, or to build a collection of related content, you could create a channel. Channels work both for private collections and for public repositories. Theres more info about how to do this <a href="https://spee.ch/login">on the channel page</a>.</p>
<p>If you want to refer to a piece of content repeatedly, or to build a collection of related content, you could create a channel. Channels work both for private collections and for public repositories. Theres more info about how to do this <a href='https://spee.ch/login'>on the channel page</a>.</p>
<p>Published files will be wiewable and embeddable with any web browser and accesible in the LBRY app. You can also use spee.ch to view free and non-NSFW content published on LBRY network from LBRY app. You just need to replace "lbry://" with "http://spee.ch/" in the URL.</p>
</Row>
<Row>
@ -36,8 +36,8 @@ class FaqPage extends React.Component {
</Row>
<Row>
<h3>Contribute</h3>
<p>If you have an idea for your own spee.ch-like site on top of LBRY, fork our <a href="https://github.com/lbryio/spee.ch">github repo</a> and go to town!</p>
<p>If you want to improve spee.ch, join <a href="https://chat.lbry.io/">our discord channel</a> or solve one of our <a href="https://github.com/lbryio/spee.ch/issues">github issues</a>.</p>
<p>If you have an idea for your own spee.ch-like site on top of LBRY, fork our <a href='https://github.com/lbryio/spee.ch'>github repo</a> and go to town!</p>
<p>If you want to improve spee.ch, join <a href='https://chat.lbry.io/'>our discord channel</a> or solve one of our <a href='https://github.com/lbryio/spee.ch/issues'>github issues</a>.</p>
</Row>
</PageLayout>
);

View file

@ -16,7 +16,7 @@ class PopularPage extends React.Component {
const { homeChannel } = this.props;
return (
<ContentPageWrapper homeChannel={homeChannel} />
)
);
}
};

View file

@ -41,9 +41,9 @@ const initialState = {
license : '',
nsfw : false,
},
isUpdate: false,
isUpdate : false,
hasChanged: false,
thumbnail: null,
thumbnail : null,
thumbnailChannel,
thumbnailChannelId,
};
@ -52,7 +52,7 @@ export default function (state = initialState, action) {
switch (action.type) {
case actions.FILE_SELECTED:
return Object.assign({}, state.isUpdate ? state : initialState, { // note: clears to initial state
file: action.data,
file : action.data,
hasChanged: true,
});
case actions.FILE_CLEAR:
@ -66,13 +66,13 @@ export default function (state = initialState, action) {
});
case actions.CLAIM_UPDATE:
return Object.assign({}, state, {
claim: action.data,
claim : action.data,
hasChanged: true,
});
case actions.SET_PUBLISH_IN_CHANNEL:
return Object.assign({}, state, {
publishInChannel: action.channel,
hasChanged: true,
hasChanged : true,
});
case actions.PUBLISH_STATUS_UPDATE:
return Object.assign({}, state, {
@ -96,7 +96,7 @@ export default function (state = initialState, action) {
case actions.THUMBNAIL_NEW:
return {
...state,
thumbnail: action.data,
thumbnail : action.data,
hasChanged: true,
};
case actions.SET_UPDATE_TRUE:

View file

@ -59,9 +59,9 @@ export function * newAssetRequest (action) {
try {
const { loggedInChannel } = yield select(selectChannelState);
if(loggedInChannel && loggedInChannel.longId) {
if (loggedInChannel && loggedInChannel.longId) {
const {
data: claimViewData
data: claimViewData,
} = yield call(getClaimViews, longId);
claimViews = claimViewData[longId] || 0;
@ -82,18 +82,18 @@ export function * updateAssetViews (action) {
const channelId = loggedInChannel.longId;
for(let key in showState.assetList) {
for (let key in showState.assetList) {
let asset = showState.assetList[key];
if(asset.claimData && asset.claimData.certificateId === channelId) {
if (asset.claimData && asset.claimData.certificateId === channelId) {
const longId = asset.claimId;
const assetKey = `a#${asset.name}#${longId}`;
let claimViews = null;
if(longId) {
if (longId) {
const {
data: claimViewData
data: claimViewData,
} = yield call(getClaimViews, longId);
claimViews = claimViewData[longId] || 0;
@ -103,12 +103,12 @@ export function * updateAssetViews (action) {
}
}
} catch (error) {
console.log(error)
console.log(error);
}
};
export function * watchUpdateAssetViews (action) {
yield takeLatest(channelActions.CHANNEL_UPDATE, updateAssetViews)
yield takeLatest(channelActions.CHANNEL_UPDATE, updateAssetViews);
};
export function * watchNewAssetRequest () {

View file

@ -1,7 +1,7 @@
import {call, put, select, takeLatest} from 'redux-saga/effects';
import * as actions from '../constants/show_action_types';
import { addNewChannelToChannelList, addRequestToRequestList, onRequestError, onRequestUpdate, updateChannelClaims } from '../actions/show';
//import { getChannelClaims, getChannelData } from '../api/channelApi';
// import { getChannelClaims, getChannelData } from '../api/channelApi';
import { getSpecialAssetClaims } from '../api/specialAssetApi';
import { selectShowState } from '../selectors/show';
import { selectSiteHost } from '../selectors/site';

View file

@ -30,8 +30,8 @@ function * parseAndUpdateIdentifierAndClaim (modifier, claim) {
}
function * parseAndUpdateClaimOnly (claim) {
if(/^special\:/.test(claim) === true) {
const assetName = /special\:(.*)/.exec(claim)[1];
if (/^special:/.test(claim) === true) {
const assetName = /special:(.*)/.exec(claim)[1];
return yield call(newSpecialAssetRequest, onNewSpecialAssetRequest(assetName));
} else {
// this could be a request for an asset or a channel page

View file

@ -1,261 +0,0 @@
# Create Your Own Spee.ch on Ubuntu 16.x 18.x VPS
# Overview
## Prerequisites
* UBUNTU 16+ VPS with root access
* Your login info ready
* Domain name with @ and www pointed at your VPS IP
* Email Address
* Ability to send 5+ LBRY credits to an address
* Noncommercial use (highly suggested, but you still _could_)
* We recommend that you fork Spee.ch so that you can customize the site.
## You'll be installing:
* MYSQL DB
* Default Port
* NODE v8+
* HTTPS PROXY SERVER
* Caddy for personal use
* Exposed ports: 22, 80, 443, 3333, 4444
* Reverse proxies to App on 3000
* SPEE.CH
* LBRYNET DAEMON
# 1. Update OS and install packages
## OS
`sudo apt-get update -y`
`ulimit -n 8192`
## Git
`sudo apt-get install git -y`
## NODE v8
`wget -qO- https://deb.nodesource.com/setup_8.x | sudo -E bash -`
`sudo apt-get install -y nodejs`
## Curl, Tmux, Unzip, ffmpeg
`sudo apt-get install curl tmux unzip ffmpeg -y`
## Grab config files
`git clone https://github.com/jessopb/speechconfigs.git`
`chmod 640 -R ~/speechconfigs`
# 2 Secure the UFW firewall
## UFW
`sudo ufw status`
`sudo ufw allow 80`
`sudo ufw allow 443`
`sudo ufw allow 22`
`sudo ufw allow 3333`
`sudo ufw allow 4444`
`sudo ufw default allow outgoing`
`sudo ufw default deny incoming`
`sudo ufw show added`
`sudo ufw enable` (yes, you've allowed ssh 22)
`sudo ufw status`
# 3 Install Caddy to handle https and reverse proxy
## Get Caddy
`curl https://getcaddy.com | bash -s personal`
## Set up Caddy
`mkdir -p /opt/caddy/logs/`
`mkdir -p /opt/caddy/store/`
`cp ~/speechconfigs/caddy/Caddyfile.speechsample ~/speechconfigs/caddy/Caddyfile`
`nano ~/speechconfigs/caddy/Caddyfile`
( Change {{EXAMPLE.COM}} to YOURDOMAIN.COM )
`cp ~/speechconfigs/caddy/Caddyfile /opt/caddy/`
## Set up Caddy to run as systemd service
`cp ~/speechconfigs/caddy/caddy.service /etc/systemd/system/caddy.service`
`chmod 644 /etc/systemd/system/caddy.service`
`chown -R www-data:www-data /opt/caddy/`
`setcap 'cap_net_bind_service=+ep' /usr/local/bin/caddy`
`systemctl daemon-reload`
`systemctl start caddy`
`systemctl status caddy`
At this point, navigating to yourdomain.com should give you a 502 bad gateway error. That's good!
# 4 Set up MySql
## Install MySql
`sudo apt-get install mysql-server -y`
( enter blank password each time )
`sudo systemctl status mysql` (q to exit)
## Secure Setup
`sudo mysql_secure_installation`
* No to password validation
* Y to all other options
* password abcd1234
## Login to mysql from root to complete setup:
`mysql` to enter mysql> console
mysql> `ALTER USER 'root'@'localhost' IDENTIFIED WITH mysql_native_password BY 'abcd1234';`
mysql> `FLUSH PRIVILEGES;`
Control+D to exit
Verify:
`mysql -u root -p` and then entering your password abcd1234 should give you the mysql> shell
# 5 Get Lbrynet Daemon
### TODO: Enable something like sudo systemctl start lbrynet so it runs as www-data
## Enter tmux
`tmux`
* Ctrl+b, d detaches leaving session running.
* ~# `tmux`, Ctrl+b, ( goes back to that session.
## Get the daemon
`wget -O ~/latest_daemon.zip https://lbry.io/get/lbrynet.linux.zip`
`unzip -o -u ~/latest_daemon.zip`
## Start the daemon
~# `./lbrynet start`
## Detach tmux session
`Control+b, then d` to leave lbrynet daemon running and exit the session
`tmux` if you want to get back into tmux
`Control+b, then ) in tmux` to cycle back to your lbrynet session to see output
## Display wallet address to which to send 5+ LBC.
### These commands work when `lbrynet start` is already running in another tmux
`./lbrynet commands` to check out the current commands
`./lbrynet address_list` to get your wallet address
`Ctrl + Shift + C` after highlighting an address to copy.
Use a LBRY app or daemon to send LBC to the address. Sending LBC may take a few seconds or longer.
`./lbrynet account_balance` to check your balance after you've sent LBC.
# 6 Set up spee.ch
## Clone speech either from your own fork, or from the lbryio/spee.ch repo.
### Developers
SSH?
`git clone git@github.com:{{youraccount}}/spee.ch`
HTTPS?
`git clone https://github.com/{{youraccount}}/spee.ch.git`
### Publishers
`git clone -b release https://github.com/lbryio/spee.ch`
## Build it
`cd spee.ch`
~/spee.ch# `npm install`
`cp ~/speechconfigs/speech/chainqueryConfig.json ~/spee.ch/site/config/chainqueryConfig.json`
~/spee.ch# `npm run configure` (once your wallet balance has cleared)
* DATABASE: lbry
* USER NAME: root
* PASSWORD: abcd1234
* PORT: 3000
* Site Title: Your Site Name
* Enter your site's domain name: https://freezepeach.fun (this must include https://)
* Enter a directory where uploads should be stored: (/home/lbry/Uploads)
~/spee.ch/# `npm run start`
## Try it
Navigate to yourdomain.fun!
### 7 Maintenance Procedures
* Change wallet
* TODO
* Change daemon
* wget daemon from https://github.com/lbryio/lbry/releases
* wget --quiet -O ~/your_name_daemon.zip https://your_copied_file_path.zip
* rm ./lbrynet
* unzip -o -u ~/your_name_daemon.zip
### 7 TODO
* Don't run as root
* Use Dockerized Spee.ch and Lbrynet
* https://github.com/lbryio/lbry-docker/tree/master/www.spee.ch
* https://github.com/lbryio/lbry-docker/tree/master/lbrynet-daemon
* https://blog.hasura.io/an-exhaustive-guide-to-writing-dockerfiles-for-node-js-web-apps-bbee6bd2f3c4
* https://docs.traefik.io/user-guide/docker-and-lets-encrypt/
* https://docs.traefik.io/configuration/acme/
* Systemd unit files
* https://nodesource.com/blog/running-your-node-js-app-with-systemd-part-1/
* Spee.ch
* sudo nano /lib/systemd/system/speech.service
* Lbrynet
* sudo nano /lib/systemd/system/lbrynet.service
```
[Unit]
Description=hello_env.js - making your environment variables read
Documentation=https://example.com
After=network.target
[Service]
Environment=NODE_PORT=3001
Type=simple
User=ubuntu
ExecStart=node path/server.js
Restart=on-failure
[Install]
WantedBy=multi-user.target
```
* Provide spee.ch build releases?
* Provide system to configure chainqueryConfig.json
* Clone speech to stripped version, streamline customization
* Automate for testing

View file

@ -19,12 +19,12 @@
* Https proxy server
* Caddy for personal use
* Exposed ports: 22, 80, 443, 3333, 4444
* Reverse proxies 443 to App on 3000
* Reverse proxies 80 redirected to 443 to App on 3000
* Spee.ch started on port 3000
* Lbrynet DAEMON started on ports 3333 and 4444
# 1. Update OS and install packages
# 1. Setup OS and install dependencies
## OS
### Secure your server by creating a non-root sudoer.
@ -59,25 +59,21 @@ Log in as username@domainname or username@ip_address
## Clone speech either from your own fork, or from the lbryio/spee.ch repo.
### For Developers or those with their own forked repo
* For Developers - our master branch
`git clone https://github.com/lbryio/spee.ch`
`git clone -b master https://github.com/lbryio/spee.ch`
SSH:
`git clone git@github.com:{{youraccount}}/spee.ch`
HTTPS:
* For Developers - your fork
`git clone https://github.com/{{youraccount}}/spee.ch.git`
### For Publishers and Content creators
`git clone git@github.com:{{youraccount}}/spee.ch`
* For Publishers and Content creators - stable release
`git clone -b release https://github.com/lbryio/spee.ch`
### Prepare the scripts
## Prepare the scripts
`chmod 750 -R ~/spee.ch/docs/setup`
@ -181,9 +177,9 @@ This just allows you to run multiple things in different sessions. Useful for ma
## Detach tmux session
* `Control + b`, then `d` to leave lbrynet daemon running and exit the session
`tmux` if you want to get back into tmux
* `tmux` if you want to get back into tmux
`Control+b`, then `)` while in tmux session to cycle back to your lbrynet session to see output
* `Control+b`, then `)` while in tmux session to cycle back to your lbrynet session to see output
## Display wallet address to which to send 5+ LBC.
@ -206,28 +202,31 @@ This just allows you to run multiple things in different sessions. Useful for ma
# 6 Set up spee.ch
## Build it
`cd spee.ch`
`cd spee.ch`
~/spee.ch:
~/spee.ch:
`npm install`
`npm install`
`cp ~/spee.ch/docs/setup/conf/speech/chainqueryConfig.json ~/spee.ch/site/config/chainqueryConfig.json`
_note: if you have installed your own local chainquery instance, you will need to specify it in your own /site/config/chainqueryConfig.json_
Once your wallet has a balance, run this:
`npm run configure`
`npm run configure` (once your wallet balance has cleared)
* Database: lbry
* Username: root
* Password: abcd1234
* Port: 3000
* Site Title: Your Site Name
* Enter your site's domain name: https://freezepeach.fun (this must include 'https://')
* Enter your site's domain name: https://example.com or http://localhost
* Enter a directory where uploads should be stored: (/home/lbry/Uploads)
`npm run start`
## Try it
Navigate to yourdomain.fun!
Navigate to example.com!
# 7 Production

View file

@ -25,14 +25,14 @@ $ npm update
* Create a config file called `spee.ch` in */etc/nginx/sites-available*
* see example: [config file](https://github.com/lbryio/spee.ch/blob/master/nginx_example_config).
* Rename all mentions of *sub.domain.com* with your subdomain name.
* Run this command to link the sites-available.
* Run this command to link the sites-available.
`$ ln -s /etc/nginx/sites-available/speech /etc/nginx/sites-enabled/speech`
* Restart Nginx.
`$ sudo service nginx restart`
* Try visiting your website.
* If Nginx is working, you should get a **502** error because there is nothing running on **3000** yet.
* If you get the default Nginx greeting, you have not properly configured it to serve from port **3000**.
@ -40,29 +40,29 @@ $ npm update
* Caddy tutorial: [https://caddyserver.com/tutorial](https://caddyserver.com/tutorial)
### MySql
* Install MySql
* Install MySql
* [Instructions](https://dev.mysql.com/doc/mysql-installation-excerpt/5.7/en)
* Create user **root**.
* Note: We are going to access **mysql** as **root** for this setup, but you may want to create a separate user in the future.
* Keep your password somewhere handy!
* Create a database called **lbry** and make sure you can use it.
`CREATE DATABASE lbry;`
`$ USE lbry;`
`$ exit; (or press ctl + d)`
* Try logging into mysql.
`$ mysql -u username -p`
* If you are using a **LBRY** server, your **password** is the one provided for **ssh**.
* Note: If it fails, try using `sudo`.
## 2. Install & Run the LBRY Daemon
### Install **lbrynet**
### Install **lbrynet**
_note: if you have a server from LBRY, lbrynet is already installed, you can skip to 2.4._
```
$ wget --quiet -O ~/latest_daemon.zip https://lbry.io/get/lbrynet.linux.zip
@ -75,9 +75,9 @@ $ ./lbrynet-daemon
```
### Detach (exit) the tmux session and leave **lbrynet** running in the background.
press `ctrl` + `b` then `d` to detach
### Get LBC!
Get a list of your wallets:
@ -93,7 +93,7 @@ Check your balance again:
```
$ ~/lbrynet-cli wallet_balance
```
You should have **LBC**!
### Install ffmpeg
@ -128,10 +128,10 @@ $ npm run configure
Check your site configs
```
$ cd config/
$ cd /site/config/
$ nano siteConfig.json
```
### Build & run
Run the below command to transpile, build, and start your server.
@ -141,15 +141,15 @@ $ npm run start
_**Note:** if you had to use `sudo` to login to **mysql** above, you may have issues with this step._
Spee.ch should now be running !
Spee.ch should now be running !
Visit your site in the browser. Try publishing an image!
## 4. Bonus:
### Install PM2 and run your server with PM2
Install PM2
```
$ sudo npm i -g pm2
@ -161,14 +161,3 @@ $ pm2 start server.js
```
Visit your site and see if it is running!
### Sync Your Spee.ch Instance with the full **Blockchain**
Install **lbrycrdd**
Install **lbry-decoder**
Start **lbry-decoder**
Install & run [spee.ch-sync](https://github.com/billbitt/spee.ch-sync)

8983
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@ -11,7 +11,6 @@
"configure": "node cli/configure.js",
"fix": "eslint . --fix",
"lint": "eslint .",
"precommit": "eslint .",
"prestart": "builder run bundle",
"start": "node server.js",
"start:build": "builder run start",
@ -50,33 +49,33 @@
"axios": "^0.18.0",
"bcrypt": "^2.0.1",
"body-parser": "^1.18.3",
"connect-multiparty": "^2.1.1",
"connect-multiparty": "^2.2.0",
"cookie-session": "^2.0.0-beta.3",
"express": "^4.15.2",
"express": "^4.16.4",
"express-handlebars": "^3.0.0",
"express-http-context": "^1.1.0",
"express-http-context": "^1.2.0",
"get-video-dimensions": "^1.0.0",
"helmet": "^3.13.0",
"helmet": "^3.15.0",
"image-size": "^0.6.3",
"inquirer": "^5.2.0",
"ip": "^1.1.5",
"make-dir": "^1.3.0",
"module-alias": "^2.1.0",
"mysql2": "^1.6.1",
"mysql2": "^1.6.4",
"npm": "^6.3.0",
"passport": "^0.4.0",
"passport-local": "^1.0.0",
"prop-types": "^15.6.2",
"react": "^16.4.2",
"react-dom": "^16.4.2",
"react-feather": "^1.1.3",
"react-feather": "^1.1.4",
"react-dom": "^16.6.1",
"react-ga": "^2.5.3",
"react-helmet": "^5.2.0",
"react-redux": "^5.0.6",
"react-redux": "^5.1.1",
"react-router-dom": "^4.3.1",
"redux": "^4.0.0",
"redux-saga": "^0.16.0",
"sequelize": "^4.38.0",
"redux": "^4.0.1",
"redux-saga": "^0.16.2",
"sequelize": "^4.41.1",
"sequelize-cli": "^4.0.0",
"universal-analytics": "^0.4.20",
"webpack": "^3.10.0",
@ -87,43 +86,49 @@
"winston-slack-webhook": "github:billbitt/winston-slack-webhook"
},
"devDependencies": {
"@babel/cli": "^7.0.0",
"@babel/core": "^7.0.0",
"@babel/cli": "^7.1.5",
"@babel/core": "^7.1.5",
"@babel/plugin-proposal-object-rest-spread": "^7.0.0",
"@babel/polyfill": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"@babel/preset-env": "^7.1.5",
"@babel/preset-react": "^7.0.0",
"@babel/preset-stage-2": "^7.0.0",
"@babel/register": "^7.0.0",
"babel-loader": "^7.1.2",
"babel-plugin-module-resolver": "^3.1.1",
"babel-eslint": "9.0.0-beta.3",
"builder": "^4.0.0",
"chai": "^4.1.2",
"chai-http": "^4.0.0",
"cross-fetch": "^2.2.2",
"chai": "^4.2.0",
"chai-http": "^4.2.0",
"cross-fetch": "^2.2.3",
"css-loader": "^0.28.11",
"eslint": "4.19.1",
"eslint-config-standard": "^11.0.0",
"eslint-config-standard-jsx": "^5.0.0",
"eslint-plugin-import": "^2.12.0",
"eslint-plugin-node": "^6.0.1",
"eslint-plugin-promise": "^3.8.0",
"eslint-plugin-react": "^7.9.1",
"eslint-plugin-standard": "^3.0.1",
"eslint": "5.9.0",
"eslint-config-standard": "^12.0.0",
"eslint-config-standard-jsx": "^6.0.2",
"eslint-plugin-import": "^2.14.0",
"eslint-plugin-node": "^8.0.0",
"eslint-plugin-promise": "^4.0.1",
"eslint-plugin-react": "^7.11.1",
"eslint-plugin-standard": "^4.0.0",
"extract-text-webpack-plugin": "^3.0.2",
"file-loader": "^1.1.11",
"husky": "^0.14.3",
"husky": "^1.1.3",
"mocha": "^5.2.0",
"ndb": "^1.0.24",
"node-sass": "^4.9.3",
"nodemon": "^1.17.5",
"ndb": "^1.0.26",
"node-sass": "^4.10.0",
"nodemon": "^1.18.6",
"redux-devtools": "^3.4.1",
"regenerator-transform": "^0.13.0",
"rollup": "^0.66.2",
"regenerator-transform": "^0.13.3",
"rollup": "^0.67.0",
"sass-loader": "^7.1.0",
"sequelize-cli": "^4.0.0",
"style-loader": "^0.21.0",
"url-loader": "^1.0.1",
"wait-on": "^3.1.0"
"style-loader": "^0.23.1",
"url-loader": "^1.1.2",
"wait-on": "^3.2.0"
},
"husky": {
"hooks": {
"pre-commit": "eslint ."
}
}
}

View file

@ -4,14 +4,14 @@ const chainquery = require('chainquery');
const getChannelData = async (channelName, channelClaimId) => {
let longChannelClaimId = await chainquery.claim.queries.getLongClaimId(channelName, channelClaimId).catch(() => false);
if(!longChannelClaimId) {
if (!longChannelClaimId) {
// Allow an error to throw here if this fails
longChannelClaimId = await db.Certificate.getLongChannelId(channelName, channelClaimId);
}
let shortChannelClaimId = await chainquery.claim.queries.getShortClaimIdFromLongClaimId(longChannelClaimId, channelName).catch(() => false);
if(!shortChannelClaimId) {
if (!shortChannelClaimId) {
shortChannelClaimId = await db.Certificate.getShortChannelIdFromLongChannelId(longChannelClaimId, channelName);
}

View file

@ -12,7 +12,7 @@ const channelShortIdRoute = async ({ ip, originalUrl, params }, res) => {
try {
let shortId = await chainquery.claim.queries.getShortClaimIdFromLongClaimId(params.longId, params.name).catch(() => false);
if(!shortId) {
if (!shortId) {
shortId = await db.Certificate.getShortChannelIdFromLongChannelId(params.longId, params.name);
}

View file

@ -7,7 +7,7 @@ const claimAvailability = async (name) => {
const claimAddresses = additionalClaimAddresses || [];
claimAddresses.push(primaryClaimAddress);
// find any records where the name is used
return await chainquery.claim
return chainquery.claim
.findAll({
attributes: ['claim_address'],
where : {

View file

@ -10,7 +10,6 @@ const db = require('server/models');
*/
const claimData = async ({ ip, originalUrl, body, params }, res) => {
try {
const resolvedClaim = await fetchClaimData(params);
@ -25,7 +24,7 @@ const claimData = async ({ ip, originalUrl, body, params }, res) => {
success: true,
data : await getClaimData(resolvedClaim),
});
} catch(error) {
} catch (error) {
handleErrorResponse(originalUrl, ip, error, res);
}
};

View file

@ -18,16 +18,16 @@ const claimGet = async ({ ip, originalUrl, params }, res) => {
try {
let claimData = await chainquery.claim.queries.resolveClaim(name, claimId).catch(() => {});
if(!claimData) {
if (!claimData) {
claimData = await db.Claim.resolveClaim(name, claimId);
}
if(!claimData) {
if (!claimData) {
throw new Error('No matching uri found in Claim table');
}
let lbrynetResult = await getClaim(`${name}#${claimId}`);
if(!lbrynetResult) {
if (!lbrynetResult) {
throw new Error(`Unable to Get ${name}#${claimId}`);
}
@ -38,8 +38,8 @@ const claimGet = async ({ ip, originalUrl, params }, res) => {
try {
await waitOn({
resources: [ lbrynetResult.file_name ],
delay: 100,
timeout: 10000, // 10 seconds
delay : 100,
timeout : 10000, // 10 seconds
});
} catch (e) {}
@ -49,7 +49,7 @@ const claimGet = async ({ ip, originalUrl, params }, res) => {
message,
completed,
});
} catch(error) {
} catch (error) {
handleErrorResponse(originalUrl, ip, error, res);
}
};

View file

@ -125,7 +125,7 @@ const claimPublish = ({ body, files, headers, ip, originalUrl, user, tor }, res)
if (channelName) {
canonicalUrl = createCanonicalLink({ asset: { ...claimData, channelShortId: shortId } });
} else {
canonicalUrl = createCanonicalLink({ asset: { ...claimData, shortId } })
canonicalUrl = createCanonicalLink({ asset: { ...claimData, shortId } });
}
res.status(200).json({

View file

@ -12,12 +12,12 @@ const claimShortId = async ({ ip, originalUrl, body, params }, res) => {
try {
let shortId = await chainquery.claim.queries.getShortClaimIdFromLongClaimId(params.longId, params.name).catch(() => {});
if(!shortId) {
if (!shortId) {
shortId = await db.Claim.getShortClaimIdFromLongClaimId(params.longId, params.name);
}
res.status(200).json({success: true, data: shortId});
} catch(error) {
} catch (error) {
handleErrorResponse(originalUrl, ip, error, res);
}
};

View file

@ -107,7 +107,6 @@ const claimUpdate = ({ body, files, headers, ip, originalUrl, user, tor }, res)
return [null, null];
})
.then(([fileResult, resolution]) => {
metadata = Object.assign({}, {
title : claimRecord.title,
description: claimRecord.description,
@ -164,7 +163,7 @@ const claimUpdate = ({ body, files, headers, ip, originalUrl, user, tor }, res)
if (channelName) {
canonicalUrl = createCanonicalLink({ asset: { ...publishResult, channelShortId: shortId } });
} else {
canonicalUrl = createCanonicalLink({ asset: { ...publishResult, shortId } })
canonicalUrl = createCanonicalLink({ asset: { ...publishResult, shortId } });
}
if (publishResult.error) {
@ -181,10 +180,10 @@ const claimUpdate = ({ body, files, headers, ip, originalUrl, user, tor }, res)
data : {
name,
claimId,
url : `${details.host}${canonicalUrl}`, // for backwards compatability with app
showUrl : `${details.host}${canonicalUrl}`,
serveUrl: `${details.host}${canonicalUrl}${fileExtension}`,
pushTo : canonicalUrl,
url : `${details.host}${canonicalUrl}`, // for backwards compatability with app
showUrl : `${details.host}${canonicalUrl}`,
serveUrl : `${details.host}${canonicalUrl}${fileExtension}`,
pushTo : canonicalUrl,
claimData: publishResult,
},
});

View file

@ -20,7 +20,7 @@ const claimViews = async ({ ip, originalUrl, body, params }, res) => {
[claimId]: viewCount,
},
});
} catch(error) {
} catch (error) {
handleErrorResponse(originalUrl, ip, error, res);
}
};

View file

@ -14,21 +14,21 @@ const channelClaims = async ({ ip, originalUrl, body, params }, res) => {
page,
} = params;
if(name === 'trending') {
if (name === 'trending') {
const result = await db.Trending.getTrendingClaims();
const claims = await Promise.all(result.map((claim) => getClaimData(claim)));
return res.status(200).json({
success: true,
data: {
channelName: name,
data : {
channelName : name,
claims,
longChannelClaimId: name,
currentPage: 1,
nextPage: null,
previousPage: null,
totalPages: 1,
totalResults: claims.length,
}
currentPage : 1,
nextPage : null,
previousPage : null,
totalPages : 1,
totalResults : claims.length,
},
});
}

View file

@ -28,7 +28,7 @@ const getClaimIdAndServeAsset = (channelName, channelClaimId, claimName, claimId
logger.debug('Full claim id:', claimId);
return db.Claim.findOne({
where: {
name : claimName,
name: claimName,
claimId,
},
});
@ -47,17 +47,15 @@ const getClaimIdAndServeAsset = (channelName, channelClaimId, claimName, claimId
logger.debug('Outpoint:', outpoint);
return db.Blocked.isNotBlocked(outpoint).then(() => {
// If content was found, is approved, and not blocked - log a view.
if(headers && headers['user-agent'] && /LBRY/.test(headers['user-agent']) === false) {
if (headers && headers['user-agent'] && /LBRY/.test(headers['user-agent']) === false) {
db.Views.create({
time: Date.now(),
isChannel: false,
claimId: claimDataValues.claim_id || claimDataValues.claimId,
time : Date.now(),
isChannel : false,
claimId : claimDataValues.claim_id || claimDataValues.claimId,
publisherId: claimDataValues.publisher_id || claimDataValues.certificateId,
ip,
});
}
return;
});
})
.then(() => {

View file

@ -4,57 +4,52 @@ const {
} = require('@config/siteConfig');
const padSizes = {
small: 'padSmall',
small : 'padSmall',
medium: 'padMedium',
large: 'padLarge',
large : 'padLarge',
};
const argumentProcessors = {
'bottom': async (config) => {
config.classNames.push('bottom');
return;
},
'right': async (config) => {
config.classNames.push('right');
return;
},
'pad': async (config, val) => {
config.classNames.push(padSizes[val]);
return;
},
'logoClaim': async (config, val) => {
config.logoUrl = `${host}/${val}`;
return;
},
'link': async (config, val) => {
config.logoLink = val;
return;
}
},
};
const parseLogoConfigParam = async (rawConfig) => {
if(rawConfig) {
if (rawConfig) {
let parsedConfig = {
classNames: ['logoLink'],
logoUrl: thumbnail,
logoUrl : thumbnail,
};
let splitConfig;
try {
splitConfig = rawConfig.split(',');
} catch(e) { }
} catch (e) { }
if(!splitConfig) {
if (!splitConfig) {
return false;
}
for(let i = 0; i < splitConfig.length; i++) {
for (let i = 0; i < splitConfig.length; i++) {
let currentArgument = splitConfig[i];
if(argumentProcessors[currentArgument]) {
if (argumentProcessors[currentArgument]) {
await argumentProcessors[currentArgument](parsedConfig);
} else {
const splitArgument = currentArgument.split(':');
if(argumentProcessors[splitArgument[0]]) {
if (argumentProcessors[splitArgument[0]]) {
await argumentProcessors[splitArgument[0]](parsedConfig, splitArgument[1]);
}
}
@ -66,7 +61,7 @@ const parseLogoConfigParam = async (rawConfig) => {
}
return false;
}
};
const sendVideoEmbedPage = async ({ params }, res) => {
let {

View file

@ -8,13 +8,13 @@ const getClaimIdByChannel = async (channelName, channelClaimId, claimName) => {
let channelId = await chainquery.claim.queries.getLongClaimId(channelName, channelClaimId);
if(channelId === null) {
if (channelId === null) {
channelId = await db.Certificate.getLongChannelId(channelName, channelClaimId);
}
let claimId = await chainquery.claim.queries.getClaimIdByLongChannelId(channelId, claimName);
if(claimId === null) {
if (claimId === null) {
claimId = db.Claim.getClaimIdByLongChannelId(channelId, claimName);
}
@ -24,11 +24,11 @@ const getClaimIdByChannel = async (channelName, channelClaimId, claimName) => {
const getClaimId = async (channelName, channelClaimId, name, claimId) => {
logger.debug(`getClaimId: ${channelName}, ${channelClaimId}, ${name}, ${claimId})`);
if (channelName) {
return await getClaimIdByChannel(channelName, channelClaimId, name);
return getClaimIdByChannel(channelName, channelClaimId, name);
} else {
let claimIdResult = await chainquery.claim.queries.getLongClaimId(name, claimId);
if(!claimIdResult) {
if (!claimIdResult) {
claimIdResult = await db.Claim.getLongClaimId(name, claimId);
}

View file

@ -48,7 +48,7 @@ function Server () {
app.enable('trust proxy');
app.use((req, res, next) => {
if(req.get('User-Agent') === 'Mozilla/5.0 (Windows NT 5.1; rv:14.0) Gecko/20120405 Firefox/14.0a1') {
if (req.get('User-Agent') === 'Mozilla/5.0 (Windows NT 5.1; rv:14.0) Gecko/20120405 Firefox/14.0a1') {
res.status(403).send('<h1>Forbidden</h1>If you are seeing this by mistake, please contact us using <a href="https://chat.lbry.io/">https://chat.lbry.io/</a>');
res.end();
} else {
@ -182,7 +182,7 @@ function Server () {
.then(() => {
logger.info('Spee.ch startup is complete');
setInterval(processTrending, 30 * 60000) // 30 minutes
setInterval(processTrending, 30 * 60000); // 30 minutes
})
.catch(error => {
if (error.code === 'ECONNREFUSED') {

View file

@ -2,19 +2,19 @@ const fs = require('fs');
const logger = require('winston');
const { publishing: { publishingChannelWhitelist } } = require('@config/siteConfig');
const ipBanFile = './config/ipBan.txt';
const ipBanFile = './site/config/ipBan.txt';
const forbiddenMessage = '<h1>Forbidden</h1>If you are seeing this by mistake, please contact us using <a href="https://chat.lbry.io/">https://chat.lbry.io/</a>';
let ipCounts = {};
let blockedAddresses = [];
if(fs.existsSync(ipBanFile)) {
if (fs.existsSync(ipBanFile)) {
const lineReader = require('readline').createInterface({
input: require('fs').createReadStream(ipBanFile),
});
lineReader.on('line', (line) => {
if(line && line !== '') {
if (line && line !== '') {
blockedAddresses.push(line);
}
});
@ -23,7 +23,7 @@ if(fs.existsSync(ipBanFile)) {
const autoblockPublishMiddleware = (req, res, next) => {
let ip = (req.headers['x-forwarded-for'] || req.connection.remoteAddress).split(/,\s?/)[0];
if(blockedAddresses.indexOf(ip) !== -1) {
if (blockedAddresses.indexOf(ip) !== -1) {
res.status(403).send(forbiddenMessage);
res.end();
@ -33,15 +33,15 @@ const autoblockPublishMiddleware = (req, res, next) => {
let count = ipCounts[ip] = (ipCounts[ip] || 0) + 1;
setTimeout(() => {
if(ipCounts[ip]) {
if (ipCounts[ip]) {
ipCounts[ip]--;
if(ipCounts[ip] === 0) {
if (ipCounts[ip] === 0) {
delete ipCounts[ip];
}
}
}, 600000 /* 10 minute retainer */)
}, 600000 /* 10 minute retainer */);
if(count === 10) {
if (count === 10) {
logger.error(`Banning IP: ${ip}`);
blockedAddresses.push(ip);
res.status(403).send(forbiddenMessage);
@ -51,19 +51,19 @@ const autoblockPublishMiddleware = (req, res, next) => {
} else {
next();
}
}
};
const autoblockPublishBodyMiddleware = (req, res, next) => {
if(req.body && publishingChannelWhitelist) {
if (req.body && publishingChannelWhitelist) {
let ip = (req.headers['x-forwarded-for'] || req.connection.remoteAddress).split(/,\s?/)[0];
const { channelName } = req.body;
if(channelName && publishingChannelWhitelist.indexOf(channelName) !== -1) {
if (channelName && publishingChannelWhitelist.indexOf(channelName.toLowerCase()) !== -1) {
delete ipCounts[ip];
}
}
next();
}
};
module.exports = {
autoblockPublishMiddleware,

View file

@ -2,39 +2,39 @@ const logger = require('winston');
const db = require('../models');
const httpContext = require('express-http-context');
function logMetricsMiddleware(req, res, next) {
function logMetricsMiddleware (req, res, next) {
res.on('finish', () => {
const userAgent = req.get('user-agent');
const routePath = httpContext.get('routePath');
let referrer = req.get('referrer');
if(referrer && referrer.length > 255) {
if (referrer && referrer.length > 255) {
try {
// Attempt to "safely" clamp long URLs
referrer = /(.*?)#.*/.exec(referrer)[1];
} catch(e) {
} catch (e) {
// Cheap forced string conversion & clamp
referrer = new String(referrer);
referrer = String(referrer);
referrer = referrer.substr(0, 255);
}
if(referrer.length > 255) {
if (referrer.length > 255) {
logger.warn('Request refferer exceeds 255 characters:', referrer);
referrer = referrer.substring(0, 255);
}
}
db.Metrics.create({
time: Date.now(),
isInternal: /node\-fetch/.test(userAgent),
isChannel: res.isChannel,
claimId: res.claimId,
routePath: httpContext.get('routePath'),
params: JSON.stringify(req.params),
ip: req.headers['x-forwarded-for'] || req.connection.remoteAddress,
request: req.url,
routeData: JSON.stringify(httpContext.get('routeData')),
time : Date.now(),
isInternal: /node-fetch/.test(userAgent),
isChannel : res.isChannel,
claimId : res.claimId,
routePath : httpContext.get('routePath'),
params : JSON.stringify(req.params),
ip : req.headers['x-forwarded-for'] || req.connection.remoteAddress,
request : req.url,
routeData : JSON.stringify(httpContext.get('routeData')),
referrer,
userAgent,
});
@ -43,7 +43,7 @@ function logMetricsMiddleware(req, res, next) {
next();
}
function setRouteDataInContextMiddleware(routePath, routeData) {
function setRouteDataInContextMiddleware (routePath, routeData) {
return function (req, res, next) {
httpContext.set('routePath', routePath);
httpContext.set('routeData', routeData);

View file

@ -3,49 +3,49 @@ module.exports = (sequelize, { BOOLEAN, DATE, STRING }) => {
'Metrics',
{
time: {
type: DATE(6),
type : DATE(6),
defaultValue: sequelize.NOW,
},
isInternal: {
type: BOOLEAN,
},
isChannel: {
type: BOOLEAN,
type : BOOLEAN,
defaultValue: false,
},
claimId: {
type: STRING,
type : STRING,
defaultValue: null,
},
ip: {
type: STRING,
type : STRING,
defaultValue: null,
},
request: {
type: STRING,
type : STRING,
defaultValue: null,
},
userAgent: {
type: STRING,
type : STRING,
defaultValue: null,
},
referrer: {
type: STRING,
type : STRING,
defaultValue: null,
},
routePath: {
type: STRING,
type : STRING,
defaultValue: null,
},
params: {
type: STRING,
type : STRING,
defaultValue: null,
}
},
},
{
freezeTableName: true,
timestamps: false, // don't use default timestamps columns
indexes: [
timestamps : false, // don't use default timestamps columns
indexes : [
{
fields: ['isInternal', 'isChannel', 'time', 'claimId', 'routePath'],
},

View file

@ -5,43 +5,43 @@ module.exports = (sequelize, { BOOLEAN, DATE, FLOAT, INTEGER, STRING }) => {
'Trending',
{
time: { /* TODO: Historical analysis and log roll */
type: DATE(6),
type : DATE(6),
defaultValue: sequelize.NOW,
},
isChannel: {
type: BOOLEAN,
type : BOOLEAN,
defaultValue: false,
},
claimId: {
type: STRING,
type : STRING,
defaultValue: null,
},
publisherId: {
type: STRING,
type : STRING,
defaultValue: null,
},
intervalViews: {
type: INTEGER,
type : INTEGER,
defaultValue: 0,
},
weight: {
type: FLOAT,
type : FLOAT,
defaultValue: 0,
},
zScore: {
type: FLOAT,
type : FLOAT,
defaultValue: 0,
},
pValue: {
type: FLOAT,
type : FLOAT,
defaultValue: 0,
},
// TODO: Calculate t-statistics
},
{
freezeTableName: true,
timestamps: false, // don't use default timestamps columns
indexes: [
timestamps : false, // don't use default timestamps columns
indexes : [
{
fields: ['claimId'],
},
@ -55,7 +55,7 @@ module.exports = (sequelize, { BOOLEAN, DATE, FLOAT, INTEGER, STRING }) => {
Trending.getTrendingWeightData = async ({
hours = 2,
minutes = 0,
limit = 20
limit = 20,
} = {}) => {
let time = new Date();
time.setHours(time.getHours() - hours);
@ -65,9 +65,9 @@ module.exports = (sequelize, { BOOLEAN, DATE, FLOAT, INTEGER, STRING }) => {
const selectString = 'DISTINCT(claimId), weight';
const whereString = `isChannel = false and time > '${sqlTime}'`;
const query = `SELECT ${selectString} FROM Trending WHERE ${whereString} ORDER BY weight DESC LIMIT ${limit}`
const query = `SELECT ${selectString} FROM Trending WHERE ${whereString} ORDER BY weight DESC LIMIT ${limit}`;
return await sequelize.query(query, { type: sequelize.QueryTypes.SELECT });
return sequelize.query(query, { type: sequelize.QueryTypes.SELECT });
};
Trending.getTrendingClaims = async () => {
@ -77,7 +77,7 @@ module.exports = (sequelize, { BOOLEAN, DATE, FLOAT, INTEGER, STRING }) => {
const trendingClaims = trendingWeightData.reduce((claims, trendingData) => {
trendingClaimIds.push(trendingData.claimId);
claims[trendingData.claimId] = {
...trendingData
...trendingData,
};
return claims;

View file

@ -9,7 +9,7 @@ const getMean = (numArr) => {
let total = 0;
let length = numArr.length; // store local to reduce potential prop lookups
for(let i = 0; i < length; i++) {
for (let i = 0; i < length; i++) {
total += numArr[i];
}
@ -28,17 +28,17 @@ const getInformationFromValues = (numArr) => {
return {
mean,
standardDeviation: getStandardDeviation(numArr, mean),
}
};
};
const getZScore = (value, mean, sDeviation) => ( sDeviation !== 0 ? (value - mean) / sDeviation : 0 );
const getZScore = (value, mean, sDeviation) => (sDeviation !== 0 ? (value - mean) / sDeviation : 0);
const getFastPValue = (zScore) => {
if(zScore <= MIN_P) {
return 0;
if (zScore <= MIN_P) {
return 0;
}
if(zScore >= MAX_P) {
return 1;
if (zScore >= MAX_P) {
return 1;
}
let factorialK = 1;
@ -46,8 +46,8 @@ const getFastPValue = (zScore) => {
let sum = 0;
let term = 1;
while(Math.abs(term) > MAX_P_PRECISION) {
term = ONE_DIV_SQRT_2PI * Math.pow(-1 , k) * Math.pow(zScore , k) / (2 * k + 1) / Math.pow(2 , k) * Math.pow(zScore, k + 1) / factorialK;
while (Math.abs(term) > MAX_P_PRECISION) {
term = ONE_DIV_SQRT_2PI * Math.pow(-1, k) * Math.pow(zScore, k) / (2 * k + 1) / Math.pow(2, k) * Math.pow(zScore, k + 1) / factorialK;
sum += term;
k++;
factorialK *= k;
@ -57,7 +57,6 @@ const getFastPValue = (zScore) => {
return sum;
};
const getWeight = (zScore, pValue) => (zScore * pValue);
module.exports = {

View file

@ -3,30 +3,30 @@ module.exports = (sequelize, { BOOLEAN, DATE, STRING }) => {
'Views',
{
time: {
type: DATE(6),
type : DATE(6),
defaultValue: sequelize.NOW,
},
isChannel: {
type: BOOLEAN,
type : BOOLEAN,
defaultValue: false,
},
claimId: {
type: STRING,
type : STRING,
defaultValue: null,
},
publisherId: {
type: STRING,
type : STRING,
defaultValue: null,
},
ip: {
type: STRING,
type : STRING,
defaultValue: null,
},
},
{
freezeTableName: true,
timestamps: false, // don't use default timestamps columns
indexes: [
timestamps : false, // don't use default timestamps columns
indexes : [
{
fields: ['time', 'isChannel', 'claimId', 'publisherId', 'ip'],
},
@ -59,8 +59,8 @@ module.exports = (sequelize, { BOOLEAN, DATE, STRING }) => {
claimId,
},
distinct: true,
col: 'ip'
})
col : 'ip',
});
};
return Views;

View file

@ -26,42 +26,40 @@ const getTorList = require('../../controllers/api/tor');
const getBlockedList = require('../../controllers/api/blocked');
const getOEmbedData = require('../../controllers/api/oEmbed');
module.exports = {
// homepage routes
'/api/homepage/data/channels': { controller: [ torCheckMiddleware, channelData ] },
'/api/homepage/data/channels' : { controller: [ torCheckMiddleware, channelData ] },
// channel routes
'/api/channel/availability/:name': { controller: [ torCheckMiddleware, channelAvailability ] },
'/api/channel/short-id/:longId/:name': { controller: [ torCheckMiddleware, channelShortId ] },
'/api/channel/data/:channelName/:channelClaimId': { controller: [ torCheckMiddleware, channelData ] },
'/api/channel/availability/:name' : { controller: [ torCheckMiddleware, channelAvailability ] },
'/api/channel/short-id/:longId/:name' : { controller: [ torCheckMiddleware, channelShortId ] },
'/api/channel/data/:channelName/:channelClaimId' : { controller: [ torCheckMiddleware, channelData ] },
'/api/channel/claims/:channelName/:channelClaimId/:page': { controller: [ torCheckMiddleware, channelClaims ] },
// sepcial routes
'/api/special/:name/:page': { controller: [ torCheckMiddleware, specialClaims ] },
// claim routes
'/api/claim/availability/:name': { controller: [ torCheckMiddleware, claimAvailability ] },
'/api/claim/data/:claimName/:claimId': { controller: [ torCheckMiddleware, claimData ] },
'/api/claim/get/:name/:claimId': { controller: [ torCheckMiddleware, claimGet ] },
'/api/claim/list/:name': { controller: [ torCheckMiddleware, claimList ] },
'/api/claim/long-id': { method: 'post', controller: [ torCheckMiddleware, claimLongId ] }, // note: should be a 'get'
'/api/claim/publish': { method: 'post', controller: [ torCheckMiddleware, autoblockPublishMiddleware, multipartMiddleware, autoblockPublishBodyMiddleware, claimPublish ] },
'/api/claim/update': { method: 'post', controller: [ torCheckMiddleware, multipartMiddleware, claimUpdate ] },
'/api/claim/abandon': { method: 'post', controller: [ torCheckMiddleware, multipartMiddleware, claimAbandon ] },
'/api/claim/resolve/:name/:claimId': { controller: [ torCheckMiddleware, claimResolve ] },
'/api/claim/short-id/:longId/:name': { controller: [ torCheckMiddleware, claimShortId ] },
'/api/claim/views/:claimId': { controller: [ torCheckMiddleware, claimViews ] },
'/api/claim/availability/:name' : { controller: [ torCheckMiddleware, claimAvailability ] },
'/api/claim/data/:claimName/:claimId' : { controller: [ torCheckMiddleware, claimData ] },
'/api/claim/get/:name/:claimId' : { controller: [ torCheckMiddleware, claimGet ] },
'/api/claim/list/:name' : { controller: [ torCheckMiddleware, claimList ] },
'/api/claim/long-id' : { method: 'post', controller: [ torCheckMiddleware, claimLongId ] }, // note: should be a 'get'
'/api/claim/publish' : { method: 'post', controller: [ torCheckMiddleware, autoblockPublishMiddleware, multipartMiddleware, autoblockPublishBodyMiddleware, claimPublish ] },
'/api/claim/update' : { method: 'post', controller: [ torCheckMiddleware, multipartMiddleware, claimUpdate ] },
'/api/claim/abandon' : { method: 'post', controller: [ torCheckMiddleware, multipartMiddleware, claimAbandon ] },
'/api/claim/resolve/:name/:claimId' : { controller: [ torCheckMiddleware, claimResolve ] },
'/api/claim/short-id/:longId/:name' : { controller: [ torCheckMiddleware, claimShortId ] },
'/api/claim/views/:claimId' : { controller: [ torCheckMiddleware, claimViews ] },
// file routes
'/api/file/availability/:name/:claimId': { controller: [ torCheckMiddleware, fileAvailability ] },
// user routes
'/api/user/password/': { method: 'put', controller: [ torCheckMiddleware, userPassword ] },
'/api/user/password/' : { method: 'put', controller: [ torCheckMiddleware, userPassword ] },
// configs
'/api/config/site/publishing': { controller: [ torCheckMiddleware, publishingConfig ] },
'/api/config/site/publishing' : { controller: [ torCheckMiddleware, publishingConfig ] },
// tor
'/api/tor': { controller: [ torCheckMiddleware, getTorList ] },
'/api/tor' : { controller: [ torCheckMiddleware, getTorList ] },
// blocked
'/api/blocked': { controller: [ torCheckMiddleware, getBlockedList ] },
'/api/blocked' : { controller: [ torCheckMiddleware, getBlockedList ] },
// open embed
'/api/oembed': { controller: [ torCheckMiddleware, getOEmbedData ] },
'/api/oembed' : { controller: [ torCheckMiddleware, getOEmbedData ] },
};

View file

@ -7,5 +7,5 @@ const Sagas = require('@sagas').default;
module.exports = {
'/:identifier/:claim': { controller: serveByIdentifierAndClaim, action: Actions.onHandleShowPageUri, saga: Sagas.handleShowPageUri },
'/:claim': { controller: serveByClaim, action: Actions.onHandleShowPageUri, saga: Sagas.handleShowPageUri },
'/:claim' : { controller: serveByClaim, action: Actions.onHandleShowPageUri, saga: Sagas.handleShowPageUri },
};

View file

@ -6,7 +6,7 @@ const handleUserRequest = require('../../controllers/auth/user');
module.exports = {
'/signup': { method: 'post', controller: [ speechPassport.authenticate('local-signup'), handleSignupRequest ] },
'/auth': { method: 'post', controller: handleLoginRequest },
'/auth' : { method: 'post', controller: handleLoginRequest },
'/logout': { controller: handleLogoutRequest },
'/user': { controller: handleUserRequest },
'/user' : { controller: handleUserRequest },
};

View file

@ -7,15 +7,15 @@ const Actions = require('@actions').default;
const Sagas = require('@sagas').default;
module.exports = {
'/': { controller: handlePageRequest, action: Actions.onHandleShowHomepage, saga: Sagas.handleShowHomepage },
'/login': { controller: handlePageRequest },
'/about': { controller: handlePageRequest },
'/tos': { controller: handlePageRequest },
'/faq': { controller: handlePageRequest },
'/trending': { controller: redirect('/popular') },
'/popular': { controller: handlePageRequest },
'/new': { controller: handlePageRequest },
'/edit/:claimId': { controller: handlePageRequest },
'/multisite': { controller: handlePageRequest },
'/' : { controller: handlePageRequest, action: Actions.onHandleShowHomepage, saga: Sagas.handleShowHomepage },
'/login' : { controller: handlePageRequest },
'/about' : { controller: handlePageRequest },
'/tos' : { controller: handlePageRequest },
'/faq' : { controller: handlePageRequest },
'/trending' : { controller: redirect('/popular') },
'/popular' : { controller: handlePageRequest },
'/new' : { controller: handlePageRequest },
'/edit/:claimId' : { controller: handlePageRequest },
'/multisite' : { controller: handlePageRequest },
'/video-embed/:name/:claimId/:config?': { controller: handleVideoEmbedRequest }, // for twitter
};

View file

@ -6,8 +6,8 @@ module.exports = async (data) => {
const certificateId = data.publisher_id || data.certificateId;
let channelName = data.channelName;
if(certificateId && !channelName) {
channelName = await chainquery.claim.queries.getClaimChannelName(certificateId).catch(()=>{});
if (certificateId && !channelName) {
channelName = await chainquery.claim.queries.getClaimChannelName(certificateId).catch(() => {});
}
let channelShortId = null;
@ -16,17 +16,17 @@ module.exports = async (data) => {
}
return ({
name: data.name,
title: data.title,
name : data.name,
title : data.title,
certificateId,
channelName,
channelShortId,
contentType: data.content_type || data.contentType,
claimId: data.claim_id || data.claimId,
fileExt: data.generated_extension || data.fileExt,
claimId : data.claim_id || data.claimId,
fileExt : data.generated_extension || data.fileExt,
description: data.description,
thumbnail: data.generated_thumbnail || data.thumbnail_url || data.thumbnail,
outpoint: data.transaction_hash_id || data.outpoint,
thumbnail : data.generated_thumbnail || data.thumbnail_url || data.thumbnail,
outpoint : data.transaction_hash_id || data.outpoint,
host,
})
}
});
};

View file

@ -1,6 +1,6 @@
module.exports = function(req) {
let reqIp = req.connection.remoteAddress;
let host = req.get('host');
module.exports = function (req) {
let reqIp = req.connection.remoteAddress;
let host = req.get('host');
return reqIp === '127.0.0.1' || reqIp === '::ffff:127.0.0.1' || reqIp === '::1' || host.indexOf('localhost') !== -1;
}
return reqIp === '127.0.0.1' || reqIp === '::ffff:127.0.0.1' || reqIp === '::1' || host.indexOf('localhost') !== -1;
};

View file

@ -13,7 +13,7 @@ module.exports = async () => {
const claims = await db.Trending.getTrendingClaims();
const claimViews = await db.Views.getUniqueViews();
if(claimViews.length <= 1) {
if (claimViews.length <= 1) {
return;
}
@ -26,7 +26,7 @@ module.exports = async () => {
standardDeviation,
} = getInformationFromValues(viewsNumArray);
for(let i = 0; i < claimViews.length; i++) {
for (let i = 0; i < claimViews.length; i++) {
let claimViewsEntry = claimViews[i];
const {
@ -41,9 +41,9 @@ module.exports = async () => {
const trendingData = {
time,
isChannel: claimViewsEntry.isChannel,
claimId: claimViewsEntry.claimId,
publisherId: claimViewsEntry.publisherId,
isChannel : claimViewsEntry.isChannel,
claimId : claimViewsEntry.claimId,
publisherId : claimViewsEntry.publisherId,
intervalViews: claimViewsEntry.views,
weight,
zScore,
@ -52,7 +52,7 @@ module.exports = async () => {
db.Trending.create(trendingData);
}
} catch(e) {
} catch (e) {
logger.error('Error processing trending content:', e);
}
}
};

8288
yarn.lock

File diff suppressed because it is too large Load diff