Staging #1066

Merged
jessopb merged 55 commits from staging into release 2020-05-16 17:08:51 +02:00
55 changed files with 5234 additions and 12952 deletions
Showing only changes of commit 2f09713186 - Show all commits

View file

@ -3,3 +3,4 @@ node_modules/
public/bundle public/bundle
server/render/build server/render/build
test/ test/
server/chainquery

View file

@ -1,4 +1,5 @@
{ {
"parser": "babel-eslint",
"extends": ["standard", "standard-jsx"], "extends": ["standard", "standard-jsx"],
"env": { "env": {
"es6": true, "es6": true,
@ -12,16 +13,14 @@
"rules": { "rules": {
"no-multi-spaces": 0, "no-multi-spaces": 0,
"new-cap": 0, "new-cap": 0,
"prefer-promise-reject-errors":0, "prefer-promise-reject-errors": 0,
"comma-dangle": [ "no-unused-vars": 0,
"error", "standard/object-curly-even-spacing": 0,
"always-multiline" "handle-callback-err": 0,
], "one-var": 0,
"semi": [ "object-curly-spacing": 0,
"error", "comma-dangle": ["error", "always-multiline"],
"always", "semi": ["error", "always", { "omitLastInOneLineBlock": true }],
{ "omitLastInOneLineBlock": true }
],
"key-spacing": [ "key-spacing": [
"error", "error",
{ {

View file

@ -40,7 +40,7 @@ Spee.ch is a react web app that depends on MySQL for local content, and on two o
* [FFmpeg](https://www.ffmpeg.org/download.html) * [FFmpeg](https://www.ffmpeg.org/download.html)
* [Spee.ch] (below) * [Spee.ch] (below)
* [pm2] (optional) process manager such as pm2 to run speech server.js * [pm2] (optional) process manager such as pm2 to run speech server.js
* [http proxy server] caddy, nginx, traefik, etc to forward 443 to speech port 3000 * [http proxy server] caddy, nginx, traefik, etc to forward 80/443 to speech port 3000
#### Clone this repo #### Clone this repo
@ -67,22 +67,12 @@ $ npm install
#### Create the config files using the built-in CLI #### Create the config files using the built-in CLI
_note: make sure lbrynet is running in the background before proceeding_ _note: make sure lbrynet is running in the background before proceeding_
_note: If you opt to run a local chainquery, such as from [lbry-docker/chainquery](https://github.com/lbryio/lbry-docker/tree/master/chainquery) you will need to specify connection details at this time in:_ ~/spee.ch/docs/setup/conf/speech/chainqueryConfig.json
``` ```
$ npm run configure $ npm run configure
``` ```
* _note: At the moment, you will have to copy chainqueryConfig.json from:_
```
~/spee.ch/docs/setup/conf/speech/chainqueryConfig.json
```
_to:_
```
~/spee.ch/site/config/chainqueryConfig.json
```
* _note: The domain name in this part must be prefixed with http:// or https://_
#### Build & start the app #### Build & start the app
_note: make sure lbrynet is running in the background before proceeding_ _note: make sure lbrynet is running in the background before proceeding_

View file

@ -13,7 +13,7 @@ export function onHandleShowPageUri (params, url) {
data: { data: {
...params, ...params,
url, url,
} },
}; };
} }
@ -23,7 +23,7 @@ export function onHandleShowHomepage (params, url) {
data: { data: {
...params, ...params,
url, url,
} },
}; };
} }

View file

@ -1,6 +1,6 @@
import Request from '../utils/request'; import Request from '../utils/request';
export function getSpecialAssetClaims(host, name, page) { export function getSpecialAssetClaims (host, name, page) {
if (!page) page = 1; if (!page) page = 1;
const url = `${host}/api/special/${name}/${page}`; const url = `${host}/api/special/${name}/${page}`;
return Request(url); return Request(url);

View file

@ -1,6 +1,6 @@
import React from 'react'; import React from 'react';
import Row from '@components/Row'; import Row from '@components/Row';
import {Link} from "react-router-dom"; import {Link} from 'react-router-dom';
const AboutSpeechDetails = () => { const AboutSpeechDetails = () => {
return ( return (

View file

@ -20,6 +20,6 @@ const ChooseChannelPublishRadio = ({ publishInChannel, toggleAnonymousPublish })
</label> </label>
</div> </div>
); );
} };
export default ChooseChannelPublishRadio; export default ChooseChannelPublishRadio;

View file

@ -6,6 +6,6 @@ const DropzoneDropItDisplay = () => {
Drop it. Drop it.
</div> </div>
); );
} };
export default DropzoneDropItDisplay; export default DropzoneDropItDisplay;

View file

@ -8,5 +8,5 @@ const Label = ({ value }) => {
{value} {value}
</label> </label>
); );
} };
export default Label; export default Label;

View file

@ -1,6 +1,5 @@
import React from 'react'; import React from 'react';
import AssetPreview from '@components/AssetPreview'; import AssetPreview from '@components/AssetPreview';
import HorizontalQuadSplit from '@components/HorizontalQuadSplit';
import Row from '@components/Row'; import Row from '@components/Row';
import ButtonSecondary from '@components/ButtonSecondary'; import ButtonSecondary from '@components/ButtonSecondary';
import { createGroupedList } from '../../utils/createGroupedList.js'; import { createGroupedList } from '../../utils/createGroupedList.js';

View file

@ -75,7 +75,7 @@ class ChannelCreateForm extends React.Component {
value={'Create Channel'} value={'Create Channel'}
onClickHandler={this.handleSubmit} onClickHandler={this.handleSubmit}
/> />
</form> </form>
) : ( ) : (
<div> <div>
<span className={'text--small text--secondary'}>{status}</span> <span className={'text--small text--secondary'}>{status}</span>

View file

@ -64,7 +64,7 @@ class ChannelLoginForm extends React.Component {
value={'Authenticate'} value={'Authenticate'}
onClickHandler={this.loginToChannel} onClickHandler={this.loginToChannel}
/> />
</form> </form>
); );
} }
} }

View file

@ -5,10 +5,10 @@ import View from './view';
const mapStateToProps = ({ show, publish }) => { const mapStateToProps = ({ show, publish }) => {
return { return {
file: publish.file, file : publish.file,
isUpdate: publish.isUpdate, isUpdate : publish.isUpdate,
hasChanged: publish.hasChanged, hasChanged: publish.hasChanged,
asset: selectAsset(show), asset : selectAsset(show),
}; };
}; };

View file

@ -1,7 +1,7 @@
import {connect} from 'react-redux'; import {connect} from 'react-redux';
import View from './view'; import View from './view';
import {selectAsset} from "../../selectors/show"; import {selectAsset} from '../../selectors/show';
import {buildURI} from "../../utils/buildURI"; import {buildURI} from '../../utils/buildURI';
const mapStateToProps = props => { const mapStateToProps = props => {
const { show, publish } = props; const { show, publish } = props;
@ -11,10 +11,10 @@ const mapStateToProps = props => {
uri = `lbry://${buildURI(asset)}`; uri = `lbry://${buildURI(asset)}`;
} }
return { return {
disabled: publish.disabled, disabled : publish.disabled,
file: publish.file, file : publish.file,
status: publish.status.status, status : publish.status.status,
isUpdate: publish.isUpdate, isUpdate : publish.isUpdate,
hasChanged: publish.hasChanged, hasChanged: publish.hasChanged,
uri, uri,
}; };

View file

@ -34,7 +34,7 @@ class PublishUrlInput extends React.Component {
value = this.cleanseInput(value); value = this.cleanseInput(value);
this.updateAndValidateClaimInput(value); this.updateAndValidateClaimInput(value);
} }
updateAndValidateClaimInput(value) { updateAndValidateClaimInput (value) {
if (value) { if (value) {
this.props.validateClaim(value); this.props.validateClaim(value);
} else { } else {

View file

@ -24,7 +24,7 @@ class SEO extends React.Component {
const canonicalLink = `${host}${createCanonicalLink({ const canonicalLink = `${host}${createCanonicalLink({
asset: asset ? { ...asset.claimData, shortId: asset.shortId } : undefined, asset: asset ? { ...asset.claimData, shortId: asset.shortId } : undefined,
channel, channel,
page: pageUri, page : pageUri,
})}`; })}`;
// render results // render results
return ( return (

View file

@ -14,20 +14,20 @@ class FaqPage extends React.Component {
</Row> </Row>
<Row> <Row>
<h3>What is spee.ch?</h3> <h3>What is spee.ch?</h3>
<p>Spee.ch is a media-hosting site that reads from and publishes content to the <a href="http://lbry.io/">LBRY blockchain</a>.</p> <p>Spee.ch is a media-hosting site that reads from and publishes content to the <a href='http://lbry.io/'>LBRY blockchain</a>.</p>
</Row> </Row>
<Row> <Row>
<h3>OK But Why Should I Care?</h3> <h3>OK But Why Should I Care?</h3>
<p>Spee.ch is a fast and easy way to host your images, videos, and other content. What makes this different from other similar sites is that Speech is hosted on the LBRY blockchain. That means it is impossible for your content to be censored via digital means. Even if we took down Speech today, all content would remain immutably stored on the LBRY blockchain.</p> <p>Spee.ch is a fast and easy way to host your images, videos, and other content. What makes this different from other similar sites is that Speech is hosted on the LBRY blockchain. That means it is impossible for your content to be censored via digital means. Even if we took down Speech today, all content would remain immutably stored on the LBRY blockchain.</p>
<p>Blockchain technology doesn't solve <a href="https://xkcd.com/538/">the 5 dollar wrench attack</a>, but it solves just about every other problem in media hosting and distribution.</p> <p>Blockchain technology doesn't solve <a href='https://xkcd.com/538/'>the 5 dollar wrench attack</a>, but it solves just about every other problem in media hosting and distribution.</p>
<p>Even better - you can host your own clone of Speech to get even more control over your content. <a href="https://github.com/lbryio/spee.ch/blob/master/README.md">CLICK HERE FOR INFO</a>.</p> <p>Even better - you can host your own clone of Speech to get even more control over your content. <a href='https://github.com/lbryio/spee.ch/blob/master/README.md'>CLICK HERE FOR INFO</a>.</p>
<p>Speech is just the beginning of what will soon be a vibrant ecosystem of LBRY-powered apps. Use LBRY and you're one step closer to true freedom.</p> <p>Speech is just the beginning of what will soon be a vibrant ecosystem of LBRY-powered apps. Use LBRY and you're one step closer to true freedom.</p>
</Row> </Row>
<Row> <Row>
<h3>How to Use spee.ch</h3> <h3>How to Use spee.ch</h3>
<p>It's easy. Drag the image or video file of your choice into the center of the spee.ch homepage.</p> <p>It's easy. Drag the image or video file of your choice into the center of the spee.ch homepage.</p>
<p>Spee.ch is currently best suited for web optimized MP4 video and standard image filetypes (JPEG, GIF).</p> <p>Spee.ch is currently best suited for web optimized MP4 video and standard image filetypes (JPEG, GIF).</p>
<p>If you want to refer to a piece of content repeatedly, or to build a collection of related content, you could create a channel. Channels work both for private collections and for public repositories. There's more info about how to do this <a href="https://spee.ch/login">on the channel page</a>.</p> <p>If you want to refer to a piece of content repeatedly, or to build a collection of related content, you could create a channel. Channels work both for private collections and for public repositories. There's more info about how to do this <a href='https://spee.ch/login'>on the channel page</a>.</p>
<p>Published files will be viewable and embeddable with any web browser and accessible in the LBRY app. You can also use spee.ch to view free and non-NSFW content published on LBRY network from LBRY app. You just need to replace "lbry://" with "http://spee.ch/" in the URL.</p> <p>Published files will be viewable and embeddable with any web browser and accessible in the LBRY app. You can also use spee.ch to view free and non-NSFW content published on LBRY network from LBRY app. You just need to replace "lbry://" with "http://spee.ch/" in the URL.</p>
</Row> </Row>
<Row> <Row>
@ -36,8 +36,8 @@ class FaqPage extends React.Component {
</Row> </Row>
<Row> <Row>
<h3>Contribute</h3> <h3>Contribute</h3>
<p>If you have an idea for your own spee.ch-like site on top of LBRY, fork our <a href="https://github.com/lbryio/spee.ch">github repo</a> and go to town!</p> <p>If you have an idea for your own spee.ch-like site on top of LBRY, fork our <a href='https://github.com/lbryio/spee.ch'>github repo</a> and go to town!</p>
<p>If you want to improve spee.ch, join <a href="https://chat.lbry.io/">our discord channel</a> or solve one of our <a href="https://github.com/lbryio/spee.ch/issues">github issues</a>.</p> <p>If you want to improve spee.ch, join <a href='https://chat.lbry.io/'>our discord channel</a> or solve one of our <a href='https://github.com/lbryio/spee.ch/issues'>github issues</a>.</p>
</Row> </Row>
</PageLayout> </PageLayout>
); );

View file

@ -16,7 +16,7 @@ class PopularPage extends React.Component {
const { homeChannel } = this.props; const { homeChannel } = this.props;
return ( return (
<ContentPageWrapper homeChannel={homeChannel} /> <ContentPageWrapper homeChannel={homeChannel} />
) );
} }
}; };

View file

@ -41,9 +41,9 @@ const initialState = {
license : '', license : '',
nsfw : false, nsfw : false,
}, },
isUpdate: false, isUpdate : false,
hasChanged: false, hasChanged: false,
thumbnail: null, thumbnail : null,
thumbnailChannel, thumbnailChannel,
thumbnailChannelId, thumbnailChannelId,
}; };
@ -52,7 +52,7 @@ export default function (state = initialState, action) {
switch (action.type) { switch (action.type) {
case actions.FILE_SELECTED: case actions.FILE_SELECTED:
return Object.assign({}, state.isUpdate ? state : initialState, { // note: clears to initial state return Object.assign({}, state.isUpdate ? state : initialState, { // note: clears to initial state
file: action.data, file : action.data,
hasChanged: true, hasChanged: true,
}); });
case actions.FILE_CLEAR: case actions.FILE_CLEAR:
@ -66,13 +66,13 @@ export default function (state = initialState, action) {
}); });
case actions.CLAIM_UPDATE: case actions.CLAIM_UPDATE:
return Object.assign({}, state, { return Object.assign({}, state, {
claim: action.data, claim : action.data,
hasChanged: true, hasChanged: true,
}); });
case actions.SET_PUBLISH_IN_CHANNEL: case actions.SET_PUBLISH_IN_CHANNEL:
return Object.assign({}, state, { return Object.assign({}, state, {
publishInChannel: action.channel, publishInChannel: action.channel,
hasChanged: true, hasChanged : true,
}); });
case actions.PUBLISH_STATUS_UPDATE: case actions.PUBLISH_STATUS_UPDATE:
return Object.assign({}, state, { return Object.assign({}, state, {
@ -96,7 +96,7 @@ export default function (state = initialState, action) {
case actions.THUMBNAIL_NEW: case actions.THUMBNAIL_NEW:
return { return {
...state, ...state,
thumbnail: action.data, thumbnail : action.data,
hasChanged: true, hasChanged: true,
}; };
case actions.SET_UPDATE_TRUE: case actions.SET_UPDATE_TRUE:

View file

@ -59,9 +59,9 @@ export function * newAssetRequest (action) {
try { try {
const { loggedInChannel } = yield select(selectChannelState); const { loggedInChannel } = yield select(selectChannelState);
if(loggedInChannel && loggedInChannel.longId) { if (loggedInChannel && loggedInChannel.longId) {
const { const {
data: claimViewData data: claimViewData,
} = yield call(getClaimViews, longId); } = yield call(getClaimViews, longId);
claimViews = claimViewData[longId] || 0; claimViews = claimViewData[longId] || 0;
@ -82,18 +82,18 @@ export function * updateAssetViews (action) {
const channelId = loggedInChannel.longId; const channelId = loggedInChannel.longId;
for(let key in showState.assetList) { for (let key in showState.assetList) {
let asset = showState.assetList[key]; let asset = showState.assetList[key];
if(asset.claimData && asset.claimData.certificateId === channelId) { if (asset.claimData && asset.claimData.certificateId === channelId) {
const longId = asset.claimId; const longId = asset.claimId;
const assetKey = `a#${asset.name}#${longId}`; const assetKey = `a#${asset.name}#${longId}`;
let claimViews = null; let claimViews = null;
if(longId) { if (longId) {
const { const {
data: claimViewData data: claimViewData,
} = yield call(getClaimViews, longId); } = yield call(getClaimViews, longId);
claimViews = claimViewData[longId] || 0; claimViews = claimViewData[longId] || 0;
@ -103,12 +103,12 @@ export function * updateAssetViews (action) {
} }
} }
} catch (error) { } catch (error) {
console.log(error) console.log(error);
} }
}; };
export function * watchUpdateAssetViews (action) { export function * watchUpdateAssetViews (action) {
yield takeLatest(channelActions.CHANNEL_UPDATE, updateAssetViews) yield takeLatest(channelActions.CHANNEL_UPDATE, updateAssetViews);
}; };
export function * watchNewAssetRequest () { export function * watchNewAssetRequest () {

View file

@ -1,7 +1,7 @@
import {call, put, select, takeLatest} from 'redux-saga/effects'; import {call, put, select, takeLatest} from 'redux-saga/effects';
import * as actions from '../constants/show_action_types'; import * as actions from '../constants/show_action_types';
import { addNewChannelToChannelList, addRequestToRequestList, onRequestError, onRequestUpdate, updateChannelClaims } from '../actions/show'; import { addNewChannelToChannelList, addRequestToRequestList, onRequestError, onRequestUpdate, updateChannelClaims } from '../actions/show';
//import { getChannelClaims, getChannelData } from '../api/channelApi'; // import { getChannelClaims, getChannelData } from '../api/channelApi';
import { getSpecialAssetClaims } from '../api/specialAssetApi'; import { getSpecialAssetClaims } from '../api/specialAssetApi';
import { selectShowState } from '../selectors/show'; import { selectShowState } from '../selectors/show';
import { selectSiteHost } from '../selectors/site'; import { selectSiteHost } from '../selectors/site';

View file

@ -30,8 +30,8 @@ function * parseAndUpdateIdentifierAndClaim (modifier, claim) {
} }
function * parseAndUpdateClaimOnly (claim) { function * parseAndUpdateClaimOnly (claim) {
if(/^special\:/.test(claim) === true) { if (/^special:/.test(claim) === true) {
const assetName = /special\:(.*)/.exec(claim)[1]; const assetName = /special:(.*)/.exec(claim)[1];
return yield call(newSpecialAssetRequest, onNewSpecialAssetRequest(assetName)); return yield call(newSpecialAssetRequest, onNewSpecialAssetRequest(assetName));
} else { } else {
// this could be a request for an asset or a channel page // this could be a request for an asset or a channel page

View file

@ -1,261 +0,0 @@
# Create Your Own Spee.ch on Ubuntu 16.x 18.x VPS
# Overview
## Prerequisites
* UBUNTU 16+ VPS with root access
* Your login info ready
* Domain name with @ and www pointed at your VPS IP
* Email Address
* Ability to send 5+ LBRY credits to an address
* Noncommercial use (highly suggested, but you still _could_)
* We recommend that you fork Spee.ch so that you can customize the site.
## You'll be installing:
* MYSQL DB
* Default Port
* NODE v8+
* HTTPS PROXY SERVER
* Caddy for personal use
* Exposed ports: 22, 80, 443, 3333, 4444
* Reverse proxies to App on 3000
* SPEE.CH
* LBRYNET DAEMON
# 1. Update OS and install packages
## OS
`sudo apt-get update -y`
`ulimit -n 8192`
## Git
`sudo apt-get install git -y`
## NODE v8
`wget -qO- https://deb.nodesource.com/setup_8.x | sudo -E bash -`
`sudo apt-get install -y nodejs`
## Curl, Tmux, Unzip, ffmpeg
`sudo apt-get install curl tmux unzip ffmpeg -y`
## Grab config files
`git clone https://github.com/jessopb/speechconfigs.git`
`chmod 640 -R ~/speechconfigs`
# 2 Secure the UFW firewall
## UFW
`sudo ufw status`
`sudo ufw allow 80`
`sudo ufw allow 443`
`sudo ufw allow 22`
`sudo ufw allow 3333`
`sudo ufw allow 4444`
`sudo ufw default allow outgoing`
`sudo ufw default deny incoming`
`sudo ufw show added`
`sudo ufw enable` (yes, you've allowed ssh 22)
`sudo ufw status`
# 3 Install Caddy to handle https and reverse proxy
## Get Caddy
`curl https://getcaddy.com | bash -s personal`
## Set up Caddy
`mkdir -p /opt/caddy/logs/`
`mkdir -p /opt/caddy/store/`
`cp ~/speechconfigs/caddy/Caddyfile.speechsample ~/speechconfigs/caddy/Caddyfile`
`nano ~/speechconfigs/caddy/Caddyfile`
( Change {{EXAMPLE.COM}} to YOURDOMAIN.COM )
`cp ~/speechconfigs/caddy/Caddyfile /opt/caddy/`
## Set up Caddy to run as systemd service
`cp ~/speechconfigs/caddy/caddy.service /etc/systemd/system/caddy.service`
`chmod 644 /etc/systemd/system/caddy.service`
`chown -R www-data:www-data /opt/caddy/`
`setcap 'cap_net_bind_service=+ep' /usr/local/bin/caddy`
`systemctl daemon-reload`
`systemctl start caddy`
`systemctl status caddy`
At this point, navigating to yourdomain.com should give you a 502 bad gateway error. That's good!
# 4 Set up MySql
## Install MySql
`sudo apt-get install mysql-server -y`
( enter blank password each time )
`sudo systemctl status mysql` (q to exit)
## Secure Setup
`sudo mysql_secure_installation`
* No to password validation
* Y to all other options
* password abcd1234
## Login to mysql from root to complete setup:
`mysql` to enter mysql> console
mysql> `ALTER USER 'root'@'localhost' IDENTIFIED WITH mysql_native_password BY 'abcd1234';`
mysql> `FLUSH PRIVILEGES;`
Control+D to exit
Verify:
`mysql -u root -p` and then entering your password abcd1234 should give you the mysql> shell
# 5 Get Lbrynet Daemon
### TODO: Enable something like sudo systemctl start lbrynet so it runs as www-data
## Enter tmux
`tmux`
* Ctrl+b, d detaches leaving session running.
* ~# `tmux`, Ctrl+b, ( goes back to that session.
## Get the daemon
`wget -O ~/latest_daemon.zip https://lbry.io/get/lbrynet.linux.zip`
`unzip -o -u ~/latest_daemon.zip`
## Start the daemon
~# `./lbrynet start`
## Detach tmux session
`Control+b, then d` to leave lbrynet daemon running and exit the session
`tmux` if you want to get back into tmux
`Control+b, then ) in tmux` to cycle back to your lbrynet session to see output
## Display wallet address to which to send 5+ LBC.
### These commands work when `lbrynet start` is already running in another tmux
`./lbrynet commands` to check out the current commands
`./lbrynet address_list` to get your wallet address
`Ctrl + Shift + C` after highlighting an address to copy.
Use a LBRY app or daemon to send LBC to the address. Sending LBC may take a few seconds or longer.
`./lbrynet account_balance` to check your balance after you've sent LBC.
# 6 Set up spee.ch
## Clone speech either from your own fork, or from the lbryio/spee.ch repo.
### Developers
SSH?
`git clone git@github.com:{{youraccount}}/spee.ch`
HTTPS?
`git clone https://github.com/{{youraccount}}/spee.ch.git`
### Publishers
`git clone -b release https://github.com/lbryio/spee.ch`
## Build it
`cd spee.ch`
~/spee.ch# `npm install`
`cp ~/speechconfigs/speech/chainqueryConfig.json ~/spee.ch/site/config/chainqueryConfig.json`
~/spee.ch# `npm run configure` (once your wallet balance has cleared)
* DATABASE: lbry
* USER NAME: root
* PASSWORD: abcd1234
* PORT: 3000
* Site Title: Your Site Name
* Enter your site's domain name: https://freezepeach.fun (this must include https://)
* Enter a directory where uploads should be stored: (/home/lbry/Uploads)
~/spee.ch/# `npm run start`
## Try it
Navigate to yourdomain.fun!
### 7 Maintenance Procedures
* Change wallet
* TODO
* Change daemon
* wget daemon from https://github.com/lbryio/lbry/releases
* wget --quiet -O ~/your_name_daemon.zip https://your_copied_file_path.zip
* rm ./lbrynet
* unzip -o -u ~/your_name_daemon.zip
### 7 TODO
* Don't run as root
* Use Dockerized Spee.ch and Lbrynet
* https://github.com/lbryio/lbry-docker/tree/master/www.spee.ch
* https://github.com/lbryio/lbry-docker/tree/master/lbrynet-daemon
* https://blog.hasura.io/an-exhaustive-guide-to-writing-dockerfiles-for-node-js-web-apps-bbee6bd2f3c4
* https://docs.traefik.io/user-guide/docker-and-lets-encrypt/
* https://docs.traefik.io/configuration/acme/
* Systemd unit files
* https://nodesource.com/blog/running-your-node-js-app-with-systemd-part-1/
* Spee.ch
* sudo nano /lib/systemd/system/speech.service
* Lbrynet
* sudo nano /lib/systemd/system/lbrynet.service
```
[Unit]
Description=hello_env.js - making your environment variables read
Documentation=https://example.com
After=network.target
[Service]
Environment=NODE_PORT=3001
Type=simple
User=ubuntu
ExecStart=node path/server.js
Restart=on-failure
[Install]
WantedBy=multi-user.target
```
* Provide spee.ch build releases?
* Provide system to configure chainqueryConfig.json
* Clone speech to stripped version, streamline customization
* Automate for testing

View file

@ -19,12 +19,12 @@
* Https proxy server * Https proxy server
* Caddy for personal use * Caddy for personal use
* Exposed ports: 22, 80, 443, 3333, 4444 * Exposed ports: 22, 80, 443, 3333, 4444
* Reverse proxies 443 to App on 3000 * Reverse proxies 80 redirected to 443 to App on 3000
* Spee.ch started on port 3000 * Spee.ch started on port 3000
* Lbrynet DAEMON started on ports 3333 and 4444 * Lbrynet DAEMON started on ports 3333 and 4444
# 1. Update OS and install packages # 1. Setup OS and install dependencies
## OS ## OS
### Secure your server by creating a non-root sudoer. ### Secure your server by creating a non-root sudoer.
@ -59,25 +59,21 @@ Log in as username@domainname or username@ip_address
## Clone speech either from your own fork, or from the lbryio/spee.ch repo. ## Clone speech either from your own fork, or from the lbryio/spee.ch repo.
### For Developers or those with their own forked repo * For Developers - our master branch
`git clone https://github.com/lbryio/spee.ch`
`git clone -b master https://github.com/lbryio/spee.ch` * For Developers - your fork
SSH:
`git clone git@github.com:{{youraccount}}/spee.ch`
HTTPS:
`git clone https://github.com/{{youraccount}}/spee.ch.git` `git clone https://github.com/{{youraccount}}/spee.ch.git`
### For Publishers and Content creators `git clone git@github.com:{{youraccount}}/spee.ch`
* For Publishers and Content creators - stable release
`git clone -b release https://github.com/lbryio/spee.ch` `git clone -b release https://github.com/lbryio/spee.ch`
### Prepare the scripts ## Prepare the scripts
`chmod 750 -R ~/spee.ch/docs/setup` `chmod 750 -R ~/spee.ch/docs/setup`
@ -181,9 +177,9 @@ This just allows you to run multiple things in different sessions. Useful for ma
## Detach tmux session ## Detach tmux session
* `Control + b`, then `d` to leave lbrynet daemon running and exit the session * `Control + b`, then `d` to leave lbrynet daemon running and exit the session
`tmux` if you want to get back into tmux * `tmux` if you want to get back into tmux
`Control+b`, then `)` while in tmux session to cycle back to your lbrynet session to see output * `Control+b`, then `)` while in tmux session to cycle back to your lbrynet session to see output
## Display wallet address to which to send 5+ LBC. ## Display wallet address to which to send 5+ LBC.
@ -206,28 +202,31 @@ This just allows you to run multiple things in different sessions. Useful for ma
# 6 Set up spee.ch # 6 Set up spee.ch
## Build it ## Build it
`cd spee.ch` `cd spee.ch`
~/spee.ch: ~/spee.ch:
`npm install` `npm install`
`cp ~/spee.ch/docs/setup/conf/speech/chainqueryConfig.json ~/spee.ch/site/config/chainqueryConfig.json` _note: if you have installed your own local chainquery instance, you will need to specify it in your own /site/config/chainqueryConfig.json_
Once your wallet has a balance, run this:
`npm run configure`
`npm run configure` (once your wallet balance has cleared)
* Database: lbry * Database: lbry
* Username: root * Username: root
* Password: abcd1234 * Password: abcd1234
* Port: 3000 * Port: 3000
* Site Title: Your Site Name * Site Title: Your Site Name
* Enter your site's domain name: https://freezepeach.fun (this must include 'https://') * Enter your site's domain name: https://example.com or http://localhost
* Enter a directory where uploads should be stored: (/home/lbry/Uploads) * Enter a directory where uploads should be stored: (/home/lbry/Uploads)
`npm run start` `npm run start`
## Try it ## Try it
Navigate to yourdomain.fun! Navigate to example.com!
# 7 Production # 7 Production

View file

@ -25,14 +25,14 @@ $ npm update
* Create a config file called `spee.ch` in */etc/nginx/sites-available* * Create a config file called `spee.ch` in */etc/nginx/sites-available*
* see example: [config file](https://github.com/lbryio/spee.ch/blob/master/nginx_example_config). * see example: [config file](https://github.com/lbryio/spee.ch/blob/master/nginx_example_config).
* Rename all mentions of *sub.domain.com* with your subdomain name. * Rename all mentions of *sub.domain.com* with your subdomain name.
* Run this command to link the sites-available. * Run this command to link the sites-available.
`$ ln -s /etc/nginx/sites-available/speech /etc/nginx/sites-enabled/speech` `$ ln -s /etc/nginx/sites-available/speech /etc/nginx/sites-enabled/speech`
* Restart Nginx. * Restart Nginx.
`$ sudo service nginx restart` `$ sudo service nginx restart`
* Try visiting your website. * Try visiting your website.
* If Nginx is working, you should get a **502** error because there is nothing running on **3000** yet. * If Nginx is working, you should get a **502** error because there is nothing running on **3000** yet.
* If you get the default Nginx greeting, you have not properly configured it to serve from port **3000**. * If you get the default Nginx greeting, you have not properly configured it to serve from port **3000**.
@ -40,29 +40,29 @@ $ npm update
* Caddy tutorial: [https://caddyserver.com/tutorial](https://caddyserver.com/tutorial) * Caddy tutorial: [https://caddyserver.com/tutorial](https://caddyserver.com/tutorial)
### MySql ### MySql
* Install MySql * Install MySql
* [Instructions](https://dev.mysql.com/doc/mysql-installation-excerpt/5.7/en) * [Instructions](https://dev.mysql.com/doc/mysql-installation-excerpt/5.7/en)
* Create user **root**. * Create user **root**.
* Note: We are going to access **mysql** as **root** for this setup, but you may want to create a separate user in the future. * Note: We are going to access **mysql** as **root** for this setup, but you may want to create a separate user in the future.
* Keep your password somewhere handy! * Keep your password somewhere handy!
* Create a database called **lbry** and make sure you can use it. * Create a database called **lbry** and make sure you can use it.
`CREATE DATABASE lbry;` `CREATE DATABASE lbry;`
`$ USE lbry;` `$ USE lbry;`
`$ exit; (or press ctl + d)` `$ exit; (or press ctl + d)`
* Try logging into mysql. * Try logging into mysql.
`$ mysql -u username -p` `$ mysql -u username -p`
* If you are using a **LBRY** server, your **password** is the one provided for **ssh**. * If you are using a **LBRY** server, your **password** is the one provided for **ssh**.
* Note: If it fails, try using `sudo`. * Note: If it fails, try using `sudo`.
##2. Install & Run the LBRY Daemon ##2. Install & Run the LBRY Daemon
### Install **lbrynet** ### Install **lbrynet**
_note: if you have a server from LBRY, lbrynet is already installed, you can skip to 2.4._ _note: if you have a server from LBRY, lbrynet is already installed, you can skip to 2.4._
``` ```
$ wget --quiet -O ~/latest_daemon.zip https://lbry.io/get/lbrynet.linux.zip $ wget --quiet -O ~/latest_daemon.zip https://lbry.io/get/lbrynet.linux.zip
@ -75,9 +75,9 @@ $ ./lbrynet-daemon
``` ```
### Detach (exit) the tmux session and leave **lbrynet** running in the background. ### Detach (exit) the tmux session and leave **lbrynet** running in the background.
press `ctrl` + `b` then `d` to detach press `ctrl` + `b` then `d` to detach
### Get LBC! ### Get LBC!
Get a list of your wallets: Get a list of your wallets:
@ -93,7 +93,7 @@ Check your balance again:
``` ```
$ ~/lbrynet-cli wallet_balance $ ~/lbrynet-cli wallet_balance
``` ```
You should have **LBC**! You should have **LBC**!
### Install ffmpeg ### Install ffmpeg
@ -128,10 +128,10 @@ $ npm run configure
Check your site configs Check your site configs
``` ```
$ cd config/ $ cd /site/config/
$ nano siteConfig.json $ nano siteConfig.json
``` ```
### Build & run ### Build & run
Run the below command to transpile, build, and start your server. Run the below command to transpile, build, and start your server.
@ -141,15 +141,15 @@ $ npm run start
_**Note:** if you had to use `sudo` to login to **mysql** above, you may have issues with this step._ _**Note:** if you had to use `sudo` to login to **mysql** above, you may have issues with this step._
Spee.ch should now be running ! Spee.ch should now be running !
Visit your site in the browser. Try publishing an image! Visit your site in the browser. Try publishing an image!
## 4. Bonus: ## 4. Bonus:
### Install PM2 and run your server with PM2 ### Install PM2 and run your server with PM2
Install PM2 Install PM2
``` ```
$ sudo npm i -g pm2 $ sudo npm i -g pm2
@ -161,14 +161,3 @@ $ pm2 start server.js
``` ```
Visit your site and see if it is running! Visit your site and see if it is running!
### Sync Your Spee.ch Instance with the full **Blockchain**
Install **lbrycrdd**
Install **lbry-decoder**
Start **lbry-decoder**
Install & run [spee.ch-sync](https://github.com/billbitt/spee.ch-sync)

8983
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@ -11,7 +11,6 @@
"configure": "node cli/configure.js", "configure": "node cli/configure.js",
"fix": "eslint . --fix", "fix": "eslint . --fix",
"lint": "eslint .", "lint": "eslint .",
"precommit": "eslint .",
"prestart": "builder run bundle", "prestart": "builder run bundle",
"start": "node server.js", "start": "node server.js",
"start:build": "builder run start", "start:build": "builder run start",
@ -50,33 +49,33 @@
"axios": "^0.18.0", "axios": "^0.18.0",
"bcrypt": "^2.0.1", "bcrypt": "^2.0.1",
"body-parser": "^1.18.3", "body-parser": "^1.18.3",
"connect-multiparty": "^2.1.1", "connect-multiparty": "^2.2.0",
"cookie-session": "^2.0.0-beta.3", "cookie-session": "^2.0.0-beta.3",
"express": "^4.15.2", "express": "^4.16.4",
"express-handlebars": "^3.0.0", "express-handlebars": "^3.0.0",
"express-http-context": "^1.1.0", "express-http-context": "^1.2.0",
"get-video-dimensions": "^1.0.0", "get-video-dimensions": "^1.0.0",
"helmet": "^3.13.0", "helmet": "^3.15.0",
"image-size": "^0.6.3", "image-size": "^0.6.3",
"inquirer": "^5.2.0", "inquirer": "^5.2.0",
"ip": "^1.1.5", "ip": "^1.1.5",
"make-dir": "^1.3.0", "make-dir": "^1.3.0",
"module-alias": "^2.1.0", "module-alias": "^2.1.0",
"mysql2": "^1.6.1", "mysql2": "^1.6.4",
"npm": "^6.3.0", "npm": "^6.3.0",
"passport": "^0.4.0", "passport": "^0.4.0",
"passport-local": "^1.0.0", "passport-local": "^1.0.0",
"prop-types": "^15.6.2", "prop-types": "^15.6.2",
"react": "^16.4.2", "react": "^16.4.2",
"react-dom": "^16.4.2", "react-feather": "^1.1.4",
"react-feather": "^1.1.3", "react-dom": "^16.6.1",
"react-ga": "^2.5.3", "react-ga": "^2.5.3",
"react-helmet": "^5.2.0", "react-helmet": "^5.2.0",
"react-redux": "^5.0.6", "react-redux": "^5.1.1",
"react-router-dom": "^4.3.1", "react-router-dom": "^4.3.1",
"redux": "^4.0.0", "redux": "^4.0.1",
"redux-saga": "^0.16.0", "redux-saga": "^0.16.2",
"sequelize": "^4.38.0", "sequelize": "^4.41.1",
"sequelize-cli": "^4.0.0", "sequelize-cli": "^4.0.0",
"universal-analytics": "^0.4.20", "universal-analytics": "^0.4.20",
"webpack": "^3.10.0", "webpack": "^3.10.0",
@ -87,43 +86,49 @@
"winston-slack-webhook": "github:billbitt/winston-slack-webhook" "winston-slack-webhook": "github:billbitt/winston-slack-webhook"
}, },
"devDependencies": { "devDependencies": {
"@babel/cli": "^7.0.0", "@babel/cli": "^7.1.5",
"@babel/core": "^7.0.0", "@babel/core": "^7.1.5",
"@babel/plugin-proposal-object-rest-spread": "^7.0.0", "@babel/plugin-proposal-object-rest-spread": "^7.0.0",
"@babel/polyfill": "^7.0.0", "@babel/polyfill": "^7.0.0",
"@babel/preset-env": "^7.0.0", "@babel/preset-env": "^7.1.5",
"@babel/preset-react": "^7.0.0", "@babel/preset-react": "^7.0.0",
"@babel/preset-stage-2": "^7.0.0", "@babel/preset-stage-2": "^7.0.0",
"@babel/register": "^7.0.0", "@babel/register": "^7.0.0",
"babel-loader": "^7.1.2", "babel-loader": "^7.1.2",
"babel-plugin-module-resolver": "^3.1.1", "babel-plugin-module-resolver": "^3.1.1",
"babel-eslint": "9.0.0-beta.3",
"builder": "^4.0.0", "builder": "^4.0.0",
"chai": "^4.1.2", "chai": "^4.2.0",
"chai-http": "^4.0.0", "chai-http": "^4.2.0",
"cross-fetch": "^2.2.2", "cross-fetch": "^2.2.3",
"css-loader": "^0.28.11", "css-loader": "^0.28.11",
"eslint": "4.19.1", "eslint": "5.9.0",
"eslint-config-standard": "^11.0.0", "eslint-config-standard": "^12.0.0",
"eslint-config-standard-jsx": "^5.0.0", "eslint-config-standard-jsx": "^6.0.2",
"eslint-plugin-import": "^2.12.0", "eslint-plugin-import": "^2.14.0",
"eslint-plugin-node": "^6.0.1", "eslint-plugin-node": "^8.0.0",
"eslint-plugin-promise": "^3.8.0", "eslint-plugin-promise": "^4.0.1",
"eslint-plugin-react": "^7.9.1", "eslint-plugin-react": "^7.11.1",
"eslint-plugin-standard": "^3.0.1", "eslint-plugin-standard": "^4.0.0",
"extract-text-webpack-plugin": "^3.0.2", "extract-text-webpack-plugin": "^3.0.2",
"file-loader": "^1.1.11", "file-loader": "^1.1.11",
"husky": "^0.14.3", "husky": "^1.1.3",
"mocha": "^5.2.0", "mocha": "^5.2.0",
"ndb": "^1.0.24", "ndb": "^1.0.26",
"node-sass": "^4.9.3", "node-sass": "^4.10.0",
"nodemon": "^1.17.5", "nodemon": "^1.18.6",
"redux-devtools": "^3.4.1", "redux-devtools": "^3.4.1",
"regenerator-transform": "^0.13.0", "regenerator-transform": "^0.13.3",
"rollup": "^0.66.2", "rollup": "^0.67.0",
"sass-loader": "^7.1.0", "sass-loader": "^7.1.0",
"sequelize-cli": "^4.0.0", "sequelize-cli": "^4.0.0",
"style-loader": "^0.21.0", "style-loader": "^0.23.1",
"url-loader": "^1.0.1", "url-loader": "^1.1.2",
"wait-on": "^3.1.0" "wait-on": "^3.2.0"
},
"husky": {
"hooks": {
"pre-commit": "eslint ."
}
} }
} }

View file

@ -4,14 +4,14 @@ const chainquery = require('chainquery');
const getChannelData = async (channelName, channelClaimId) => { const getChannelData = async (channelName, channelClaimId) => {
let longChannelClaimId = await chainquery.claim.queries.getLongClaimId(channelName, channelClaimId).catch(() => false); let longChannelClaimId = await chainquery.claim.queries.getLongClaimId(channelName, channelClaimId).catch(() => false);
if(!longChannelClaimId) { if (!longChannelClaimId) {
// Allow an error to throw here if this fails // Allow an error to throw here if this fails
longChannelClaimId = await db.Certificate.getLongChannelId(channelName, channelClaimId); longChannelClaimId = await db.Certificate.getLongChannelId(channelName, channelClaimId);
} }
let shortChannelClaimId = await chainquery.claim.queries.getShortClaimIdFromLongClaimId(longChannelClaimId, channelName).catch(() => false); let shortChannelClaimId = await chainquery.claim.queries.getShortClaimIdFromLongClaimId(longChannelClaimId, channelName).catch(() => false);
if(!shortChannelClaimId) { if (!shortChannelClaimId) {
shortChannelClaimId = await db.Certificate.getShortChannelIdFromLongChannelId(longChannelClaimId, channelName); shortChannelClaimId = await db.Certificate.getShortChannelIdFromLongChannelId(longChannelClaimId, channelName);
} }

View file

@ -12,7 +12,7 @@ const channelShortIdRoute = async ({ ip, originalUrl, params }, res) => {
try { try {
let shortId = await chainquery.claim.queries.getShortClaimIdFromLongClaimId(params.longId, params.name).catch(() => false); let shortId = await chainquery.claim.queries.getShortClaimIdFromLongClaimId(params.longId, params.name).catch(() => false);
if(!shortId) { if (!shortId) {
shortId = await db.Certificate.getShortChannelIdFromLongChannelId(params.longId, params.name); shortId = await db.Certificate.getShortChannelIdFromLongChannelId(params.longId, params.name);
} }

View file

@ -7,7 +7,7 @@ const claimAvailability = async (name) => {
const claimAddresses = additionalClaimAddresses || []; const claimAddresses = additionalClaimAddresses || [];
claimAddresses.push(primaryClaimAddress); claimAddresses.push(primaryClaimAddress);
// find any records where the name is used // find any records where the name is used
return await chainquery.claim return chainquery.claim
.findAll({ .findAll({
attributes: ['claim_address'], attributes: ['claim_address'],
where : { where : {

View file

@ -10,7 +10,6 @@ const db = require('server/models');
*/ */
const claimData = async ({ ip, originalUrl, body, params }, res) => { const claimData = async ({ ip, originalUrl, body, params }, res) => {
try { try {
const resolvedClaim = await fetchClaimData(params); const resolvedClaim = await fetchClaimData(params);
@ -25,7 +24,7 @@ const claimData = async ({ ip, originalUrl, body, params }, res) => {
success: true, success: true,
data : await getClaimData(resolvedClaim), data : await getClaimData(resolvedClaim),
}); });
} catch(error) { } catch (error) {
handleErrorResponse(originalUrl, ip, error, res); handleErrorResponse(originalUrl, ip, error, res);
} }
}; };

View file

@ -18,16 +18,16 @@ const claimGet = async ({ ip, originalUrl, params }, res) => {
try { try {
let claimData = await chainquery.claim.queries.resolveClaim(name, claimId).catch(() => {}); let claimData = await chainquery.claim.queries.resolveClaim(name, claimId).catch(() => {});
if(!claimData) { if (!claimData) {
claimData = await db.Claim.resolveClaim(name, claimId); claimData = await db.Claim.resolveClaim(name, claimId);
} }
if(!claimData) { if (!claimData) {
throw new Error('No matching uri found in Claim table'); throw new Error('No matching uri found in Claim table');
} }
let lbrynetResult = await getClaim(`${name}#${claimId}`); let lbrynetResult = await getClaim(`${name}#${claimId}`);
if(!lbrynetResult) { if (!lbrynetResult) {
throw new Error(`Unable to Get ${name}#${claimId}`); throw new Error(`Unable to Get ${name}#${claimId}`);
} }
@ -38,8 +38,8 @@ const claimGet = async ({ ip, originalUrl, params }, res) => {
try { try {
await waitOn({ await waitOn({
resources: [ lbrynetResult.file_name ], resources: [ lbrynetResult.file_name ],
delay: 100, delay : 100,
timeout: 10000, // 10 seconds timeout : 10000, // 10 seconds
}); });
} catch (e) {} } catch (e) {}
@ -49,7 +49,7 @@ const claimGet = async ({ ip, originalUrl, params }, res) => {
message, message,
completed, completed,
}); });
} catch(error) { } catch (error) {
handleErrorResponse(originalUrl, ip, error, res); handleErrorResponse(originalUrl, ip, error, res);
} }
}; };

View file

@ -125,7 +125,7 @@ const claimPublish = ({ body, files, headers, ip, originalUrl, user, tor }, res)
if (channelName) { if (channelName) {
canonicalUrl = createCanonicalLink({ asset: { ...claimData, channelShortId: shortId } }); canonicalUrl = createCanonicalLink({ asset: { ...claimData, channelShortId: shortId } });
} else { } else {
canonicalUrl = createCanonicalLink({ asset: { ...claimData, shortId } }) canonicalUrl = createCanonicalLink({ asset: { ...claimData, shortId } });
} }
res.status(200).json({ res.status(200).json({

View file

@ -12,12 +12,12 @@ const claimShortId = async ({ ip, originalUrl, body, params }, res) => {
try { try {
let shortId = await chainquery.claim.queries.getShortClaimIdFromLongClaimId(params.longId, params.name).catch(() => {}); let shortId = await chainquery.claim.queries.getShortClaimIdFromLongClaimId(params.longId, params.name).catch(() => {});
if(!shortId) { if (!shortId) {
shortId = await db.Claim.getShortClaimIdFromLongClaimId(params.longId, params.name); shortId = await db.Claim.getShortClaimIdFromLongClaimId(params.longId, params.name);
} }
res.status(200).json({success: true, data: shortId}); res.status(200).json({success: true, data: shortId});
} catch(error) { } catch (error) {
handleErrorResponse(originalUrl, ip, error, res); handleErrorResponse(originalUrl, ip, error, res);
} }
}; };

View file

@ -107,7 +107,6 @@ const claimUpdate = ({ body, files, headers, ip, originalUrl, user, tor }, res)
return [null, null]; return [null, null];
}) })
.then(([fileResult, resolution]) => { .then(([fileResult, resolution]) => {
metadata = Object.assign({}, { metadata = Object.assign({}, {
title : claimRecord.title, title : claimRecord.title,
description: claimRecord.description, description: claimRecord.description,
@ -164,7 +163,7 @@ const claimUpdate = ({ body, files, headers, ip, originalUrl, user, tor }, res)
if (channelName) { if (channelName) {
canonicalUrl = createCanonicalLink({ asset: { ...publishResult, channelShortId: shortId } }); canonicalUrl = createCanonicalLink({ asset: { ...publishResult, channelShortId: shortId } });
} else { } else {
canonicalUrl = createCanonicalLink({ asset: { ...publishResult, shortId } }) canonicalUrl = createCanonicalLink({ asset: { ...publishResult, shortId } });
} }
if (publishResult.error) { if (publishResult.error) {
@ -181,10 +180,10 @@ const claimUpdate = ({ body, files, headers, ip, originalUrl, user, tor }, res)
data : { data : {
name, name,
claimId, claimId,
url : `${details.host}${canonicalUrl}`, // for backwards compatability with app url : `${details.host}${canonicalUrl}`, // for backwards compatability with app
showUrl : `${details.host}${canonicalUrl}`, showUrl : `${details.host}${canonicalUrl}`,
serveUrl: `${details.host}${canonicalUrl}${fileExtension}`, serveUrl : `${details.host}${canonicalUrl}${fileExtension}`,
pushTo : canonicalUrl, pushTo : canonicalUrl,
claimData: publishResult, claimData: publishResult,
}, },
}); });

View file

@ -20,7 +20,7 @@ const claimViews = async ({ ip, originalUrl, body, params }, res) => {
[claimId]: viewCount, [claimId]: viewCount,
}, },
}); });
} catch(error) { } catch (error) {
handleErrorResponse(originalUrl, ip, error, res); handleErrorResponse(originalUrl, ip, error, res);
} }
}; };

View file

@ -14,21 +14,21 @@ const channelClaims = async ({ ip, originalUrl, body, params }, res) => {
page, page,
} = params; } = params;
if(name === 'trending') { if (name === 'trending') {
const result = await db.Trending.getTrendingClaims(); const result = await db.Trending.getTrendingClaims();
const claims = await Promise.all(result.map((claim) => getClaimData(claim))); const claims = await Promise.all(result.map((claim) => getClaimData(claim)));
return res.status(200).json({ return res.status(200).json({
success: true, success: true,
data: { data : {
channelName: name, channelName : name,
claims, claims,
longChannelClaimId: name, longChannelClaimId: name,
currentPage: 1, currentPage : 1,
nextPage: null, nextPage : null,
previousPage: null, previousPage : null,
totalPages: 1, totalPages : 1,
totalResults: claims.length, totalResults : claims.length,
} },
}); });
} }

View file

@ -28,7 +28,7 @@ const getClaimIdAndServeAsset = (channelName, channelClaimId, claimName, claimId
logger.debug('Full claim id:', claimId); logger.debug('Full claim id:', claimId);
return db.Claim.findOne({ return db.Claim.findOne({
where: { where: {
name : claimName, name: claimName,
claimId, claimId,
}, },
}); });
@ -47,17 +47,15 @@ const getClaimIdAndServeAsset = (channelName, channelClaimId, claimName, claimId
logger.debug('Outpoint:', outpoint); logger.debug('Outpoint:', outpoint);
return db.Blocked.isNotBlocked(outpoint).then(() => { return db.Blocked.isNotBlocked(outpoint).then(() => {
// If content was found, is approved, and not blocked - log a view. // If content was found, is approved, and not blocked - log a view.
if(headers && headers['user-agent'] && /LBRY/.test(headers['user-agent']) === false) { if (headers && headers['user-agent'] && /LBRY/.test(headers['user-agent']) === false) {
db.Views.create({ db.Views.create({
time: Date.now(), time : Date.now(),
isChannel: false, isChannel : false,
claimId: claimDataValues.claim_id || claimDataValues.claimId, claimId : claimDataValues.claim_id || claimDataValues.claimId,
publisherId: claimDataValues.publisher_id || claimDataValues.certificateId, publisherId: claimDataValues.publisher_id || claimDataValues.certificateId,
ip, ip,
}); });
} }
return;
}); });
}) })
.then(() => { .then(() => {

View file

@ -4,57 +4,52 @@ const {
} = require('@config/siteConfig'); } = require('@config/siteConfig');
const padSizes = { const padSizes = {
small: 'padSmall', small : 'padSmall',
medium: 'padMedium', medium: 'padMedium',
large: 'padLarge', large : 'padLarge',
}; };
const argumentProcessors = { const argumentProcessors = {
'bottom': async (config) => { 'bottom': async (config) => {
config.classNames.push('bottom'); config.classNames.push('bottom');
return;
}, },
'right': async (config) => { 'right': async (config) => {
config.classNames.push('right'); config.classNames.push('right');
return;
}, },
'pad': async (config, val) => { 'pad': async (config, val) => {
config.classNames.push(padSizes[val]); config.classNames.push(padSizes[val]);
return;
}, },
'logoClaim': async (config, val) => { 'logoClaim': async (config, val) => {
config.logoUrl = `${host}/${val}`; config.logoUrl = `${host}/${val}`;
return;
}, },
'link': async (config, val) => { 'link': async (config, val) => {
config.logoLink = val; config.logoLink = val;
return; },
}
}; };
const parseLogoConfigParam = async (rawConfig) => { const parseLogoConfigParam = async (rawConfig) => {
if(rawConfig) { if (rawConfig) {
let parsedConfig = { let parsedConfig = {
classNames: ['logoLink'], classNames: ['logoLink'],
logoUrl: thumbnail, logoUrl : thumbnail,
}; };
let splitConfig; let splitConfig;
try { try {
splitConfig = rawConfig.split(','); splitConfig = rawConfig.split(',');
} catch(e) { } } catch (e) { }
if(!splitConfig) { if (!splitConfig) {
return false; return false;
} }
for(let i = 0; i < splitConfig.length; i++) { for (let i = 0; i < splitConfig.length; i++) {
let currentArgument = splitConfig[i]; let currentArgument = splitConfig[i];
if(argumentProcessors[currentArgument]) { if (argumentProcessors[currentArgument]) {
await argumentProcessors[currentArgument](parsedConfig); await argumentProcessors[currentArgument](parsedConfig);
} else { } else {
const splitArgument = currentArgument.split(':'); const splitArgument = currentArgument.split(':');
if(argumentProcessors[splitArgument[0]]) { if (argumentProcessors[splitArgument[0]]) {
await argumentProcessors[splitArgument[0]](parsedConfig, splitArgument[1]); await argumentProcessors[splitArgument[0]](parsedConfig, splitArgument[1]);
} }
} }
@ -66,7 +61,7 @@ const parseLogoConfigParam = async (rawConfig) => {
} }
return false; return false;
} };
const sendVideoEmbedPage = async ({ params }, res) => { const sendVideoEmbedPage = async ({ params }, res) => {
let { let {

View file

@ -8,13 +8,13 @@ const getClaimIdByChannel = async (channelName, channelClaimId, claimName) => {
let channelId = await chainquery.claim.queries.getLongClaimId(channelName, channelClaimId); let channelId = await chainquery.claim.queries.getLongClaimId(channelName, channelClaimId);
if(channelId === null) { if (channelId === null) {
channelId = await db.Certificate.getLongChannelId(channelName, channelClaimId); channelId = await db.Certificate.getLongChannelId(channelName, channelClaimId);
} }
let claimId = await chainquery.claim.queries.getClaimIdByLongChannelId(channelId, claimName); let claimId = await chainquery.claim.queries.getClaimIdByLongChannelId(channelId, claimName);
if(claimId === null) { if (claimId === null) {
claimId = db.Claim.getClaimIdByLongChannelId(channelId, claimName); claimId = db.Claim.getClaimIdByLongChannelId(channelId, claimName);
} }
@ -24,11 +24,11 @@ const getClaimIdByChannel = async (channelName, channelClaimId, claimName) => {
const getClaimId = async (channelName, channelClaimId, name, claimId) => { const getClaimId = async (channelName, channelClaimId, name, claimId) => {
logger.debug(`getClaimId: ${channelName}, ${channelClaimId}, ${name}, ${claimId})`); logger.debug(`getClaimId: ${channelName}, ${channelClaimId}, ${name}, ${claimId})`);
if (channelName) { if (channelName) {
return await getClaimIdByChannel(channelName, channelClaimId, name); return getClaimIdByChannel(channelName, channelClaimId, name);
} else { } else {
let claimIdResult = await chainquery.claim.queries.getLongClaimId(name, claimId); let claimIdResult = await chainquery.claim.queries.getLongClaimId(name, claimId);
if(!claimIdResult) { if (!claimIdResult) {
claimIdResult = await db.Claim.getLongClaimId(name, claimId); claimIdResult = await db.Claim.getLongClaimId(name, claimId);
} }

View file

@ -48,7 +48,7 @@ function Server () {
app.enable('trust proxy'); app.enable('trust proxy');
app.use((req, res, next) => { app.use((req, res, next) => {
if(req.get('User-Agent') === 'Mozilla/5.0 (Windows NT 5.1; rv:14.0) Gecko/20120405 Firefox/14.0a1') { if (req.get('User-Agent') === 'Mozilla/5.0 (Windows NT 5.1; rv:14.0) Gecko/20120405 Firefox/14.0a1') {
res.status(403).send('<h1>Forbidden</h1>If you are seeing this by mistake, please contact us using <a href="https://chat.lbry.io/">https://chat.lbry.io/</a>'); res.status(403).send('<h1>Forbidden</h1>If you are seeing this by mistake, please contact us using <a href="https://chat.lbry.io/">https://chat.lbry.io/</a>');
res.end(); res.end();
} else { } else {
@ -182,7 +182,7 @@ function Server () {
.then(() => { .then(() => {
logger.info('Spee.ch startup is complete'); logger.info('Spee.ch startup is complete');
setInterval(processTrending, 30 * 60000) // 30 minutes setInterval(processTrending, 30 * 60000); // 30 minutes
}) })
.catch(error => { .catch(error => {
if (error.code === 'ECONNREFUSED') { if (error.code === 'ECONNREFUSED') {

View file

@ -2,19 +2,19 @@ const fs = require('fs');
const logger = require('winston'); const logger = require('winston');
const { publishing: { publishingChannelWhitelist } } = require('@config/siteConfig'); const { publishing: { publishingChannelWhitelist } } = require('@config/siteConfig');
const ipBanFile = './config/ipBan.txt'; const ipBanFile = './site/config/ipBan.txt';
const forbiddenMessage = '<h1>Forbidden</h1>If you are seeing this by mistake, please contact us using <a href="https://chat.lbry.io/">https://chat.lbry.io/</a>'; const forbiddenMessage = '<h1>Forbidden</h1>If you are seeing this by mistake, please contact us using <a href="https://chat.lbry.io/">https://chat.lbry.io/</a>';
let ipCounts = {}; let ipCounts = {};
let blockedAddresses = []; let blockedAddresses = [];
if(fs.existsSync(ipBanFile)) { if (fs.existsSync(ipBanFile)) {
const lineReader = require('readline').createInterface({ const lineReader = require('readline').createInterface({
input: require('fs').createReadStream(ipBanFile), input: require('fs').createReadStream(ipBanFile),
}); });
lineReader.on('line', (line) => { lineReader.on('line', (line) => {
if(line && line !== '') { if (line && line !== '') {
blockedAddresses.push(line); blockedAddresses.push(line);
} }
}); });
@ -23,7 +23,7 @@ if(fs.existsSync(ipBanFile)) {
const autoblockPublishMiddleware = (req, res, next) => { const autoblockPublishMiddleware = (req, res, next) => {
let ip = (req.headers['x-forwarded-for'] || req.connection.remoteAddress).split(/,\s?/)[0]; let ip = (req.headers['x-forwarded-for'] || req.connection.remoteAddress).split(/,\s?/)[0];
if(blockedAddresses.indexOf(ip) !== -1) { if (blockedAddresses.indexOf(ip) !== -1) {
res.status(403).send(forbiddenMessage); res.status(403).send(forbiddenMessage);
res.end(); res.end();
@ -33,15 +33,15 @@ const autoblockPublishMiddleware = (req, res, next) => {
let count = ipCounts[ip] = (ipCounts[ip] || 0) + 1; let count = ipCounts[ip] = (ipCounts[ip] || 0) + 1;
setTimeout(() => { setTimeout(() => {
if(ipCounts[ip]) { if (ipCounts[ip]) {
ipCounts[ip]--; ipCounts[ip]--;
if(ipCounts[ip] === 0) { if (ipCounts[ip] === 0) {
delete ipCounts[ip]; delete ipCounts[ip];
} }
} }
}, 600000 /* 10 minute retainer */) }, 600000 /* 10 minute retainer */);
if(count === 10) { if (count === 10) {
logger.error(`Banning IP: ${ip}`); logger.error(`Banning IP: ${ip}`);
blockedAddresses.push(ip); blockedAddresses.push(ip);
res.status(403).send(forbiddenMessage); res.status(403).send(forbiddenMessage);
@ -51,19 +51,19 @@ const autoblockPublishMiddleware = (req, res, next) => {
} else { } else {
next(); next();
} }
} };
const autoblockPublishBodyMiddleware = (req, res, next) => { const autoblockPublishBodyMiddleware = (req, res, next) => {
if(req.body && publishingChannelWhitelist) { if (req.body && publishingChannelWhitelist) {
let ip = (req.headers['x-forwarded-for'] || req.connection.remoteAddress).split(/,\s?/)[0]; let ip = (req.headers['x-forwarded-for'] || req.connection.remoteAddress).split(/,\s?/)[0];
const { channelName } = req.body; const { channelName } = req.body;
if(channelName && publishingChannelWhitelist.indexOf(channelName) !== -1) { if (channelName && publishingChannelWhitelist.indexOf(channelName.toLowerCase()) !== -1) {
delete ipCounts[ip]; delete ipCounts[ip];
} }
} }
next(); next();
} };
module.exports = { module.exports = {
autoblockPublishMiddleware, autoblockPublishMiddleware,

View file

@ -2,39 +2,39 @@ const logger = require('winston');
const db = require('../models'); const db = require('../models');
const httpContext = require('express-http-context'); const httpContext = require('express-http-context');
function logMetricsMiddleware(req, res, next) { function logMetricsMiddleware (req, res, next) {
res.on('finish', () => { res.on('finish', () => {
const userAgent = req.get('user-agent'); const userAgent = req.get('user-agent');
const routePath = httpContext.get('routePath'); const routePath = httpContext.get('routePath');
let referrer = req.get('referrer'); let referrer = req.get('referrer');
if(referrer && referrer.length > 255) { if (referrer && referrer.length > 255) {
try { try {
// Attempt to "safely" clamp long URLs // Attempt to "safely" clamp long URLs
referrer = /(.*?)#.*/.exec(referrer)[1]; referrer = /(.*?)#.*/.exec(referrer)[1];
} catch(e) { } catch (e) {
// Cheap forced string conversion & clamp // Cheap forced string conversion & clamp
referrer = new String(referrer); referrer = String(referrer);
referrer = referrer.substr(0, 255); referrer = referrer.substr(0, 255);
} }
if(referrer.length > 255) { if (referrer.length > 255) {
logger.warn('Request refferer exceeds 255 characters:', referrer); logger.warn('Request refferer exceeds 255 characters:', referrer);
referrer = referrer.substring(0, 255); referrer = referrer.substring(0, 255);
} }
} }
db.Metrics.create({ db.Metrics.create({
time: Date.now(), time : Date.now(),
isInternal: /node\-fetch/.test(userAgent), isInternal: /node-fetch/.test(userAgent),
isChannel: res.isChannel, isChannel : res.isChannel,
claimId: res.claimId, claimId : res.claimId,
routePath: httpContext.get('routePath'), routePath : httpContext.get('routePath'),
params: JSON.stringify(req.params), params : JSON.stringify(req.params),
ip: req.headers['x-forwarded-for'] || req.connection.remoteAddress, ip : req.headers['x-forwarded-for'] || req.connection.remoteAddress,
request: req.url, request : req.url,
routeData: JSON.stringify(httpContext.get('routeData')), routeData : JSON.stringify(httpContext.get('routeData')),
referrer, referrer,
userAgent, userAgent,
}); });
@ -43,7 +43,7 @@ function logMetricsMiddleware(req, res, next) {
next(); next();
} }
function setRouteDataInContextMiddleware(routePath, routeData) { function setRouteDataInContextMiddleware (routePath, routeData) {
return function (req, res, next) { return function (req, res, next) {
httpContext.set('routePath', routePath); httpContext.set('routePath', routePath);
httpContext.set('routeData', routeData); httpContext.set('routeData', routeData);

View file

@ -3,49 +3,49 @@ module.exports = (sequelize, { BOOLEAN, DATE, STRING }) => {
'Metrics', 'Metrics',
{ {
time: { time: {
type: DATE(6), type : DATE(6),
defaultValue: sequelize.NOW, defaultValue: sequelize.NOW,
}, },
isInternal: { isInternal: {
type: BOOLEAN, type: BOOLEAN,
}, },
isChannel: { isChannel: {
type: BOOLEAN, type : BOOLEAN,
defaultValue: false, defaultValue: false,
}, },
claimId: { claimId: {
type: STRING, type : STRING,
defaultValue: null, defaultValue: null,
}, },
ip: { ip: {
type: STRING, type : STRING,
defaultValue: null, defaultValue: null,
}, },
request: { request: {
type: STRING, type : STRING,
defaultValue: null, defaultValue: null,
}, },
userAgent: { userAgent: {
type: STRING, type : STRING,
defaultValue: null, defaultValue: null,
}, },
referrer: { referrer: {
type: STRING, type : STRING,
defaultValue: null, defaultValue: null,
}, },
routePath: { routePath: {
type: STRING, type : STRING,
defaultValue: null, defaultValue: null,
}, },
params: { params: {
type: STRING, type : STRING,
defaultValue: null, defaultValue: null,
} },
}, },
{ {
freezeTableName: true, freezeTableName: true,
timestamps: false, // don't use default timestamps columns timestamps : false, // don't use default timestamps columns
indexes: [ indexes : [
{ {
fields: ['isInternal', 'isChannel', 'time', 'claimId', 'routePath'], fields: ['isInternal', 'isChannel', 'time', 'claimId', 'routePath'],
}, },

View file

@ -5,43 +5,43 @@ module.exports = (sequelize, { BOOLEAN, DATE, FLOAT, INTEGER, STRING }) => {
'Trending', 'Trending',
{ {
time: { /* TODO: Historical analysis and log roll */ time: { /* TODO: Historical analysis and log roll */
type: DATE(6), type : DATE(6),
defaultValue: sequelize.NOW, defaultValue: sequelize.NOW,
}, },
isChannel: { isChannel: {
type: BOOLEAN, type : BOOLEAN,
defaultValue: false, defaultValue: false,
}, },
claimId: { claimId: {
type: STRING, type : STRING,
defaultValue: null, defaultValue: null,
}, },
publisherId: { publisherId: {
type: STRING, type : STRING,
defaultValue: null, defaultValue: null,
}, },
intervalViews: { intervalViews: {
type: INTEGER, type : INTEGER,
defaultValue: 0, defaultValue: 0,
}, },
weight: { weight: {
type: FLOAT, type : FLOAT,
defaultValue: 0, defaultValue: 0,
}, },
zScore: { zScore: {
type: FLOAT, type : FLOAT,
defaultValue: 0, defaultValue: 0,
}, },
pValue: { pValue: {
type: FLOAT, type : FLOAT,
defaultValue: 0, defaultValue: 0,
}, },
// TODO: Calculate t-statistics // TODO: Calculate t-statistics
}, },
{ {
freezeTableName: true, freezeTableName: true,
timestamps: false, // don't use default timestamps columns timestamps : false, // don't use default timestamps columns
indexes: [ indexes : [
{ {
fields: ['claimId'], fields: ['claimId'],
}, },
@ -55,7 +55,7 @@ module.exports = (sequelize, { BOOLEAN, DATE, FLOAT, INTEGER, STRING }) => {
Trending.getTrendingWeightData = async ({ Trending.getTrendingWeightData = async ({
hours = 2, hours = 2,
minutes = 0, minutes = 0,
limit = 20 limit = 20,
} = {}) => { } = {}) => {
let time = new Date(); let time = new Date();
time.setHours(time.getHours() - hours); time.setHours(time.getHours() - hours);
@ -65,9 +65,9 @@ module.exports = (sequelize, { BOOLEAN, DATE, FLOAT, INTEGER, STRING }) => {
const selectString = 'DISTINCT(claimId), weight'; const selectString = 'DISTINCT(claimId), weight';
const whereString = `isChannel = false and time > '${sqlTime}'`; const whereString = `isChannel = false and time > '${sqlTime}'`;
const query = `SELECT ${selectString} FROM Trending WHERE ${whereString} ORDER BY weight DESC LIMIT ${limit}` const query = `SELECT ${selectString} FROM Trending WHERE ${whereString} ORDER BY weight DESC LIMIT ${limit}`;
return await sequelize.query(query, { type: sequelize.QueryTypes.SELECT }); return sequelize.query(query, { type: sequelize.QueryTypes.SELECT });
}; };
Trending.getTrendingClaims = async () => { Trending.getTrendingClaims = async () => {
@ -77,7 +77,7 @@ module.exports = (sequelize, { BOOLEAN, DATE, FLOAT, INTEGER, STRING }) => {
const trendingClaims = trendingWeightData.reduce((claims, trendingData) => { const trendingClaims = trendingWeightData.reduce((claims, trendingData) => {
trendingClaimIds.push(trendingData.claimId); trendingClaimIds.push(trendingData.claimId);
claims[trendingData.claimId] = { claims[trendingData.claimId] = {
...trendingData ...trendingData,
}; };
return claims; return claims;

View file

@ -9,7 +9,7 @@ const getMean = (numArr) => {
let total = 0; let total = 0;
let length = numArr.length; // store local to reduce potential prop lookups let length = numArr.length; // store local to reduce potential prop lookups
for(let i = 0; i < length; i++) { for (let i = 0; i < length; i++) {
total += numArr[i]; total += numArr[i];
} }
@ -28,17 +28,17 @@ const getInformationFromValues = (numArr) => {
return { return {
mean, mean,
standardDeviation: getStandardDeviation(numArr, mean), standardDeviation: getStandardDeviation(numArr, mean),
} };
}; };
const getZScore = (value, mean, sDeviation) => ( sDeviation !== 0 ? (value - mean) / sDeviation : 0 ); const getZScore = (value, mean, sDeviation) => (sDeviation !== 0 ? (value - mean) / sDeviation : 0);
const getFastPValue = (zScore) => { const getFastPValue = (zScore) => {
if(zScore <= MIN_P) { if (zScore <= MIN_P) {
return 0; return 0;
} }
if(zScore >= MAX_P) { if (zScore >= MAX_P) {
return 1; return 1;
} }
let factorialK = 1; let factorialK = 1;
@ -46,8 +46,8 @@ const getFastPValue = (zScore) => {
let sum = 0; let sum = 0;
let term = 1; let term = 1;
while(Math.abs(term) > MAX_P_PRECISION) { while (Math.abs(term) > MAX_P_PRECISION) {
term = ONE_DIV_SQRT_2PI * Math.pow(-1 , k) * Math.pow(zScore , k) / (2 * k + 1) / Math.pow(2 , k) * Math.pow(zScore, k + 1) / factorialK; term = ONE_DIV_SQRT_2PI * Math.pow(-1, k) * Math.pow(zScore, k) / (2 * k + 1) / Math.pow(2, k) * Math.pow(zScore, k + 1) / factorialK;
sum += term; sum += term;
k++; k++;
factorialK *= k; factorialK *= k;
@ -57,7 +57,6 @@ const getFastPValue = (zScore) => {
return sum; return sum;
}; };
const getWeight = (zScore, pValue) => (zScore * pValue); const getWeight = (zScore, pValue) => (zScore * pValue);
module.exports = { module.exports = {

View file

@ -3,30 +3,30 @@ module.exports = (sequelize, { BOOLEAN, DATE, STRING }) => {
'Views', 'Views',
{ {
time: { time: {
type: DATE(6), type : DATE(6),
defaultValue: sequelize.NOW, defaultValue: sequelize.NOW,
}, },
isChannel: { isChannel: {
type: BOOLEAN, type : BOOLEAN,
defaultValue: false, defaultValue: false,
}, },
claimId: { claimId: {
type: STRING, type : STRING,
defaultValue: null, defaultValue: null,
}, },
publisherId: { publisherId: {
type: STRING, type : STRING,
defaultValue: null, defaultValue: null,
}, },
ip: { ip: {
type: STRING, type : STRING,
defaultValue: null, defaultValue: null,
}, },
}, },
{ {
freezeTableName: true, freezeTableName: true,
timestamps: false, // don't use default timestamps columns timestamps : false, // don't use default timestamps columns
indexes: [ indexes : [
{ {
fields: ['time', 'isChannel', 'claimId', 'publisherId', 'ip'], fields: ['time', 'isChannel', 'claimId', 'publisherId', 'ip'],
}, },
@ -59,8 +59,8 @@ module.exports = (sequelize, { BOOLEAN, DATE, STRING }) => {
claimId, claimId,
}, },
distinct: true, distinct: true,
col: 'ip' col : 'ip',
}) });
}; };
return Views; return Views;

View file

@ -26,42 +26,40 @@ const getTorList = require('../../controllers/api/tor');
const getBlockedList = require('../../controllers/api/blocked'); const getBlockedList = require('../../controllers/api/blocked');
const getOEmbedData = require('../../controllers/api/oEmbed'); const getOEmbedData = require('../../controllers/api/oEmbed');
module.exports = { module.exports = {
// homepage routes // homepage routes
'/api/homepage/data/channels': { controller: [ torCheckMiddleware, channelData ] }, '/api/homepage/data/channels' : { controller: [ torCheckMiddleware, channelData ] },
// channel routes // channel routes
'/api/channel/availability/:name': { controller: [ torCheckMiddleware, channelAvailability ] }, '/api/channel/availability/:name' : { controller: [ torCheckMiddleware, channelAvailability ] },
'/api/channel/short-id/:longId/:name': { controller: [ torCheckMiddleware, channelShortId ] }, '/api/channel/short-id/:longId/:name' : { controller: [ torCheckMiddleware, channelShortId ] },
'/api/channel/data/:channelName/:channelClaimId': { controller: [ torCheckMiddleware, channelData ] }, '/api/channel/data/:channelName/:channelClaimId' : { controller: [ torCheckMiddleware, channelData ] },
'/api/channel/claims/:channelName/:channelClaimId/:page': { controller: [ torCheckMiddleware, channelClaims ] }, '/api/channel/claims/:channelName/:channelClaimId/:page': { controller: [ torCheckMiddleware, channelClaims ] },
// sepcial routes // sepcial routes
'/api/special/:name/:page': { controller: [ torCheckMiddleware, specialClaims ] }, '/api/special/:name/:page': { controller: [ torCheckMiddleware, specialClaims ] },
// claim routes // claim routes
'/api/claim/availability/:name': { controller: [ torCheckMiddleware, claimAvailability ] }, '/api/claim/availability/:name' : { controller: [ torCheckMiddleware, claimAvailability ] },
'/api/claim/data/:claimName/:claimId': { controller: [ torCheckMiddleware, claimData ] }, '/api/claim/data/:claimName/:claimId' : { controller: [ torCheckMiddleware, claimData ] },
'/api/claim/get/:name/:claimId': { controller: [ torCheckMiddleware, claimGet ] }, '/api/claim/get/:name/:claimId' : { controller: [ torCheckMiddleware, claimGet ] },
'/api/claim/list/:name': { controller: [ torCheckMiddleware, claimList ] }, '/api/claim/list/:name' : { controller: [ torCheckMiddleware, claimList ] },
'/api/claim/long-id': { method: 'post', controller: [ torCheckMiddleware, claimLongId ] }, // note: should be a 'get' '/api/claim/long-id' : { method: 'post', controller: [ torCheckMiddleware, claimLongId ] }, // note: should be a 'get'
'/api/claim/publish': { method: 'post', controller: [ torCheckMiddleware, autoblockPublishMiddleware, multipartMiddleware, autoblockPublishBodyMiddleware, claimPublish ] }, '/api/claim/publish' : { method: 'post', controller: [ torCheckMiddleware, autoblockPublishMiddleware, multipartMiddleware, autoblockPublishBodyMiddleware, claimPublish ] },
'/api/claim/update': { method: 'post', controller: [ torCheckMiddleware, multipartMiddleware, claimUpdate ] }, '/api/claim/update' : { method: 'post', controller: [ torCheckMiddleware, multipartMiddleware, claimUpdate ] },
'/api/claim/abandon': { method: 'post', controller: [ torCheckMiddleware, multipartMiddleware, claimAbandon ] }, '/api/claim/abandon' : { method: 'post', controller: [ torCheckMiddleware, multipartMiddleware, claimAbandon ] },
'/api/claim/resolve/:name/:claimId': { controller: [ torCheckMiddleware, claimResolve ] }, '/api/claim/resolve/:name/:claimId' : { controller: [ torCheckMiddleware, claimResolve ] },
'/api/claim/short-id/:longId/:name': { controller: [ torCheckMiddleware, claimShortId ] }, '/api/claim/short-id/:longId/:name' : { controller: [ torCheckMiddleware, claimShortId ] },
'/api/claim/views/:claimId': { controller: [ torCheckMiddleware, claimViews ] }, '/api/claim/views/:claimId' : { controller: [ torCheckMiddleware, claimViews ] },
// file routes // file routes
'/api/file/availability/:name/:claimId': { controller: [ torCheckMiddleware, fileAvailability ] }, '/api/file/availability/:name/:claimId': { controller: [ torCheckMiddleware, fileAvailability ] },
// user routes // user routes
'/api/user/password/': { method: 'put', controller: [ torCheckMiddleware, userPassword ] }, '/api/user/password/' : { method: 'put', controller: [ torCheckMiddleware, userPassword ] },
// configs // configs
'/api/config/site/publishing': { controller: [ torCheckMiddleware, publishingConfig ] }, '/api/config/site/publishing' : { controller: [ torCheckMiddleware, publishingConfig ] },
// tor // tor
'/api/tor': { controller: [ torCheckMiddleware, getTorList ] }, '/api/tor' : { controller: [ torCheckMiddleware, getTorList ] },
// blocked // blocked
'/api/blocked': { controller: [ torCheckMiddleware, getBlockedList ] }, '/api/blocked' : { controller: [ torCheckMiddleware, getBlockedList ] },
// open embed // open embed
'/api/oembed': { controller: [ torCheckMiddleware, getOEmbedData ] }, '/api/oembed' : { controller: [ torCheckMiddleware, getOEmbedData ] },
}; };

View file

@ -7,5 +7,5 @@ const Sagas = require('@sagas').default;
module.exports = { module.exports = {
'/:identifier/:claim': { controller: serveByIdentifierAndClaim, action: Actions.onHandleShowPageUri, saga: Sagas.handleShowPageUri }, '/:identifier/:claim': { controller: serveByIdentifierAndClaim, action: Actions.onHandleShowPageUri, saga: Sagas.handleShowPageUri },
'/:claim': { controller: serveByClaim, action: Actions.onHandleShowPageUri, saga: Sagas.handleShowPageUri }, '/:claim' : { controller: serveByClaim, action: Actions.onHandleShowPageUri, saga: Sagas.handleShowPageUri },
}; };

View file

@ -6,7 +6,7 @@ const handleUserRequest = require('../../controllers/auth/user');
module.exports = { module.exports = {
'/signup': { method: 'post', controller: [ speechPassport.authenticate('local-signup'), handleSignupRequest ] }, '/signup': { method: 'post', controller: [ speechPassport.authenticate('local-signup'), handleSignupRequest ] },
'/auth': { method: 'post', controller: handleLoginRequest }, '/auth' : { method: 'post', controller: handleLoginRequest },
'/logout': { controller: handleLogoutRequest }, '/logout': { controller: handleLogoutRequest },
'/user': { controller: handleUserRequest }, '/user' : { controller: handleUserRequest },
}; };

View file

@ -7,15 +7,15 @@ const Actions = require('@actions').default;
const Sagas = require('@sagas').default; const Sagas = require('@sagas').default;
module.exports = { module.exports = {
'/': { controller: handlePageRequest, action: Actions.onHandleShowHomepage, saga: Sagas.handleShowHomepage }, '/' : { controller: handlePageRequest, action: Actions.onHandleShowHomepage, saga: Sagas.handleShowHomepage },
'/login': { controller: handlePageRequest }, '/login' : { controller: handlePageRequest },
'/about': { controller: handlePageRequest }, '/about' : { controller: handlePageRequest },
'/tos': { controller: handlePageRequest }, '/tos' : { controller: handlePageRequest },
'/faq': { controller: handlePageRequest }, '/faq' : { controller: handlePageRequest },
'/trending': { controller: redirect('/popular') }, '/trending' : { controller: redirect('/popular') },
'/popular': { controller: handlePageRequest }, '/popular' : { controller: handlePageRequest },
'/new': { controller: handlePageRequest }, '/new' : { controller: handlePageRequest },
'/edit/:claimId': { controller: handlePageRequest }, '/edit/:claimId' : { controller: handlePageRequest },
'/multisite': { controller: handlePageRequest }, '/multisite' : { controller: handlePageRequest },
'/video-embed/:name/:claimId/:config?': { controller: handleVideoEmbedRequest }, // for twitter '/video-embed/:name/:claimId/:config?': { controller: handleVideoEmbedRequest }, // for twitter
}; };

View file

@ -6,8 +6,8 @@ module.exports = async (data) => {
const certificateId = data.publisher_id || data.certificateId; const certificateId = data.publisher_id || data.certificateId;
let channelName = data.channelName; let channelName = data.channelName;
if(certificateId && !channelName) { if (certificateId && !channelName) {
channelName = await chainquery.claim.queries.getClaimChannelName(certificateId).catch(()=>{}); channelName = await chainquery.claim.queries.getClaimChannelName(certificateId).catch(() => {});
} }
let channelShortId = null; let channelShortId = null;
@ -16,17 +16,17 @@ module.exports = async (data) => {
} }
return ({ return ({
name: data.name, name : data.name,
title: data.title, title : data.title,
certificateId, certificateId,
channelName, channelName,
channelShortId, channelShortId,
contentType: data.content_type || data.contentType, contentType: data.content_type || data.contentType,
claimId: data.claim_id || data.claimId, claimId : data.claim_id || data.claimId,
fileExt: data.generated_extension || data.fileExt, fileExt : data.generated_extension || data.fileExt,
description: data.description, description: data.description,
thumbnail: data.generated_thumbnail || data.thumbnail_url || data.thumbnail, thumbnail : data.generated_thumbnail || data.thumbnail_url || data.thumbnail,
outpoint: data.transaction_hash_id || data.outpoint, outpoint : data.transaction_hash_id || data.outpoint,
host, host,
}) });
} };

View file

@ -1,6 +1,6 @@
module.exports = function(req) { module.exports = function (req) {
let reqIp = req.connection.remoteAddress; let reqIp = req.connection.remoteAddress;
let host = req.get('host'); let host = req.get('host');
return reqIp === '127.0.0.1' || reqIp === '::ffff:127.0.0.1' || reqIp === '::1' || host.indexOf('localhost') !== -1; return reqIp === '127.0.0.1' || reqIp === '::ffff:127.0.0.1' || reqIp === '::1' || host.indexOf('localhost') !== -1;
} };

View file

@ -13,7 +13,7 @@ module.exports = async () => {
const claims = await db.Trending.getTrendingClaims(); const claims = await db.Trending.getTrendingClaims();
const claimViews = await db.Views.getUniqueViews(); const claimViews = await db.Views.getUniqueViews();
if(claimViews.length <= 1) { if (claimViews.length <= 1) {
return; return;
} }
@ -26,7 +26,7 @@ module.exports = async () => {
standardDeviation, standardDeviation,
} = getInformationFromValues(viewsNumArray); } = getInformationFromValues(viewsNumArray);
for(let i = 0; i < claimViews.length; i++) { for (let i = 0; i < claimViews.length; i++) {
let claimViewsEntry = claimViews[i]; let claimViewsEntry = claimViews[i];
const { const {
@ -41,9 +41,9 @@ module.exports = async () => {
const trendingData = { const trendingData = {
time, time,
isChannel: claimViewsEntry.isChannel, isChannel : claimViewsEntry.isChannel,
claimId: claimViewsEntry.claimId, claimId : claimViewsEntry.claimId,
publisherId: claimViewsEntry.publisherId, publisherId : claimViewsEntry.publisherId,
intervalViews: claimViewsEntry.views, intervalViews: claimViewsEntry.views,
weight, weight,
zScore, zScore,
@ -52,7 +52,7 @@ module.exports = async () => {
db.Trending.create(trendingData); db.Trending.create(trendingData);
} }
} catch(e) { } catch (e) {
logger.error('Error processing trending content:', e); logger.error('Error processing trending content:', e);
} }
} };

8288
yarn.lock

File diff suppressed because it is too large Load diff