Merge pull request #947 from lbryio/staging

cuts antimedia-staging from staging
commit 07bfba4b45 by jessopb, 2019-03-01 21:17:54 -05:00, committed by GitHub
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
39 changed files with 1194 additions and 729 deletions


@ -47,6 +47,7 @@ For a closed, custom-hosted and branded example, check out https://lbry.theantim
- `./lbrynet account_balance` gets your balance (initially 0.0) - `./lbrynet account_balance` gets your balance (initially 0.0)
- `./lbrynet address_list` gets addresses you can use to receive LBC - `./lbrynet address_list` gets addresses you can use to receive LBC
- [FFmpeg](https://www.ffmpeg.org/download.html) - [FFmpeg](https://www.ffmpeg.org/download.html)
- [ImageMagick](https://packages.ubuntu.com/xenial/graphics/imagemagick)
- Spee.ch (below) - Spee.ch (below)
- pm2 (optional) process manager such as pm2 to run speech server.js - pm2 (optional) process manager such as pm2 to run speech server.js
- http proxy server e.g. caddy, nginx, or traefik, to forward 80/443 to speech port 3000 - http proxy server e.g. caddy, nginx, or traefik, to forward 80/443 to speech port 3000
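For the proxy requirement above, a minimal nginx sketch (the domain name is a placeholder; port 3000 is the speech port named above, and TLS for 443 would be layered on top of this):

```
server {
    listen 80;
    server_name spee.example.com;          # hypothetical domain

    location / {
        # forward incoming traffic to the speech server.js process on port 3000
        proxy_pass http://127.0.0.1:3000;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
    }
}
```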
@ -118,7 +119,7 @@ Instructions are coming at [lbry-docker] to install your own chainquery instance
## Settings ## Settings
There are a number of settings available for customizing the behavior of your installation. There are a number of settings available for customizing the behavior of your installation.
_INSERT LINK TO SETTINGS.MD_ [Here](https://github.com/lbryio/spee.ch/blob/master/docs/settings.md) is some documentation on them.
## API ## API
@ -260,9 +261,11 @@ Spee.ch has a few types of URL formats that return different assets from the LBR
- retrieve the controlling `LBRY` claim: - retrieve the controlling `LBRY` claim:
- https://spee.ch/`claim` - https://spee.ch/`claim`
- https://spee.ch/`claim`.`ext` (serve) - https://spee.ch/`claim`.`ext` (serve)
- https://spee.ch/`claim`.`ext`&`querystring` (serve transformed)
- retrieve a specific `LBRY` claim: - retrieve a specific `LBRY` claim:
- https://spee.ch/`claim_id`/`claim` - https://spee.ch/`claim_id`/`claim`
- https://spee.ch/`claim_id`/`claim`.`ext` (serve) - https://spee.ch/`claim_id`/`claim`.`ext` (serve)
- https://spee.ch/`claim_id`/`claim`.`ext`&`querystring` (serve transformed)
- retrieve all contents for the controlling `LBRY` channel - retrieve all contents for the controlling `LBRY` channel
- https://spee.ch/`@channel` - https://spee.ch/`@channel`
- a specific `LBRY` channel - a specific `LBRY` channel
@ -270,9 +273,15 @@ Spee.ch has a few types of URL formats that return different assets from the LBR
- retrieve a specific claim within the controlling `LBRY` channel - retrieve a specific claim within the controlling `LBRY` channel
- https://spee.ch/`@channel`/`claim` - https://spee.ch/`@channel`/`claim`
- https://spee.ch/`@channel`/`claim`.`ext` (serve) - https://spee.ch/`@channel`/`claim`.`ext` (serve)
- https://spee.ch/`@channel`/`claim`.`ext`&`querystring` (serve)
- retrieve a specific claim within a specific `LBRY` channel - retrieve a specific claim within a specific `LBRY` channel
- https://spee.ch/`@channel`:`channel_id`/`claim` - https://spee.ch/`@channel`:`channel_id`/`claim`
- https://spee.ch/`@channel`:`channel_id`/`claim`.`ext` (serve) - https://spee.ch/`@channel`:`channel_id`/`claim`.`ext` (serve)
- https://spee.ch/`@channel`:`channel_id`/`claim`.`ext`&`querystring` (serve)
- `querystring` can include the following transformation values separated by `&`
- h=`number` (defines height)
- w=`number` (defines width)
- t=`crop` or `stretch` (defines transformation - missing implies constrained proportions)
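For example, an image claim (hypothetical name `example-image`, served with a `jpg` extension) could be requested scaled to fit within 400×300, or center-cropped to exactly 400×300:

```
https://spee.ch/example-image.jpg?h=300&w=400
https://spee.ch/example-image.jpg?h=300&w=400&t=crop
```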
### Dependencies ### Dependencies

changelog.md (new file)

@ -46,12 +46,13 @@
"customByContentType": { "customByContentType": {
"application/octet-stream": 50000000 "application/octet-stream": 50000000
} }
}, }
"maxSizeImage": 10000000,
"maxSizeGif": 50000000,
"maxSizeVideo": 50000000
}, },
"serving": { "serving": {
"dynamicFileSizing": {
"enabled": true,
"maxDimension": 2000
},
"markdownSettings": { "markdownSettings": {
"skipHtmlMain": true, "skipHtmlMain": true,
"escapeHtmlMain": true, "escapeHtmlMain": true,
@ -86,24 +87,21 @@
"code", "code",
"html", "html",
"parsedHtml" "parsedHtml"
], ]
"disallowedTypesMain": [],
"disallowedTypesDescriptions": ["image", "html"],
"disallowedTypesExample": ["image", "html"]
}, },
"customFileExtensions": { "customFileExtensions": {
"application/x-troff-man": ".man", "application/x-troff-man": "man",
"application/x-troff-me": ".me", "application/x-troff-me": "me",
"application/x-mif": ".mif", "application/x-mif": "mif",
"application/x-troff-ms": ".ms", "application/x-troff-ms": "ms",
"application/x-troff": ".roff", "application/x-troff": "roff",
"application/x-python-code": ".pyc", "application/x-python-code": "pyc",
"text/x-python": ".py", "text/x-python": "py",
"application/x-pn-realaudio": ".ram", "application/x-pn-realaudio": "ram",
"application/x-sgml": ".sgm", "application/x-sgml": "sgm",
"model/stl": ".stl", "model/stl": "stl",
"image/pict": ".pct", "image/pict": "pct",
"text/xul": ".xul", "text/xul": "xul",
"text/x-go": "go" "text/x-go": "go"
} }
}, },


@ -8,7 +8,7 @@
height: 280px; height: 280px;
&:hover { &:hover {
border: 1px solid $highlight-border-color; border: 1px solid $highlight-border-color;
color: #000000; color: $primary-color;
} }
} }


@ -8,6 +8,7 @@ input {
border: 0; border: 0;
background-color: $background-color; background-color: $background-color;
display: inline-block; display: inline-block;
color: $text-color
} }
.input-slider { .input-slider {


@ -9,7 +9,6 @@ a, a:visited {
.link--nav { .link--nav {
color: $text-color; color: $text-color;
border-bottom: 2px solid white;
&:hover { &:hover {
color: $primary-color; color: $primary-color;
} }
@ -18,4 +17,4 @@ a, a:visited {
.link--nav-active { .link--nav-active {
border-bottom: 2px solid $primary-color; border-bottom: 2px solid $primary-color;
} }


@ -1,15 +1,24 @@
.nav-bar { .nav-bar {
box-sizing: border-box; box-sizing: border-box;
padding: $thin-padding $primary-padding; padding: $thin-padding $primary-padding;
background: $base-color; background: $chrome-color;
flex: 0 1 auto; flex: 0 1 auto;
width: 100%; width: 100%;
border-bottom: $subtle-border; border-bottom: $subtle-border;
color: $primary-color;
@media (max-width: $break-point-mobile) { @media (max-width: $break-point-mobile) {
margin-left: 15px; margin-left: 15px;
margin-right: 15px; margin-right: 15px;
} }
input {
background: $chrome-color;
}
select {
background: $chrome-color;
color: $text-color;
}
} }
.nav-bar-link { .nav-bar-link {


@ -1,6 +1,7 @@
select { select {
margin: 0; margin: 0;
display: inline-block; display: inline-block;
background: $base-color; background: $background-color;
border: 0; border: 0;
color: $text-color;
} }


@ -1,7 +1,7 @@
//backgrounds //backgrounds
$base-color: white; //default white $base-color: white; //default white
$card-color: white; //default white $card-color: white; //default white
$chrome-color: lightgray; //default white (navbar) $chrome-color: white; //default white (navbar)
$blockquote-background: #EEEEFF; $blockquote-background: #EEEEFF;
$background-color: $base-color; $background-color: $base-color;
@ -19,7 +19,7 @@ $blockquote-text: $text-color;
$grey: #9095A5; $grey: #9095A5;
$help-color: $grey; $help-color: $grey;
$subtle-border-color: #DDD; $subtle-border-color: #DDD;
$highlight-border-color: #333; $highlight-border-color: #777;
$shadow-color: rgba(169, 173, 186, 0.2); $shadow-color: rgba(169, 173, 186, 0.2);
$subtle-border: 1px dashed $subtle-border-color; $subtle-border: 1px dashed $subtle-border-color;
$grey-border: $subtle-border-color; //factor this out for all customers $grey-border: $subtle-border-color; //factor this out for all customers


@ -7,9 +7,6 @@ const ChannelSelectDropdown = ({ selectedChannel, handleSelection, loggedInChann
id='channel-name-select' id='channel-name-select'
value={selectedChannel} value={selectedChannel}
onChange={handleSelection}> onChange={handleSelection}>
{ loggedInChannelName && (
<option value={loggedInChannelName} >{loggedInChannelName}</option>
)}
<option value={LOGIN}>Existing</option> <option value={LOGIN}>Existing</option>
<option value={CREATE}>New</option> <option value={CREATE}>New</option>
</select> </select>


@ -1,23 +1,28 @@
import React from 'react'; import React from 'react';
import RowLabeled from '@components/RowLabeled'; import RowLabeled from '@components/RowLabeled';
import Label from '@components/Label'; import Label from '@components/Label';
import { LICENSES } from '@clientConstants/publish_license_urls';
const PublishLicenseInput = ({ handleSelect }) => { const PublishLicenseInput = ({ handleSelect, license }) => {
return ( return (
<RowLabeled <RowLabeled
label={ label={
<Label value={'License:'} /> <Label value={'License'} />
} }
content={ content={
<select <select
type='text' type='text'
name='license' name='license'
id='publish-license' id='publish-license'
value={license}
onChange={handleSelect} onChange={handleSelect}
> >
<option value=''>Unspecified</option> <option value=''>Unspecified</option>
<option value='Public Domain'>Public Domain</option> {
<option value='Creative Commons'>Creative Commons</option> LICENSES.map(function(item, i){
return <option key={item + 'license key'} value={item}>{item}</option>;
})
}
</select> </select>
} }
/> />


@ -0,0 +1,32 @@
import React from 'react';
import RowLabeled from '@components/RowLabeled';
import Label from '@components/Label';
import { CC_LICENSES } from '@clientConstants/publish_license_urls';
const PublishLicenseUrlInput = ({ handleSelect, licenseUrl }) => {
return (
<RowLabeled
label={
<Label value={'License Url'} />
}
content={
<select
type='text'
name='licenseUrl'
id='publish-license-url'
value={licenseUrl}
onChange={handleSelect}
>
<option value=''>Unspecified</option>
{
CC_LICENSES.map(function(item, i){
return <option key={item.url} value={item.url}>{item.value}</option>
})
}
</select>
}
/>
);
};
export default PublishLicenseUrlInput;


@ -0,0 +1,33 @@
export const CC_LICENSES = [
{
value: 'CC Attr. 4.0 Int',
url: 'https://creativecommons.org/licenses/by/4.0/legalcode',
},
{
value: 'CC Attr-ShareAlike 4.0 Int',
url: 'https://creativecommons.org/licenses/by-sa/4.0/legalcode',
},
{
value: 'CC Attr-NoDerivatives 4.0 Int',
url: 'https://creativecommons.org/licenses/by-nd/4.0/legalcode',
},
{
value: 'CC Attr-NonComm 4.0 Int',
url: 'https://creativecommons.org/licenses/by-nc/4.0/legalcode',
},
{
value: 'CC Attr-NonComm-ShareAlike 4.0 Int',
url: 'https://creativecommons.org/licenses/by-nc-sa/4.0/legalcode',
},
{
value: 'CC Attr-NonComm-NoDerivatives 4.0 Int',
url: 'https://creativecommons.org/licenses/by-nc-nd/4.0/legalcode',
},
];
export const LICENSES = ['Public Domain', 'Other', 'Copyright', 'Creative Commons'];
export const PUBLIC_DOMAIN = 'Public Domain';
export const OTHER = 'other';
export const COPYRIGHT = 'copyright';
export const CREATIVE_COMMONS = 'Creative Commons';


@ -7,7 +7,7 @@ import AssetShareButtons from '@components/AssetShareButtons';
import ClickToCopy from '@components/ClickToCopy'; import ClickToCopy from '@components/ClickToCopy';
import siteConfig from '@config/siteConfig.json'; import siteConfig from '@config/siteConfig.json';
import createCanonicalLink from '@globalutils/createCanonicalLink'; import createCanonicalLink from '@globalutils/createCanonicalLink';
import AssetInfoFooter from '../../components/AssetInfoFooter/index'; import AssetInfoFooter from '@components/AssetInfoFooter/index';
import { createPermanentURI } from '@clientutils/createPermanentURI'; import { createPermanentURI } from '@clientutils/createPermanentURI';
import ReactMarkdown from 'react-markdown'; import ReactMarkdown from 'react-markdown';
@ -18,7 +18,20 @@ class AssetInfo extends React.Component {
render () { render () {
const { editable, asset } = this.props; const { editable, asset } = this.props;
const { claimViews, claimData } = asset; const { claimViews, claimData } = asset;
const { channelName, claimId, channelShortId, description, name, fileExt, contentType, host, certificateId } = claimData; const {
channelName,
claimId,
channelShortId,
description,
name,
fileExt,
contentType,
host,
certificateId,
license,
licenseUrl,
transactionTime
} = claimData;
const canonicalUrl = createCanonicalLink({ asset: { ...claimData, shortId: asset.shortId }}); const canonicalUrl = createCanonicalLink({ asset: { ...claimData, shortId: asset.shortId }});
const assetCanonicalUrl = `${host}${canonicalUrl}`; const assetCanonicalUrl = `${host}${canonicalUrl}`;
@ -55,7 +68,7 @@ class AssetInfo extends React.Component {
{editable && ( {editable && (
<RowLabeled <RowLabeled
label={<Label value={'Edit'} />} label={<Label value={'Edit'} />}
content={<Link to={`/edit${canonicalUrl}`}>{name}</Link>} content={<Link className='link--primary' to={`/edit${canonicalUrl}`}>{name}</Link>}
/> />
)} )}
{channelName && ( {channelName && (
@ -71,19 +84,35 @@ class AssetInfo extends React.Component {
} }
/> />
)} )}
{claimViews ? ( <SpaceBetween>
<RowLabeled {claimViews ? (
label={ <RowLabeled
<Label value={'Views'} /> label={
} <Label value={'Views'} />
content={ }
<span className='text'> content={
{claimViews} <span className='text'>
</span> {claimViews}
} </span>
/> }
) : null} />
) : null}
{license && (
<RowLabeled
label={
<Label value={'License'} />
}
content={
<div className='text'>
{licenseUrl ? (
<a className={'link--primary'} href={licenseUrl} target={'_blank'}>{license}</a>
) : (
<span>{license}</span>
)}
</div>
}
/>
)}
</SpaceBetween>
<RowLabeled <RowLabeled
label={ label={
<Label value={'Share'} /> <Label value={'Share'} />
@ -151,7 +180,7 @@ class AssetInfo extends React.Component {
<a <a
className={'link--primary'} className={'link--primary'}
href={`${assetCanonicalUrl}.${fileExt}`} href={`${assetCanonicalUrl}.${fileExt}`}
download={name} download={`${name}.${fileExt}`}
> >
Download Download
</a> </a>


@ -1,14 +1,15 @@
import {connect} from 'react-redux'; import { connect } from 'react-redux';
import {updateMetadata, toggleMetadataInputs} from '../../actions/publish'; import { updateMetadata, toggleMetadataInputs } from '../../actions/publish';
import View from './view'; import View from './view';
const mapStateToProps = ({ publish }) => { const mapStateToProps = ({ publish }) => {
return { return {
showMetadataInputs: publish.showMetadataInputs, showMetadataInputs: publish.showMetadataInputs,
description : publish.metadata.description, description: publish.metadata.description,
license : publish.metadata.license, license: publish.metadata.license,
nsfw : publish.metadata.nsfw, licenseUrl: publish.metadata.licenseUrl,
isUpdate : publish.isUpdate, nsfw: publish.metadata.nsfw,
isUpdate: publish.isUpdate,
}; };
}; };
@ -17,10 +18,13 @@ const mapDispatchToProps = dispatch => {
onMetadataChange: (name, value) => { onMetadataChange: (name, value) => {
dispatch(updateMetadata(name, value)); dispatch(updateMetadata(name, value));
}, },
onToggleMetadataInputs: (value) => { onToggleMetadataInputs: value => {
dispatch(toggleMetadataInputs(value)); dispatch(toggleMetadataInputs(value));
}, },
}; };
}; };
export default connect(mapStateToProps, mapDispatchToProps)(View); export default connect(
mapStateToProps,
mapDispatchToProps
)(View);


@ -1,9 +1,9 @@
import React from 'react'; import React from 'react';
import PublishDescriptionInput from '@components/PublishDescriptionInput'; import PublishDescriptionInput from '@components/PublishDescriptionInput';
import PublishLicenseInput from '@components/PublishLicenseInput'; import PublishLicenseInput from '@components/PublishLicenseInput';
import PublishLicenseUrlInput from '@components/PublishLicenseUrlInput';
import PublishNsfwInput from '@components/PublishNsfwInput'; import PublishNsfwInput from '@components/PublishNsfwInput';
import ButtonSecondary from '@components/ButtonSecondary'; import ButtonSecondary from '@components/ButtonSecondary';
import Row from '@components/Row';
class PublishMetadataInputs extends React.Component { class PublishMetadataInputs extends React.Component {
constructor (props) { constructor (props) {
@ -25,22 +25,32 @@ class PublishMetadataInputs extends React.Component {
const name = event.target.name; const name = event.target.name;
const selectedOption = event.target.selectedOptions[0].value; const selectedOption = event.target.selectedOptions[0].value;
this.props.onMetadataChange(name, selectedOption); this.props.onMetadataChange(name, selectedOption);
if (name === 'license' && selectedOption !== 'Creative Commons'){
this.props.onMetadataChange('licenseUrl', '');
}
} }
render () { render () {
const { showMetadataInputs, description, isUpdate, nsfw } = this.props; const { showMetadataInputs, description, isUpdate, nsfw, license, licenseUrl } = this.props;
return ( return (
<div> <div>
{(showMetadataInputs || isUpdate) && ( {(showMetadataInputs || isUpdate) && (
<React.Fragment> <React.Fragment>
<PublishDescriptionInput <PublishDescriptionInput
description={this.props.description} description={description}
handleInput={this.handleInput} handleInput={this.handleInput}
/> />
<PublishLicenseInput <PublishLicenseInput
handleSelect={this.handleSelect} handleSelect={this.handleSelect}
license={license}
/> />
{ (this.props.license === 'Creative Commons') && (
<PublishLicenseUrlInput
handleSelect={this.handleSelect}
licenseUrl={licenseUrl}
/>
)}
<PublishNsfwInput <PublishNsfwInput
nsfw={this.props.nsfw} nsfw={nsfw}
handleInput={this.handleInput} handleInput={this.handleInput}
/> />
</React.Fragment> </React.Fragment>


@ -9,7 +9,7 @@ class EditPage extends React.Component {
onHandleShowPageUri(match.params); onHandleShowPageUri(match.params);
setUpdateTrue(); setUpdateTrue();
if (asset) { if (asset) {
['title', 'description', 'license', 'nsfw'].forEach(meta => updateMetadata(meta, asset.claimData[meta])); ['title', 'description', 'license', 'licenseUrl', 'nsfw'].forEach(meta => updateMetadata(meta, asset.claimData[meta]));
} }
setHasChanged(false); setHasChanged(false);
} }


@ -19,40 +19,42 @@ if (siteConfig) {
// create initial state // create initial state
const initialState = { const initialState = {
disabled : disabledConfig, disabled: disabledConfig,
disabledMessage : disabledMessageConfig, disabledMessage: disabledMessageConfig,
publishInChannel : false, publishInChannel: false,
selectedChannel : LOGIN, selectedChannel: LOGIN,
showMetadataInputs: false, showMetadataInputs: false,
status : { status: {
status : null, status: null,
message: null, message: null,
}, },
error: { error: {
file : null, file: null,
url : null, url: null,
channel: null, channel: null,
}, },
file : null, file: null,
claim : '', claim: '',
metadata: { metadata: {
title : '', title: '',
description: '', description: '',
license : '', license: '',
nsfw : false, licenseUrl: '',
nsfw: false,
}, },
isUpdate : false, isUpdate: false,
hasChanged: false, hasChanged: false,
thumbnail : null, thumbnail: null,
thumbnailChannel, thumbnailChannel,
thumbnailChannelId, thumbnailChannelId,
}; };
export default function (state = initialState, action) { export default function(state = initialState, action) {
switch (action.type) { switch (action.type) {
case actions.FILE_SELECTED: case actions.FILE_SELECTED:
return Object.assign({}, state.isUpdate ? state : initialState, { // note: clears to initial state return Object.assign({}, state.isUpdate ? state : initialState, {
file : action.data, // note: clears to initial state
file: action.data,
hasChanged: true, hasChanged: true,
}); });
case actions.FILE_CLEAR: case actions.FILE_CLEAR:
@ -66,13 +68,13 @@ export default function (state = initialState, action) {
}); });
case actions.CLAIM_UPDATE: case actions.CLAIM_UPDATE:
return Object.assign({}, state, { return Object.assign({}, state, {
claim : action.data, claim: action.data,
hasChanged: true, hasChanged: true,
}); });
case actions.SET_PUBLISH_IN_CHANNEL: case actions.SET_PUBLISH_IN_CHANNEL:
return Object.assign({}, state, { return Object.assign({}, state, {
publishInChannel: action.channel, publishInChannel: action.channel,
hasChanged : true, hasChanged: true,
}); });
case actions.PUBLISH_STATUS_UPDATE: case actions.PUBLISH_STATUS_UPDATE:
return Object.assign({}, state, { return Object.assign({}, state, {
@ -96,7 +98,7 @@ export default function (state = initialState, action) {
case actions.THUMBNAIL_NEW: case actions.THUMBNAIL_NEW:
return { return {
...state, ...state,
thumbnail : action.data, thumbnail: action.data,
hasChanged: true, hasChanged: true,
}; };
case actions.SET_UPDATE_TRUE: case actions.SET_UPDATE_TRUE:
@ -112,4 +114,4 @@ export default function (state = initialState, action) {
default: default:
return state; return state;
} }
}; }


@ -1,9 +1,16 @@
export const createPublishMetadata = (claim, { type }, { title, description, license, nsfw }, publishInChannel, selectedChannel) => { export const createPublishMetadata = (
claim,
{ type },
{ title, description, license, licenseUrl, nsfw },
publishInChannel,
selectedChannel
) => {
let metadata = { let metadata = {
name: claim, name: claim,
title, title,
description, description,
license, license,
licenseUrl,
nsfw, nsfw,
type, type,
}; };


@ -26,7 +26,6 @@ PUBLISHING:
"primaryClaimAddress": null, - generally supplied by your lbrynet sdk "primaryClaimAddress": null, - generally supplied by your lbrynet sdk
"uploadDirectory": "/home/lbry/Uploads", - lbrynet sdk will know your uploads are here "uploadDirectory": "/home/lbry/Uploads", - lbrynet sdk will know your uploads are here
"lbrynetHome": "/home/lbry",
"thumbnailChannel": null, - when publishing non-image content, thumbnails will go here. "thumbnailChannel": null, - when publishing non-image content, thumbnails will go here.
"thumbnailChannelId": null, "thumbnailChannelId": null,
"additionalClaimAddresses": [], "additionalClaimAddresses": [],
@ -39,54 +38,63 @@ PUBLISHING:
"publishingChannelWhitelist": [], "publishingChannelWhitelist": [],
"channelClaimBidAmount": "0.1", - When creating a channel, how much you deposit to control the name "channelClaimBidAmount": "0.1", - When creating a channel, how much you deposit to control the name
"fileClaimBidAmount": "0.01", - When publishing content, how much you deposit to control the name "fileClaimBidAmount": "0.01", - When publishing content, how much you deposit to control the name
"maxSizeImage": 10000000, - You may not want people uploading 50GB files. 1000000 = 1MB "fileSizeLimits": {
"maxSizeGif": 50000000, "image": 50000000,
"maxSizeVideo": 50000000 "video": 50000000,
"audio": 50000000,
"text": 10000000,
"model": 50000000,
"application": 500000000,
"customByContentType": {
"application/octet-stream": 50000000
}
}
SERVING: SERVING:
"markdownSettings": { "dynamicFileSizing": {
"skipHtmlMain": true, - false: render html, in a somewhat unpredictable way~ "enabled": false, - if you choose to allow your instance to serve transform images
"escapeHtmlMain": true, - true: rather than render html, escape it and print it visibly "maxDimension": 2000 - the maximum size you allow transform to scale
"skipHtmlDescriptions": true, - as above, for descriptions },
"escapeHtmlDescriptions": true, - as above, for descriptions "markdownSettings": {
"allowedTypesMain": [], - markdown rendered as main content "skipHtmlMain": true, - false: render html, in a somewhat unpredictable way~
"allowedTypesDescriptions": [], - markdown rendered in description in content details "escapeHtmlMain": true, - true: rather than render html, escape it and print it visibly
"allowedTypesExample": [ - here are examples of allowed types "skipHtmlDescriptions": true, - as above, for descriptions
"see react-markdown docs", `https://github.com/rexxars/react-markdown` "escapeHtmlDescriptions": true, - as above, for descriptions
"root", "allowedTypesMain": [], - markdown rendered as main content
"text", "allowedTypesDescriptions": [], - markdown rendered in description in content details
"break", "allowedTypesExample": [ - here are examples of allowed types
"paragraph", "see react-markdown docs", `https://github.com/rexxars/react-markdown`
"emphasis", "root",
"strong", "text",
"thematicBreak", "break",
"blockquote", "paragraph",
"delete", "emphasis",
"link", "strong",
"image", - you may not have a lot of control over how these are rendered "thematicBreak",
"linkReference", "blockquote",
"imageReference", "delete",
"table", "link",
"tableHead", "image", - you may not have a lot of control over how these are rendered
"tableBody", "linkReference",
"tableRow", "imageReference",
"tableCell", "table",
"list", "tableHead",
"listItem", "tableBody",
"heading", "tableRow",
"inlineCode", "tableCell",
"code", "list",
"html", - potentially DANGEROUS, intended for `serveOnlyApproved = true` environments, includes iframes, divs. "listItem",
"parsedHtml" "heading",
], "inlineCode",
"disallowedTypesMain": [], - not implemented "code",
"disallowedTypesDescriptions": ["image", "html"], - not implemented "html", - potentially DANGEROUS, intended for `serveOnlyApproved = true` environments, includes iframes, divs.
"disallowedTypesExample": ["image", "html"] - not implemented "parsedHtml"
}, ],
"customFileExtensions": { - suggest a file extension for experimental content types you may be publishing },
"application/example-type": "example" "customFileExtensions": { - suggest a file extension for experimental content types you may be publishing
} "application/example-type": "example"
}
STARTUP: STARTUP:


@ -163,7 +163,7 @@ Log in as username@domainname or username@ip_address
`mysql -u root -p` and then entering your_mysql_password should give you the mysql> shell `mysql -u root -p` and then entering your_mysql_password should give you the mysql> shell
# 5 Get Lbrynet Daemon # 5 Get Lbrynet SDK Daemon
## Start tmux ## Start tmux
@ -174,11 +174,34 @@ tmux allows you to run multiple things in different sessions. Useful for manuall
* `tmux`, reenters tmux, then * `tmux`, reenters tmux, then
* `Ctrl+b`, `(` goes back to through sessions * `Ctrl+b`, `(` goes back to through sessions
## Get the daemon ## Get the SDK
`wget -O ~/latest_daemon.zip https://lbry.io/get/lbrynet.linux.zip` `wget -O ~/latest_daemon.zip https://lbry.io/get/lbrynet.linux.zip`
`unzip -o -u ~/latest_daemon.zip` `unzip -o -u ~/latest_daemon.zip`
## Customize SDK settings
These settings will prevent you and your users from spending your server's LBC on paid content. Full documentation is [here](https://lbry.tech/resources/daemon-settings).
~$
`mkdir .lbrynet`
`cd .lbrynet`
`nano daemon_settings.yml`
copy and paste in the following code (Ctrl+Shift+V)
```
run_reflector_server: false
disable_max_key_fee: false
max_key_fee: {amount: 0, currency: LBC}
use_upnp: false
auto_re_reflect_interval: 0
```
`CONTROL+O` then `CONTROL+X` to save and exit
## Start the daemon ## Start the daemon
`./lbrynet start` `./lbrynet start`

package-lock.json (generated, 943 lines changed): diff suppressed because it is too large

@ -46,6 +46,7 @@
"express-http-context": "^1.2.0", "express-http-context": "^1.2.0",
"generate-password": "^1.4.1", "generate-password": "^1.4.1",
"get-video-dimensions": "^1.0.0", "get-video-dimensions": "^1.0.0",
"gm": "^1.23.1",
"helmet": "^3.15.0", "helmet": "^3.15.0",
"image-size": "^0.6.3", "image-size": "^0.6.3",
"inquirer": "^5.2.0", "inquirer": "^5.2.0",


@ -6,7 +6,7 @@ const {
const getterMethods = { const getterMethods = {
generated_extension() { generated_extension() {
logger.info('trying to generate extension', this.content_type); logger.debug('trying to generate extension', this.content_type);
if (customFileExtensions.hasOwnProperty(this.content_type)) { if (customFileExtensions.hasOwnProperty(this.content_type)) {
return customFileExtensions[this.content_type]; return customFileExtensions[this.content_type];
} else { } else {
@ -136,6 +136,14 @@ export default (sequelize, { BOOLEAN, DATE, DECIMAL, ENUM, INTEGER, STRING, TEXT
type: STRING, type: STRING,
set() {}, set() {},
}, },
license: {
type: STRING,
set() {},
},
license_url: {
type: STRING,
set() {},
},
}, },
{ {
freezeTableName: true, freezeTableName: true,


@ -1,8 +1,10 @@
const chainquery = require('chainquery').default; const chainquery = require('chainquery').default;
const logger = require('winston');
const getClaimData = require('server/utils/getClaimData'); const getClaimData = require('server/utils/getClaimData');
const { returnPaginatedChannelClaims } = require('./channelPagination.js'); const { returnPaginatedChannelClaims } = require('./channelPagination.js');
const getChannelClaims = async (channelName, channelLongId, page) => { const getChannelClaims = async (channelName, channelLongId, page) => {
logger.debug(`getChannelClaims: ${channelName}, ${channelLongId}, ${page}`);
let channelShortId = await chainquery.claim.queries.getShortClaimIdFromLongClaimId( let channelShortId = await chainquery.claim.queries.getShortClaimIdFromLongClaimId(
channelLongId, channelLongId,
channelName channelName


@ -1,11 +1,11 @@
const { getClaim } = require('../../../../lbrynet'); const { getClaim } = require('server/lbrynet');
const { createFileRecordDataAfterGet } = require('../../../../models/utils/createFileRecordData.js'); const { createFileRecordDataAfterGet } = require('server/models/utils/createFileRecordData.js');
const { handleErrorResponse } = require('../../../utils/errorHandlers.js'); const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
const getClaimData = require('server/utils/getClaimData'); const getClaimData = require('server/utils/getClaimData');
const chainquery = require('chainquery').default; const chainquery = require('chainquery').default;
const db = require('../../../../models'); const db = require('server/models');
const waitOn = require('wait-on');
const logger = require('winston'); const logger = require('winston');
const awaitFileSize = require('server/utils/awaitFileSize');
/* /*
@ -36,11 +36,11 @@ const claimGet = async ({ ip, originalUrl, params }, res) => {
if (!claimData) { if (!claimData) {
throw new Error('claim/get: getClaimData failed to get file blobs'); throw new Error('claim/get: getClaimData failed to get file blobs');
} }
await waitOn({ const fileReady = await awaitFileSize(lbrynetResult.outpoint, 10000000, 250, 10000);
resources: [ lbrynetResult.download_path ],
timeout : 10000, // 10 seconds if (fileReady !== 'ready') {
window : 500, throw new Error('claim/get: failed to get file after 10 seconds');
}); }
const fileData = await createFileRecordDataAfterGet(claimData, lbrynetResult); const fileData = await createFileRecordDataAfterGet(claimData, lbrynetResult);
if (!fileData) { if (!fileData) {
throw new Error('claim/get: createFileRecordDataAfterGet failed to create file in time'); throw new Error('claim/get: createFileRecordDataAfterGet failed to create file in time');


@ -1,6 +1,17 @@
const logger = require('winston'); const logger = require('winston');
const { details, publishing } = require('@config/siteConfig'); const { details, publishing } = require('@config/siteConfig');
const createPublishParams = (filePath, name, title, description, license, nsfw, thumbnail, channelName, channelClaimId) => { const createPublishParams = (
filePath,
name,
title,
description,
license,
licenseUrl,
nsfw,
thumbnail,
channelName,
channelClaimId
) => {
// provide defaults for title // provide defaults for title
if (title === null || title.trim() === '') { if (title === null || title.trim() === '') {
title = name; title = name;
@ -11,19 +22,24 @@ const createPublishParams = (filePath, name, title, description, license, nsfw,
} }
// provide default for license // provide default for license
if (license === null || license.trim() === '') { if (license === null || license.trim() === '') {
license = ''; // default to empty string license = ''; // default to empty string
}
// provide default for licenseUrl
if (licenseUrl === null || licenseUrl.trim() === '') {
licenseUrl = ''; // default to empty string
} }
// create the basic publish params // create the basic publish params
const publishParams = { const publishParams = {
name, name,
file_path: filePath, file_path: filePath,
bid : publishing.fileClaimBidAmount, bid: publishing.fileClaimBidAmount,
metadata : { metadata: {
description, description,
title, title,
author : details.title, author: details.title,
language: 'en', language: 'en',
license, license,
licenseUrl,
nsfw, nsfw,
}, },
claim_address: publishing.primaryClaimAddress, claim_address: publishing.primaryClaimAddress,


@ -1,27 +1,28 @@
const logger = require('winston'); const logger = require('winston');
const { details, publishing } = require('@config/siteConfig'); const { details, publishing } = require('@config/siteConfig');
const createThumbnailPublishParams = (thumbnailFilePath, claimName, license, nsfw) => { const createThumbnailPublishParams = (thumbnailFilePath, claimName, license, licenseUrl, nsfw) => {
if (!thumbnailFilePath) { if (!thumbnailFilePath) {
return; return;
} }
logger.debug(`Creating Thumbnail Publish Parameters`); logger.debug(`Creating Thumbnail Publish Parameters`);
// create the publish params // create the publish params
return { return {
name : `${claimName}-thumb`, name: `${claimName}-thumb`,
file_path: thumbnailFilePath, file_path: thumbnailFilePath,
bid : publishing.fileClaimBidAmount, bid: publishing.fileClaimBidAmount,
metadata : { metadata: {
title : `${claimName} thumbnail`, title: `${claimName} thumbnail`,
description: `a thumbnail for ${claimName}`, description: `a thumbnail for ${claimName}`,
author : details.title, author: details.title,
language : 'en', language: 'en',
license, license,
licenseUrl,
nsfw, nsfw,
}, },
claim_address: publishing.primaryClaimAddress, claim_address: publishing.primaryClaimAddress,
channel_name : publishing.thumbnailChannel, channel_name: publishing.thumbnailChannel,
channel_id : publishing.thumbnailChannelId, channel_id: publishing.thumbnailChannelId,
}; };
}; };


@ -1,10 +1,15 @@
const logger = require('winston'); const logger = require('winston');
const { details: { host }, publishing: { disabled, disabledMessage } } = require('@config/siteConfig'); const {
details: { host },
publishing: { disabled, disabledMessage },
} = require('@config/siteConfig');
const { sendGATimingEvent } = require('../../../../utils/googleAnalytics.js'); const { sendGATimingEvent } = require('server/utils/googleAnalytics.js');
const isApprovedChannel = require('@globalutils/isApprovedChannel'); const isApprovedChannel = require('@globalutils/isApprovedChannel');
const { publishing: { publishOnlyApproved, approvedChannels } } = require('@config/siteConfig'); const {
publishing: { publishOnlyApproved, approvedChannels },
} = require('@config/siteConfig');
const { handleErrorResponse } = require('../../../utils/errorHandlers.js'); const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
@ -55,6 +60,7 @@ const claimPublish = ({ body, files, headers, ip, originalUrl, user, tor }, res)
fileType, fileType,
gaStartTime, gaStartTime,
license, license,
licenseUrl,
name, name,
nsfw, nsfw,
thumbnail, thumbnail,
@ -69,18 +75,34 @@ const claimPublish = ({ body, files, headers, ip, originalUrl, user, tor }, res)
// validate the body and files of the request // validate the body and files of the request
try { try {
// validateApiPublishRequest(body, files); // validateApiPublishRequest(body, files);
({name, nsfw, license, title, description, thumbnail} = parsePublishApiRequestBody(body)); ({
({fileName, filePath, fileExtension, fileType, thumbnailFileName, thumbnailFilePath, thumbnailFileType} = parsePublishApiRequestFiles(files)); name,
({channelName, channelId, channelPassword} = body); nsfw,
license,
licenseUrl,
title,
description,
thumbnail,
} = parsePublishApiRequestBody(body));
({
fileName,
filePath,
fileExtension,
fileType,
thumbnailFileName,
thumbnailFilePath,
thumbnailFileType,
} = parsePublishApiRequestFiles(files));
({ channelName, channelId, channelPassword } = body);
} catch (error) { } catch (error) {
return res.status(400).json({success: false, message: error.message}); return res.status(400).json({ success: false, message: error.message });
} }
// check channel authorization // check channel authorization
authenticateUser(channelName, channelId, channelPassword, user) authenticateUser(channelName, channelId, channelPassword, user)
.then(({ channelName, channelClaimId }) => { .then(({ channelName, channelClaimId }) => {
if (publishOnlyApproved && !isApprovedChannel({ longId: channelClaimId }, approvedChannels)) { if (publishOnlyApproved && !isApprovedChannel({ longId: channelClaimId }, approvedChannels)) {
const error = { const error = {
name : UNAPPROVED_CHANNEL, name: UNAPPROVED_CHANNEL,
message: 'This spee.ch instance only allows publishing to approved channels', message: 'This spee.ch instance only allows publishing to approved channels',
}; };
throw error; throw error;
@ -88,14 +110,25 @@ const claimPublish = ({ body, files, headers, ip, originalUrl, user, tor }, res)
return Promise.all([ return Promise.all([
checkClaimAvailability(name), checkClaimAvailability(name),
createPublishParams(filePath, name, title, description, license, nsfw, thumbnail, channelName, channelClaimId), createPublishParams(
createThumbnailPublishParams(thumbnailFilePath, name, license, nsfw), filePath,
name,
title,
description,
license,
licenseUrl,
nsfw,
thumbnail,
channelName,
channelClaimId
),
createThumbnailPublishParams(thumbnailFilePath, name, license, licenseUrl, nsfw),
]); ]);
}) })
.then(([ claimAvailable, publishParams, thumbnailPublishParams ]) => { .then(([claimAvailable, publishParams, thumbnailPublishParams]) => {
if (!claimAvailable) { if (!claimAvailable) {
const error = { const error = {
name : CLAIM_TAKEN, name: CLAIM_TAKEN,
message: 'That claim name is already taken', message: 'That claim name is already taken',
}; };
throw error; throw error;
@ -110,14 +143,20 @@ const claimPublish = ({ body, files, headers, ip, originalUrl, user, tor }, res)
.then(publishResults => { .then(publishResults => {
logger.info('Publish success >', publishResults); logger.info('Publish success >', publishResults);
claimData = publishResults; claimData = publishResults;
({claimId} = claimData); ({ claimId } = claimData);
if (channelName) { if (channelName) {
return chainquery.claim.queries.getShortClaimIdFromLongClaimId(claimData.certificateId, channelName); logger.info(`api/claim/publish: claimData.certificateId ${claimData.certificateId}`);
return chainquery.claim.queries.getShortClaimIdFromLongClaimId(
claimData.certificateId,
channelName
);
} else { } else {
return chainquery.claim.queries.getShortClaimIdFromLongClaimId(claimId, name, claimData).catch(() => { return chainquery.claim.queries
return claimId.slice(0, 1); .getShortClaimIdFromLongClaimId(claimId, name, claimData)
}); .catch(() => {
return claimId.slice(0, 1);
});
} }
}) })
.then(shortId => { .then(shortId => {
@ -131,13 +170,13 @@ const claimPublish = ({ body, files, headers, ip, originalUrl, user, tor }, res)
res.status(200).json({ res.status(200).json({
success: true, success: true,
message: 'publish completed successfully', message: 'publish completed successfully',
data : { data: {
name, name,
claimId, claimId,
url : `${host}${canonicalUrl}`, // for backwards compatibility with app url: `${host}${canonicalUrl}`, // for backwards compatibility with app
showUrl : `${host}${canonicalUrl}`, showUrl: `${host}${canonicalUrl}`,
serveUrl: `${host}${canonicalUrl}${fileExtension}`, serveUrl: `${host}${canonicalUrl}${fileExtension}`,
pushTo : canonicalUrl, pushTo: canonicalUrl,
claimData, claimData,
}, },
}); });


@ -1,15 +1,26 @@
const parsePublishApiRequestBody = ({name, nsfw, license, title, description, thumbnail}) => { const parsePublishApiRequestBody = ({
name,
nsfw,
license,
licenseUrl,
title,
description,
thumbnail,
}) => {
// validate name // validate name
if (!name) { if (!name) {
throw new Error('no name field found in request'); throw new Error('no name field found in request');
} }
const invalidNameCharacters = /[^A-Za-z0-9,-]/.exec(name); const invalidNameCharacters = /[^A-Za-z0-9,-]/.exec(name);
if (invalidNameCharacters) { if (invalidNameCharacters) {
throw new Error('The claim name you provided is not allowed. Only the following characters are allowed: A-Z, a-z, 0-9, and "-"'); throw new Error(
'The claim name you provided is not allowed. Only the following characters are allowed: A-Z, a-z, 0-9, and "-"'
);
} }
// optional parameters // optional parameters
nsfw = (nsfw === 'true'); nsfw = nsfw === 'true';
license = license || null; license = license || null;
licenseUrl = licenseUrl || null;
title = title || null; title = title || null;
description = description || null; description = description || null;
thumbnail = thumbnail || null; thumbnail = thumbnail || null;
@ -18,6 +29,7 @@ const parsePublishApiRequestBody = ({name, nsfw, license, title, description, th
name, name,
nsfw, nsfw,
license, license,
licenseUrl,
title, title,
description, description,
thumbnail, thumbnail,


@ -1,6 +1,9 @@
const logger = require('winston'); const logger = require('winston');
const db = require('server/models'); const db = require('server/models');
const { details, publishing: { disabled, disabledMessage, primaryClaimAddress } } = require('@config/siteConfig'); const {
details,
publishing: { disabled, disabledMessage, primaryClaimAddress },
} = require('@config/siteConfig');
const { resolveUri } = require('server/lbrynet'); const { resolveUri } = require('server/lbrynet');
const { sendGATimingEvent } = require('../../../../utils/googleAnalytics.js'); const { sendGATimingEvent } = require('../../../../utils/googleAnalytics.js');
const { handleErrorResponse } = require('../../../utils/errorHandlers.js'); const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
@ -16,10 +19,11 @@ const createCanonicalLink = require('@globalutils/createCanonicalLink');
route to update a claim through the daemon route to update a claim through the daemon
*/ */
const updateMetadata = ({nsfw, license, title, description}) => { const updateMetadata = ({ nsfw, license, licenseUrl, title, description }) => {
const update = {}; const update = {};
if (nsfw) update['nsfw'] = nsfw; if (nsfw) update['nsfw'] = nsfw;
if (license) update['license'] = license; if (license) update['license'] = license;
if (licenseUrl) update['licenseUrl'] = licenseUrl;
if (title) update['title'] = title; if (title) update['title'] = title;
if (description) update['description'] = description; if (description) update['description'] = description;
return update; return update;
@ -62,6 +66,7 @@ const claimUpdate = ({ body, files, headers, ip, originalUrl, user, tor }, res)
thumbnail, thumbnail,
fileExtension, fileExtension,
license, license,
licenseUrl,
name, name,
nsfw, nsfw,
thumbnailFileName, thumbnailFileName,
@ -76,11 +81,27 @@ const claimUpdate = ({ body, files, headers, ip, originalUrl, user, tor }, res)
gaStartTime = Date.now(); gaStartTime = Date.now();
try { try {
({name, nsfw, license, title, description, thumbnail} = parsePublishApiRequestBody(body)); ({
({fileName, filePath, fileExtension, fileType, thumbnailFileName, thumbnailFilePath, thumbnailFileType} = parsePublishApiRequestFiles(files, true)); name,
({channelName, channelId, channelPassword} = body); nsfw,
license,
licenseUrl,
title,
description,
thumbnail,
} = parsePublishApiRequestBody(body));
({
fileName,
filePath,
fileExtension,
fileType,
thumbnailFileName,
thumbnailFilePath,
thumbnailFileType,
} = parsePublishApiRequestFiles(files, true));
({ channelName, channelId, channelPassword } = body);
} catch (error) { } catch (error) {
return res.status(400).json({success: false, message: error.message}); return res.status(400).json({ success: false, message: error.message });
} }
// check channel authorization // check channel authorization
@ -89,7 +110,9 @@ const claimUpdate = ({ body, files, headers, ip, originalUrl, user, tor }, res)
if (!channelId) { if (!channelId) {
channelId = channelClaimId; channelId = channelClaimId;
} }
return chainquery.claim.queries.resolveClaimInChannel(name, channelClaimId).then(claim => claim.dataValues); return chainquery.claim.queries
.resolveClaimInChannel(name, channelClaimId)
.then(claim => claim.dataValues);
}) })
.then(claim => { .then(claim => {
claimRecord = claim; claimRecord = claim;
@ -107,20 +130,25 @@ const claimUpdate = ({ body, files, headers, ip, originalUrl, user, tor }, res)
return [null, null]; return [null, null];
}) })
.then(([fileResult, resolution]) => { .then(([fileResult, resolution]) => {
metadata = Object.assign({}, { metadata = Object.assign(
title : claimRecord.title, {},
description: claimRecord.description, {
nsfw : claimRecord.nsfw, title: claimRecord.title,
license : claimRecord.license, description: claimRecord.description,
language : 'en', nsfw: claimRecord.nsfw,
author : details.title, license: claimRecord.license,
}, updateMetadata({title, description, nsfw, license})); licenseUrl: claimRecord.license_url,
language: 'en',
author: details.title,
},
updateMetadata({ title, description, nsfw, license, licenseUrl })
);
const publishParams = { const publishParams = {
name, name,
bid : '0.01', bid: '0.01',
claim_address: primaryClaimAddress, claim_address: primaryClaimAddress,
channel_name : channelName, channel_name: channelName,
channel_id : channelId, channel_id: channelId,
metadata, metadata,
}; };
@ -128,19 +156,24 @@ const claimUpdate = ({ body, files, headers, ip, originalUrl, user, tor }, res)
if (thumbnailUpdate) { if (thumbnailUpdate) {
// publish new thumbnail // publish new thumbnail
const newThumbnailName = `${name}-${rando()}`; const newThumbnailName = `${name}-${rando()}`;
const newThumbnailParams = createThumbnailPublishParams(filePath, newThumbnailName, license, nsfw); const newThumbnailParams = createThumbnailPublishParams(
filePath,
newThumbnailName,
license,
nsfw
);
newThumbnailParams['file_path'] = filePath; newThumbnailParams['file_path'] = filePath;
publish(newThumbnailParams, fileName, fileType); publish(newThumbnailParams, fileName, fileType);
publishParams['sources'] = resolution.claim.value.stream.source; publishParams['thumbnail'] = `${details.host}/${newThumbnailParams.channel_name}:${
publishParams['thumbnail'] = `${details.host}/${newThumbnailParams.channel_name}:${newThumbnailParams.channel_id}/${newThumbnailName}-thumb.jpg`; newThumbnailParams.channel_id
}/${newThumbnailName}-thumb.jpg`;
} else { } else {
publishParams['file_path'] = filePath; publishParams['file_path'] = filePath;
} }
} else { } else {
fileName = fileResult.fileName; fileName = fileResult.fileName;
fileType = fileResult.fileType; fileType = fileResult.fileType;
publishParams['sources'] = resolution.claim.value.stream.source;
publishParams['thumbnail'] = claimRecord.thumbnail_url; publishParams['thumbnail'] = claimRecord.thumbnail_url;
} }
@ -151,17 +184,24 @@ const claimUpdate = ({ body, files, headers, ip, originalUrl, user, tor }, res)
publishResult = result; publishResult = result;
if (channelName) { if (channelName) {
return chainquery.claim.queries.getShortClaimIdFromLongClaimId(result.certificateId, channelName); return chainquery.claim.queries.getShortClaimIdFromLongClaimId(
result.certificateId,
channelName
);
} else { } else {
return chainquery.claim.queries.getShortClaimIdFromLongClaimId(result.claimId, name, result).catch(() => { return chainquery.claim.queries
return result.claimId.slice(0, 1); .getShortClaimIdFromLongClaimId(result.claimId, name, result)
}); .catch(() => {
return result.claimId.slice(0, 1);
});
} }
}) })
.then(shortId => { .then(shortId => {
let canonicalUrl; let canonicalUrl;
if (channelName) { if (channelName) {
canonicalUrl = createCanonicalLink({ asset: { ...publishResult, channelShortId: shortId } }); canonicalUrl = createCanonicalLink({
asset: { ...publishResult, channelShortId: shortId },
});
} else { } else {
canonicalUrl = createCanonicalLink({ asset: { ...publishResult, shortId } }); canonicalUrl = createCanonicalLink({ asset: { ...publishResult, shortId } });
} }
@ -173,17 +213,17 @@ const claimUpdate = ({ body, files, headers, ip, originalUrl, user, tor }, res)
}); });
} }
const {claimId} = publishResult; const { claimId } = publishResult;
res.status(200).json({ res.status(200).json({
success: true, success: true,
message: 'update successful', message: 'update successful',
data : { data: {
name, name,
claimId, claimId,
url : `${details.host}${canonicalUrl}`, // for backwards compatibility with app url: `${details.host}${canonicalUrl}`, // for backwards compatibility with app
showUrl : `${details.host}${canonicalUrl}`, showUrl: `${details.host}${canonicalUrl}`,
serveUrl : `${details.host}${canonicalUrl}${fileExtension}`, serveUrl: `${details.host}${canonicalUrl}${fileExtension}`,
pushTo : canonicalUrl, pushTo: canonicalUrl,
claimData: publishResult, claimData: publishResult,
}, },
}); });


@ -15,9 +15,20 @@ const BLOCKED_CLAIM = 'BLOCKED_CLAIM';
const NO_FILE = 'NO_FILE'; const NO_FILE = 'NO_FILE';
const CONTENT_UNAVAILABLE = 'CONTENT_UNAVAILABLE'; const CONTENT_UNAVAILABLE = 'CONTENT_UNAVAILABLE';
const { publishing: { serveOnlyApproved, approvedChannels } } = require('@config/siteConfig'); const {
publishing: { serveOnlyApproved, approvedChannels },
} = require('@config/siteConfig');
const getClaimIdAndServeAsset = (channelName, channelClaimId, claimName, claimId, originalUrl, ip, res, headers) => { const getClaimIdAndServeAsset = (
channelName,
channelClaimId,
claimName,
claimId,
originalUrl,
ip,
res,
headers
) => {
getClaimId(channelName, channelClaimId, claimName, claimId) getClaimId(channelName, channelClaimId, claimName, claimId)
.then(fullClaimId => { .then(fullClaimId => {
claimId = fullClaimId; claimId = fullClaimId;
@ -39,19 +50,27 @@ const getClaimIdAndServeAsset = (channelName, channelClaimId, claimName, claimId
.then(claim => { .then(claim => {
let claimDataValues = claim.dataValues; let claimDataValues = claim.dataValues;
if (serveOnlyApproved && !isApprovedChannel({ longId: claimDataValues.publisher_id || claimDataValues.certificateId }, approvedChannels)) { if (
serveOnlyApproved &&
!isApprovedChannel(
{ longId: claimDataValues.publisher_id || claimDataValues.certificateId },
approvedChannels
)
) {
throw new Error(CONTENT_UNAVAILABLE); throw new Error(CONTENT_UNAVAILABLE);
} }
let outpoint = claimDataValues.outpoint || `${claimDataValues.transaction_hash_id}:${claimDataValues.vout}`; let outpoint =
claimDataValues.outpoint ||
`${claimDataValues.transaction_hash_id}:${claimDataValues.vout}`;
logger.debug('Outpoint:', outpoint); logger.debug('Outpoint:', outpoint);
return db.Blocked.isNotBlocked(outpoint).then(() => { return db.Blocked.isNotBlocked(outpoint).then(() => {
// If content was found, is approved, and not blocked - log a view. // If content was found, is approved, and not blocked - log a view.
if (headers && headers['user-agent'] && /LBRY/.test(headers['user-agent']) === false) { if (headers && headers['user-agent'] && /LBRY/.test(headers['user-agent']) === false) {
db.Views.create({ db.Views.create({
time : Date.now(), time: Date.now(),
isChannel : false, isChannel: false,
claimId : claimDataValues.claim_id || claimDataValues.claimId, claimId: claimDataValues.claim_id || claimDataValues.claimId,
publisherId: claimDataValues.publisher_id || claimDataValues.certificateId, publisherId: claimDataValues.publisher_id || claimDataValues.certificateId,
ip, ip,
}); });
@ -70,7 +89,7 @@ const getClaimIdAndServeAsset = (channelName, channelClaimId, claimName, claimId
if (!fileRecord) { if (!fileRecord) {
throw NO_FILE; throw NO_FILE;
} }
serveFile(fileRecord.dataValues, res); serveFile(fileRecord.dataValues, res, originalUrl);
}) })
.catch(error => { .catch(error => {
if (error === NO_CLAIM) { if (error === NO_CLAIM) {
@ -98,7 +117,8 @@ const getClaimIdAndServeAsset = (channelName, channelClaimId, claimName, claimId
logger.debug('claim was blocked'); logger.debug('claim was blocked');
return res.status(451).json({ return res.status(451).json({
success: false, success: false,
message: 'In response to a complaint we received under the US Digital Millennium Copyright Act, we have blocked access to this content from our applications. For more details, see https://lbry.io/faq/dmca', message:
'In response to a complaint we received under the US Digital Millennium Copyright Act, we have blocked access to this content from our applications. For more details, see https://lbry.io/faq/dmca',
}); });
} }
if (error === NO_FILE) { if (error === NO_FILE) {


@ -1,19 +1,68 @@
const logger = require('winston'); const logger = require('winston');
const transformImage = require('./transformImage');
const isValidQueryObject = require('server/utils/isValidQueryObj');
const {
serving: { dynamicFileSizing },
} = require('@config/siteConfig');
const { enabled: dynamicEnabled } = dynamicFileSizing;
const serveFile = async ({ filePath, fileType }, res, originalUrl) => {
const queryObject = {};
// TODO: replace quick/dirty try catch with better practice
try {
originalUrl
.split('?')[1]
.split('&')
.map(pair => {
if (pair.includes('=')) {
let parr = pair.split('=');
queryObject[parr[0]] = parr[1];
} else queryObject[pair] = true;
});
} catch (e) {}
const serveFile = ({ filePath, fileType }, res) => {
if (!fileType) { if (!fileType) {
logger.error(`no fileType provided for ${filePath}`); logger.error(`no fileType provided for ${filePath}`);
} }
let mediaType = fileType ? fileType.substr(0, fileType.indexOf('/')) : '';
const transform =
mediaType === 'image' &&
queryObject.hasOwnProperty('h') &&
queryObject.hasOwnProperty('w') &&
dynamicEnabled;
const sendFileOptions = { const sendFileOptions = {
headers: { headers: {
'X-Content-Type-Options' : 'nosniff', 'X-Content-Type-Options': 'nosniff',
'Content-Type' : fileType, 'Content-Type': fileType,
'Access-Control-Allow-Origin' : '*', 'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'Origin, X-Requested-With, Content-Type, Accept', 'Access-Control-Allow-Headers': 'Origin, X-Requested-With, Content-Type, Accept',
}, },
}; };
logger.debug(`fileOptions for ${filePath}:`, sendFileOptions); logger.debug(`fileOptions for ${filePath}:`, sendFileOptions);
res.status(200).sendFile(filePath, sendFileOptions); try {
if (transform) {
if (!isValidQueryObject(queryObject)) {
logger.debug(`Unacceptable querystring`, { queryObject });
res.status(400).json({
success: false,
message: 'Querystring may not have dimensions greater than 2000',
});
res.end();
}
logger.debug(`transforming and sending file`);
let xformed = await transformImage(filePath, queryObject);
res.status(200).set(sendFileOptions.headers);
res.end(xformed, 'binary');
} else {
res.status(200).sendFile(filePath, sendFileOptions);
}
} catch (e) {
logger.debug(e);
}
}; };
module.exports = serveFile; module.exports = serveFile;


@ -0,0 +1,76 @@
const gm = require('gm');
const logger = require('winston');
const imageMagick = gm.subClass({ imageMagick: true });
const { getImageHeightAndWidth } = require('../../../utils/imageProcessing');
module.exports = function transformImage(path, queryObj) {
return new Promise((resolve, reject) => {
let { h: cHeight = null } = queryObj;
let { w: cWidth = null } = queryObj;
let { t: transform = null } = queryObj;
let { x: xOrigin = null } = queryObj;
let { y: yOrigin = null } = queryObj;
let oHeight,
oWidth = null;
try {
getImageHeightAndWidth(path).then(hwarr => {
oHeight = hwarr[0];
oWidth = hwarr[1];
// conditional logic here
if (transform === 'crop') {
resolve(_cropCenter(path, cWidth, cHeight, oWidth, oHeight));
} else if (transform === 'stretch') {
imageMagick(path)
.resize(cWidth, cHeight, '!')
.toBuffer(null, (err, buf) => {
resolve(buf);
});
} else {
// resize scaled
imageMagick(path)
.resize(cWidth, cHeight)
.toBuffer(null, (err, buf) => {
resolve(buf);
});
}
});
} catch (e) {
logger.error(e);
reject(e);
}
});
};
function _cropCenter(path, cropWidth, cropHeight, originalWidth, originalHeight) {
let oAspect = originalWidth / originalHeight;
let cAspect = cropWidth / cropHeight;
let resizeX,
resizeY,
xpoint,
ypoint = null;
if (oAspect >= cAspect) {
// if crop is narrower aspect than original
resizeY = cropHeight;
xpoint = (oAspect * cropHeight) / 2 - cropWidth / 2;
ypoint = 0;
} else {
// if crop is wider aspect than original
resizeX = cropWidth;
xpoint = 0;
ypoint = cropWidth / oAspect / 2 - cropHeight / 2;
}
return new Promise((resolve, reject) => {
try {
imageMagick(path)
.resize(resizeX, resizeY)
.crop(cropWidth, cropHeight, xpoint, ypoint)
.toBuffer(null, (err, buf) => {
resolve(buf);
});
} catch (e) {
logger.error(e);
reject(e);
}
});
}
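For illustration, a minimal usage sketch of `transformImage` above (the file path is hypothetical; the dimension values are strings because in `serveFile` they come straight from the parsed querystring):

```
const transformImage = require('./transformImage');

// Center-crop a local image to 400x300 and receive the result as a buffer,
// the same way serveFile consumes it before calling res.end(buffer, 'binary').
transformImage('/home/lbry/Uploads/example.jpg', { h: '300', w: '400', t: 'crop' })
  .then(buffer => console.log(`transformed image: ${buffer.length} bytes`))
  .catch(err => console.error(err));
```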


@ -53,6 +53,26 @@ module.exports = {
}); });
}); });
}, },
getFileListFileByOutpoint(outpoint) {
logger.debug(`lbryApi >> Getting File_List for "${outpoint}"`);
const gaStartTime = Date.now();
return new Promise((resolve, reject) => {
axios
.post(lbrynetUri, {
method: 'file_list',
params: {
outpoint,
},
})
.then(response => {
sendGATimingEvent('lbrynet', 'getFileList', 'FILE_LIST', gaStartTime, Date.now());
handleLbrynetResponse(response, resolve, reject);
})
.catch(error => {
reject(error);
});
});
},
async abandonClaim({ claimId }) { async abandonClaim({ claimId }) {
logger.debug(`lbryApi >> Abandon claim "${claimId}"`); logger.debug(`lbryApi >> Abandon claim "${claimId}"`);
const gaStartTime = Date.now(); const gaStartTime = Date.now();
@ -93,7 +113,7 @@ module.exports = {
axios axios
.post(lbrynetUri, { .post(lbrynetUri, {
method: 'resolve', method: 'resolve',
params: { uri }, params: { urls: uri },
}) })
.then(({ data }) => { .then(({ data }) => {
sendGATimingEvent('lbrynet', 'resolveUri', 'RESOLVE', gaStartTime, Date.now()); sendGATimingEvent('lbrynet', 'resolveUri', 'RESOLVE', gaStartTime, Date.now());


@ -0,0 +1,30 @@
const { getFileListFileByOutpoint } = require('server/lbrynet');
const logger = require('winston');
function delay(t) {
return new Promise(function(resolve) {
setTimeout(resolve, t);
});
}
const awaitFileSize = (outpoint, size, interval, timeout) => {
logger.debug('awaitFileSize');
let start = Date.now();
function checkFileList() {
logger.debug('checkFileList');
return getFileListFileByOutpoint(outpoint).then(result => {
logger.debug('File List Result', result);
if (result[0]['completed'] === true || result[0]['written_bytes'] > size) {
logger.debug('FILE READY');
return 'ready';
} else if (timeout !== 0 && Date.now() - start > timeout) {
throw new Error('Timeout on awaitFileSize');
} else {
return delay(interval).then(checkFileList);
}
});
}
return checkFileList();
};
module.exports = awaitFileSize;


@ -59,5 +59,8 @@ module.exports = async (data, chName = null, chShortId = null) => {
host, host,
pending: Boolean(dataVals.height === 0), pending: Boolean(dataVals.height === 0),
blocked: blocked, blocked: blocked,
license: dataVals.license,
licenseUrl: dataVals.license_url,
transactionTime: dataVals.transaction_time,
}; };
}; };


@ -0,0 +1,24 @@
const {
serving: { dynamicFileSizing },
} = require('@config/siteConfig');
const { maxDimension } = dynamicFileSizing;
const isValidQueryObj = queryObj => {
let {
h: cHeight = null,
w: cWidth = null,
t: transform = null,
x: xOrigin = null,
y: yOrigin = null,
} = queryObj;
return (
((cHeight <= maxDimension && cHeight > 0) || cHeight === null) &&
((cWidth <= maxDimension && cWidth > 0) || cWidth === null) &&
(transform === null || transform === 'crop' || transform === 'stretch') &&
((xOrigin <= maxDimension && xOrigin >= 0) || xOrigin === null) &&
((yOrigin <= maxDimension && yOrigin >= 0) || yOrigin === null)
);
};
module.exports = isValidQueryObj;


@ -39,6 +39,9 @@ module.exports = () => {
moduleAliases['@clientutils'] = resolve(`${DEFAULT_ROOT}/utils`); moduleAliases['@clientutils'] = resolve(`${DEFAULT_ROOT}/utils`);
// moduleAliases['@serverutils'] = resolve('server/utils'); // moduleAliases['@serverutils'] = resolve('server/utils');
// aliases for constants
moduleAliases['@clientConstants'] = resolve(`${DEFAULT_ROOT}/constants`);
// create specific aliases for locally defined components in the following folders // create specific aliases for locally defined components in the following folders
moduleAliases = addAliasesForCustomComponentFolder('containers', moduleAliases); moduleAliases = addAliasesForCustomComponentFolder('containers', moduleAliases);
moduleAliases = addAliasesForCustomComponentFolder('components', moduleAliases); moduleAliases = addAliasesForCustomComponentFolder('components', moduleAliases);
@ -53,7 +56,6 @@ module.exports = () => {
moduleAliases['@sagas'] = resolve(`${DEFAULT_ROOT}/sagas`); moduleAliases['@sagas'] = resolve(`${DEFAULT_ROOT}/sagas`);
moduleAliases['@app'] = resolve(`${DEFAULT_ROOT}/app.js`); moduleAliases['@app'] = resolve(`${DEFAULT_ROOT}/app.js`);
// return finished aliases // return finished aliases
return moduleAliases; return moduleAliases;
}; };