Merge branch 'master' into licenseDev2
This commit is contained in:
commit
b107346c28
8 changed files with 148 additions and 61 deletions
|
@ -47,6 +47,7 @@ For a closed, custom-hosted and branded example, check out https://lbry.theantim
|
|||
- `./lbrynet account_balance` gets your balance (initially 0.0)
|
||||
- `./lbrynet address_list` gets addresses you can use to receive LBC
|
||||
- [FFmpeg](https://www.ffmpeg.org/download.html)
|
||||
- [ImageMagick](https://packages.ubuntu.com/xenial/graphics/imagemagick)
|
||||
- Spee.ch (below)
|
||||
- pm2 (optional) — a process manager to run speech's server.js
|
||||
- http proxy server e.g. caddy, nginx, or traefik, to forward 80/443 to speech port 3000
|
||||
|
@ -260,9 +261,11 @@ Spee.ch has a few types of URL formats that return different assets from the LBR
|
|||
- retrieve the controlling `LBRY` claim:
|
||||
- https://spee.ch/`claim`
|
||||
- https://spee.ch/`claim`.`ext` (serve)
|
||||
- https://spee.ch/`claim`.`ext`&`querystring` (serve transformed)
|
||||
- retrieve a specific `LBRY` claim:
|
||||
- https://spee.ch/`claim_id`/`claim`
|
||||
- https://spee.ch/`claim_id`/`claim`.`ext` (serve)
|
||||
- https://spee.ch/`claim_id`/`claim`.`ext`&`querystring` (serve transformed)
|
||||
- retrieve all contents for the controlling `LBRY` channel
|
||||
- https://spee.ch/`@channel`
|
||||
- a specific `LBRY` channel
|
||||
|
@ -270,9 +273,15 @@ Spee.ch has a few types of URL formats that return different assets from the LBR
|
|||
- retrieve a specific claim within the controlling `LBRY` channel
|
||||
- https://spee.ch/`@channel`/`claim`
|
||||
- https://spee.ch/`@channel`/`claim`.`ext` (serve)
|
||||
- https://spee.ch/`@channel`/`claim`.`ext`&`querystring` (serve)
|
||||
- retrieve a specific claim within a specific `LBRY` channel
|
||||
- https://spee.ch/`@channel`:`channel_id`/`claim`
|
||||
- https://spee.ch/`@channel`:`channel_id`/`claim`.`ext` (serve)
|
||||
- https://spee.ch/`@channel`:`channel_id`/`claim`.`ext`&`querystring` (serve)
|
||||
- `querystring` can include the following transformation values separated by `&`
|
||||
- h=`number` (defines height)
|
||||
- w=`number` (defines width)
|
||||
- t=`crop` or `stretch` (defines transformation - missing implies constrained proportions)
|
||||
|
||||
### Dependencies
|
||||
|
||||
|
|
0
changelog.md
Normal file
0
changelog.md
Normal file
|
@ -49,6 +49,10 @@
|
|||
}
|
||||
},
|
||||
"serving": {
|
||||
"dynamicFileSizing": {
|
||||
"enabled": true,
|
||||
"maxDimension": 2000
|
||||
},
|
||||
"markdownSettings": {
|
||||
"skipHtmlMain": true,
|
||||
"escapeHtmlMain": true,
|
||||
|
@ -83,10 +87,7 @@
|
|||
"code",
|
||||
"html",
|
||||
"parsedHtml"
|
||||
],
|
||||
"disallowedTypesMain": [],
|
||||
"disallowedTypesDescriptions": ["image", "html"],
|
||||
"disallowedTypesExample": ["image", "html"]
|
||||
]
|
||||
},
|
||||
"customFileExtensions": {
|
||||
"application/x-troff-man": "man",
|
||||
|
|
|
@ -26,7 +26,6 @@ PUBLISHING:
|
|||
|
||||
"primaryClaimAddress": null, - generally supplied by your lbrynet sdk
|
||||
"uploadDirectory": "/home/lbry/Uploads", - lbrynet sdk will know your uploads are here
|
||||
"lbrynetHome": "/home/lbry",
|
||||
"thumbnailChannel": null, - when publishing non-image content, thumbnails will go here.
|
||||
"thumbnailChannelId": null,
|
||||
"additionalClaimAddresses": [],
|
||||
|
@ -50,48 +49,52 @@ PUBLISHING:
|
|||
"application/octet-stream": 50000000
|
||||
}
|
||||
}
|
||||
|
||||
SERVING:
|
||||
|
||||
"markdownSettings": {
|
||||
"skipHtmlMain": true, - false: render html, in a somewhat unpredictable way~
|
||||
"escapeHtmlMain": true, - true: rather than render html, escape it and print it visibly
|
||||
"skipHtmlDescriptions": true, - as above, for descriptions
|
||||
"escapeHtmlDescriptions": true, - as above, for descriptions
|
||||
"allowedTypesMain": [], - markdown rendered as main content
|
||||
"allowedTypesDescriptions": [], - markdown rendered in description in content details
|
||||
"allowedTypesExample": [ - here are examples of allowed types
|
||||
"see react-markdown docs", `https://github.com/rexxars/react-markdown`
|
||||
"root",
|
||||
"text",
|
||||
"break",
|
||||
"paragraph",
|
||||
"emphasis",
|
||||
"strong",
|
||||
"thematicBreak",
|
||||
"blockquote",
|
||||
"delete",
|
||||
"link",
|
||||
"image", - you may not have a lot of control over how these are rendered
|
||||
"linkReference",
|
||||
"imageReference",
|
||||
"table",
|
||||
"tableHead",
|
||||
"tableBody",
|
||||
"tableRow",
|
||||
"tableCell",
|
||||
"list",
|
||||
"listItem",
|
||||
"heading",
|
||||
"inlineCode",
|
||||
"code",
|
||||
"html", - potentially DANGEROUS, intended for `serveOnlyApproved = true` environments, includes iframes, divs.
|
||||
"parsedHtml"
|
||||
],
|
||||
},
|
||||
"customFileExtensions": { - suggest a file extension for experimental content types you may be publishing
|
||||
"application/example-type": "example"
|
||||
}
|
||||
SERVING:
|
||||
|
||||
"dynamicFileSizing": {
|
||||
"enabled": false, - if you choose to allow your instance to serve transform images
|
||||
"maxDimension": 2000 - the maximum size you allow transform to scale
|
||||
},
|
||||
"markdownSettings": {
|
||||
"skipHtmlMain": true, - false: render html, in a somewhat unpredictable way~
|
||||
"escapeHtmlMain": true, - true: rather than render html, escape it and print it visibly
|
||||
"skipHtmlDescriptions": true, - as above, for descriptions
|
||||
"escapeHtmlDescriptions": true, - as above, for descriptions
|
||||
"allowedTypesMain": [], - markdown rendered as main content
|
||||
"allowedTypesDescriptions": [], - markdown rendered in description in content details
|
||||
"allowedTypesExample": [ - here are examples of allowed types
|
||||
"see react-markdown docs", `https://github.com/rexxars/react-markdown`
|
||||
"root",
|
||||
"text",
|
||||
"break",
|
||||
"paragraph",
|
||||
"emphasis",
|
||||
"strong",
|
||||
"thematicBreak",
|
||||
"blockquote",
|
||||
"delete",
|
||||
"link",
|
||||
"image", - you may not have a lot of control over how these are rendered
|
||||
"linkReference",
|
||||
"imageReference",
|
||||
"table",
|
||||
"tableHead",
|
||||
"tableBody",
|
||||
"tableRow",
|
||||
"tableCell",
|
||||
"list",
|
||||
"listItem",
|
||||
"heading",
|
||||
"inlineCode",
|
||||
"code",
|
||||
"html", - potentially DANGEROUS, intended for `serveOnlyApproved = true` environments, includes iframes, divs.
|
||||
"parsedHtml"
|
||||
],
|
||||
},
|
||||
"customFileExtensions": { - suggest a file extension for experimental content types you may be publishing
|
||||
"application/example-type": "example"
|
||||
}
|
||||
|
||||
STARTUP:
|
||||
|
||||
|
|
|
@ -1,11 +1,13 @@
|
|||
const { getClaim } = require('../../../../lbrynet');
|
||||
const { createFileRecordDataAfterGet } = require('../../../../models/utils/createFileRecordData.js');
|
||||
const {
|
||||
createFileRecordDataAfterGet,
|
||||
} = require('../../../../models/utils/createFileRecordData.js');
|
||||
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
|
||||
const getClaimData = require('server/utils/getClaimData');
|
||||
const chainquery = require('chainquery').default;
|
||||
const db = require('../../../../models');
|
||||
const waitOn = require('wait-on');
|
||||
const logger = require('winston');
|
||||
const awaitFileSize = require('server/utils/awaitFileSize');
|
||||
|
||||
/*
|
||||
|
||||
|
@ -36,11 +38,11 @@ const claimGet = async ({ ip, originalUrl, params }, res) => {
|
|||
if (!claimData) {
|
||||
throw new Error('claim/get: getClaimData failed to get file blobs');
|
||||
}
|
||||
await waitOn({
|
||||
resources: [ lbrynetResult.download_path ],
|
||||
timeout : 10000, // 10 seconds
|
||||
window : 500,
|
||||
});
|
||||
let fileReady = await awaitFileSize(lbrynetResult.download_path, 2000000, 10000, 250);
|
||||
|
||||
if (fileReady !== 'ready') {
|
||||
throw new Error('claim/get: failed to get file after 10 seconds');
|
||||
}
|
||||
const fileData = await createFileRecordDataAfterGet(claimData, lbrynetResult);
|
||||
if (!fileData) {
|
||||
throw new Error('claim/get: createFileRecordDataAfterGet failed to create file in time');
|
||||
|
|
|
@ -1,6 +1,12 @@
|
|||
const logger = require('winston');
|
||||
const transformImage = require('./transformImage');
|
||||
|
||||
const isValidQueryObject = require('server/utils/isValidQueryObj');
|
||||
const {
|
||||
serving: { dynamicFileSizing },
|
||||
} = require('@config/siteConfig');
|
||||
const { enabled: dynamicEnabled } = dynamicFileSizing;
|
||||
|
||||
const serveFile = async ({ filePath, fileType }, res, originalUrl) => {
|
||||
const queryObject = {};
|
||||
// TODO: replace quick/dirty try catch with better practice
|
||||
|
@ -22,7 +28,10 @@ const serveFile = async ({ filePath, fileType }, res, originalUrl) => {
|
|||
|
||||
let mediaType = fileType ? fileType.substr(0, fileType.indexOf('/')) : '';
|
||||
const transform =
|
||||
mediaType === 'image' && queryObject.hasOwnProperty('h') && queryObject.hasOwnProperty('w');
|
||||
mediaType === 'image' &&
|
||||
queryObject.hasOwnProperty('h') &&
|
||||
queryObject.hasOwnProperty('w') &&
|
||||
dynamicEnabled;
|
||||
|
||||
const sendFileOptions = {
|
||||
headers: {
|
||||
|
@ -33,14 +42,26 @@ const serveFile = async ({ filePath, fileType }, res, originalUrl) => {
|
|||
},
|
||||
};
|
||||
logger.debug(`fileOptions for ${filePath}:`, sendFileOptions);
|
||||
if (transform) {
|
||||
logger.debug(`transforming and sending file`);
|
||||
try {
|
||||
if (transform) {
|
||||
if (!isValidQueryObject(queryObject)) {
|
||||
logger.debug(`Unacceptable querystring`, { queryObject });
|
||||
res.status(400).json({
|
||||
success: false,
|
||||
message: 'Querystring may not have dimensions greater than 2000',
|
||||
});
|
||||
res.end();
|
||||
}
|
||||
logger.debug(`transforming and sending file`);
|
||||
|
||||
let xformed = await transformImage(filePath, queryObject);
|
||||
res.status(200).set(sendFileOptions.headers);
|
||||
res.end(xformed, 'binary');
|
||||
} else {
|
||||
res.status(200).sendFile(filePath, sendFileOptions);
|
||||
let xformed = await transformImage(filePath, queryObject);
|
||||
res.status(200).set(sendFileOptions.headers);
|
||||
res.end(xformed, 'binary');
|
||||
} else {
|
||||
res.status(200).sendFile(filePath, sendFileOptions);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.debug(e);
|
||||
}
|
||||
};
|
||||
|
||||
|
|
27
server/utils/awaitFileSize.js
Normal file
27
server/utils/awaitFileSize.js
Normal file
|
@ -0,0 +1,27 @@
|
|||
const fs = require('fs');
|
||||
const { promisify } = require('util');
|
||||
|
||||
const fsstat = promisify(fs.stat);
|
||||
const awaitFileSize = (path, sizeInBytes, timeout, interval) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
let totalTime = 0;
|
||||
let timer = setInterval(() => {
|
||||
totalTime = totalTime + interval;
|
||||
fsstat(path)
|
||||
.then(stats => {
|
||||
if (stats.size > sizeInBytes) {
|
||||
clearInterval(interval);
|
||||
resolve('ready');
|
||||
}
|
||||
if (totalTime > timeout) {
|
||||
const error = new Error('File did not arrive in time');
|
||||
error.name = 'FILE_NOT_ARRIVED';
|
||||
reject(error);
|
||||
}
|
||||
})
|
||||
.catch();
|
||||
}, interval);
|
||||
});
|
||||
};
|
||||
|
||||
module.exports = awaitFileSize;
|
24
server/utils/isValidQueryObj.js
Normal file
24
server/utils/isValidQueryObj.js
Normal file
|
@ -0,0 +1,24 @@
|
|||
const {
|
||||
serving: { dynamicFileSizing },
|
||||
} = require('@config/siteConfig');
|
||||
const { maxDimension } = dynamicFileSizing;
|
||||
|
||||
const isValidQueryObj = queryObj => {
|
||||
let {
|
||||
h: cHeight = null,
|
||||
w: cWidth = null,
|
||||
t: transform = null,
|
||||
x: xOrigin = null,
|
||||
y: yOrigin = null,
|
||||
} = queryObj;
|
||||
|
||||
return (
|
||||
((cHeight <= maxDimension && cHeight > 0) || cHeight === null) &&
|
||||
((cWidth <= maxDimension && cWidth > 0) || cWidth === null) &&
|
||||
(transform === null || transform === 'crop' || transform === 'stretch') &&
|
||||
((xOrigin <= maxDimension && xOrigin >= 0) || xOrigin === null) &&
|
||||
((yOrigin <= maxDimension && yOrigin >= 0) || yOrigin === null)
|
||||
);
|
||||
};
|
||||
|
||||
module.exports = isValidQueryObj;
|
Loading…
Reference in a new issue