Replaces most require() calls with ES6 import/export

jessop 2019-04-12 10:25:06 -04:00
parent 042069f759
commit dd0886baf4
119 changed files with 1412 additions and 1327 deletions
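
A minimal sketch of the pattern most of these diffs follow: CommonJS require/module.exports pairs are rewritten as ES6 import/export statements, and Babel transpiles them back to CommonJS for Node at build time. The file and names below are illustrative, not taken from the commit:

// before (CommonJS)
const logger = require('winston');
const { handleErrorResponse } = require('../utils/errorHandlers.js');

const doThing = (req, res) => {
  logger.info('doing the thing');
  handleErrorResponse(req.originalUrl, req.ip, new Error('not implemented'), res);
};

module.exports = doThing;

// after (ES6 modules): the function body is unchanged, only the module boundary moves
import logger from 'winston';
import { handleErrorResponse } from '../utils/errorHandlers.js';

const doThing = (req, res) => {
  logger.info('doing the thing');
  handleErrorResponse(req.originalUrl, req.ip, new Error('not implemented'), res);
};

export default doThing;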

package-lock.json (generated, 120 changes)
View file

@ -196,6 +196,116 @@
"@babel/types": "^7.0.0"
}
},
"@babel/helper-create-class-features-plugin": {
"version": "7.3.2",
"resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.3.2.tgz",
"integrity": "sha512-tdW8+V8ceh2US4GsYdNVNoohq5uVwOf9k6krjwW4E1lINcHgttnWcNqgdoessn12dAy8QkbezlbQh2nXISNY+A==",
"dev": true,
"requires": {
"@babel/helper-function-name": "^7.1.0",
"@babel/helper-member-expression-to-functions": "^7.0.0",
"@babel/helper-optimise-call-expression": "^7.0.0",
"@babel/helper-plugin-utils": "^7.0.0",
"@babel/helper-replace-supers": "^7.2.3"
},
"dependencies": {
"@babel/generator": {
"version": "7.3.3",
"resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.3.3.tgz",
"integrity": "sha512-aEADYwRRZjJyMnKN7llGIlircxTCofm3dtV5pmY6ob18MSIuipHpA2yZWkPlycwu5HJcx/pADS3zssd8eY7/6A==",
"dev": true,
"requires": {
"@babel/types": "^7.3.3",
"jsesc": "^2.5.1",
"lodash": "^4.17.11",
"source-map": "^0.5.0",
"trim-right": "^1.0.1"
},
"dependencies": {
"@babel/types": {
"version": "7.3.3",
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.3.3.tgz",
"integrity": "sha512-2tACZ80Wg09UnPg5uGAOUvvInaqLk3l/IAhQzlxLQOIXacr6bMsra5SH6AWw/hIDRCSbCdHP2KzSOD+cT7TzMQ==",
"dev": true,
"requires": {
"esutils": "^2.0.2",
"lodash": "^4.17.11",
"to-fast-properties": "^2.0.0"
}
}
}
},
"@babel/helper-replace-supers": {
"version": "7.2.3",
"resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.2.3.tgz",
"integrity": "sha512-GyieIznGUfPXPWu0yLS6U55Mz67AZD9cUk0BfirOWlPrXlBcan9Gz+vHGz+cPfuoweZSnPzPIm67VtQM0OWZbA==",
"dev": true,
"requires": {
"@babel/helper-member-expression-to-functions": "^7.0.0",
"@babel/helper-optimise-call-expression": "^7.0.0",
"@babel/traverse": "^7.2.3",
"@babel/types": "^7.0.0"
}
},
"@babel/parser": {
"version": "7.3.3",
"resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.3.3.tgz",
"integrity": "sha512-xsH1CJoln2r74hR+y7cg2B5JCPaTh+Hd+EbBRk9nWGSNspuo6krjhX0Om6RnRQuIvFq8wVXCLKH3kwKDYhanSg==",
"dev": true
},
"@babel/traverse": {
"version": "7.2.3",
"resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.2.3.tgz",
"integrity": "sha512-Z31oUD/fJvEWVR0lNZtfgvVt512ForCTNKYcJBGbPb1QZfve4WGH8Wsy7+Mev33/45fhP/hwQtvgusNdcCMgSw==",
"dev": true,
"requires": {
"@babel/code-frame": "^7.0.0",
"@babel/generator": "^7.2.2",
"@babel/helper-function-name": "^7.1.0",
"@babel/helper-split-export-declaration": "^7.0.0",
"@babel/parser": "^7.2.3",
"@babel/types": "^7.2.2",
"debug": "^4.1.0",
"globals": "^11.1.0",
"lodash": "^4.17.10"
},
"dependencies": {
"@babel/types": {
"version": "7.3.3",
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.3.3.tgz",
"integrity": "sha512-2tACZ80Wg09UnPg5uGAOUvvInaqLk3l/IAhQzlxLQOIXacr6bMsra5SH6AWw/hIDRCSbCdHP2KzSOD+cT7TzMQ==",
"dev": true,
"requires": {
"esutils": "^2.0.2",
"lodash": "^4.17.11",
"to-fast-properties": "^2.0.0"
}
}
}
},
"debug": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz",
"integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==",
"dev": true,
"requires": {
"ms": "^2.1.1"
}
},
"ms": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz",
"integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==",
"dev": true
},
"source-map": {
"version": "0.5.7",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz",
"integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=",
"dev": true
}
}
},
"@babel/helper-define-map": {
"version": "7.1.0",
"resolved": "https://registry.npmjs.org/@babel/helper-define-map/-/helper-define-map-7.1.0.tgz",
@ -422,6 +532,16 @@
"@babel/plugin-syntax-async-generators": "^7.2.0"
}
},
"@babel/plugin-proposal-class-properties": {
"version": "7.3.3",
"resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.3.3.tgz",
"integrity": "sha512-XO9eeU1/UwGPM8L+TjnQCykuVcXqaO5J1bkRPIygqZ/A2L1xVMJ9aZXrY31c0U4H2/LHKL4lbFQLsxktSrc/Ng==",
"dev": true,
"requires": {
"@babel/helper-create-class-features-plugin": "^7.3.0",
"@babel/helper-plugin-utils": "^7.0.0"
}
},
"@babel/plugin-proposal-json-strings": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.2.0.tgz",

View file

@ -86,6 +86,7 @@
"devDependencies": {
"@babel/cli": "^7.1.5",
"@babel/core": "^7.2.0",
"@babel/plugin-proposal-class-properties": "^7.3.3",
"@babel/plugin-proposal-object-rest-spread": "^7.0.0",
"@babel/polyfill": "^7.0.0",
"@babel/preset-env": "^7.2.0",

View file

@ -14,12 +14,11 @@ try {
let currentApp;
try {
const Server = require('./server/');
const Server = require('./server/').default;
currentApp = Server;
const speech = new Server();
speech.start();
} catch (error) {
console.log('server startup error:', error);
process.exit(1);
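
The root index.js above stays plain CommonJS, which is why its require call gains a .default: Babel compiles an export default into a property named default on the CommonJS exports object. Roughly:

// server/index.js after this commit (ES6 source)
export default Server;

// what Babel emits, simplified:
//   Object.defineProperty(exports, '__esModule', { value: true });
//   exports.default = Server;

// so an untranspiled consumer has to unwrap it explicitly
const Server = require('./server/').default;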

View file

@ -1,5 +1,5 @@
const Sequelize = require('sequelize');
const logger = require('winston');
import Sequelize from 'sequelize';
import logger from 'winston';
import abnormalClaimTable from './tables/abnormalClaimTable';
import addressTable from './tables/addressTable';
@ -22,51 +22,45 @@ import transactionAddressQueries from './queries/transactionAddressQueries';
import transactionQueries from './queries/transactionQueries';
const DATABASE_STRUCTURE = {
'abnormal_claim': {
abnormal_claim: {
table: abnormalClaimTable,
queries: abnormalClaimQueries,
},
'address': {
address: {
table: addressTable,
queries: addressQueries,
},
'block': {
block: {
table: blockTable,
queries: blockQueries,
},
'claim': {
claim: {
table: claimTable,
queries: claimQueries,
},
'input': {
input: {
table: inputTable,
queries: inputQueries,
},
'output': {
output: {
table: outputTable,
queries: outputQueries,
},
'support': {
support: {
table: supportTable,
queries: supportQueries,
},
'transaction_address': {
transaction_address: {
table: transactionAddressTable,
queries: transactionAddressQueries,
},
'transaction': {
transaction: {
table: transactionTable,
queries: transactionQueries,
},
};
const {
host,
port,
database,
username,
password,
} = require('@config/chainqueryConfig');
const { host, port, database, username, password } = require('@config/chainqueryConfig');
if (!database || !username || !password) {
logger.warn('missing database, user, or password from chainqueryConfig');

View file

@ -1,5 +1,5 @@
const logger = require('winston');
const db = require('../../../models');
import logger from 'winston';
import db from 'server/models';
const updateBlockedList = (req, res) => {
db.Blocked.refreshTable()
@ -19,4 +19,4 @@ const updateBlockedList = (req, res) => {
});
};
module.exports = updateBlockedList;
export default updateBlockedList;

View file

@ -1,18 +1,17 @@
const db = require('../../../../models');
import db from 'server/models';
const checkChannelAvailability = (name) => {
return db.Channel
.findAll({
const checkChannelAvailability = name => {
return db.Channel.findAll({
where: {
channelName: name,
},
})
.then(result => {
return (result.length <= 0);
return result.length <= 0;
})
.catch(error => {
throw error;
});
};
module.exports = checkChannelAvailability;
export default checkChannelAvailability;

View file

@ -1,6 +1,6 @@
const checkChannelAvailability = require('./checkChannelAvailability.js');
const { sendGATimingEvent } = require('../../../../utils/googleAnalytics.js');
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
import checkChannelAvailability from './checkChannelAvailability.js';
import { sendGATimingEvent } from '@serverutils/googleAnalytics.js';
import { handleErrorResponse } from '../../../utils/errorHandlers.js';
/*
@ -37,4 +37,4 @@ const channelAvailability = ({ ip, originalUrl, params: { name } }, res) => {
});
};
module.exports = channelAvailability;
export default channelAvailability;
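
Many of the rewritten imports use non-relative specifiers such as server/models, @serverutils/..., @globalutils/..., @config/..., and @private/.... Those aliases have to be resolved by the build tooling; the alias configuration itself is not shown in this commit, so the fragment below is only a guess at how it might look with babel-plugin-module-resolver, and every mapped path is an assumption:

// babel.config.js fragment (assumed plugin, assumed paths)
module.exports = {
  plugins: [
    [
      'module-resolver',
      {
        alias: {
          server: './server',
          '@config': './config',
          '@private': './private',
          '@globalutils': './utils',
          '@serverutils': './server/utils',
        },
      },
    ],
  ],
};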

View file

@ -1,7 +1,7 @@
const chainquery = require('chainquery').default;
const logger = require('winston');
const getClaimData = require('server/utils/getClaimData');
const { returnPaginatedChannelClaims } = require('./channelPagination.js');
import chainquery from 'chainquery';
import logger from 'winston';
import getClaimData from 'server/utils/getClaimData';
import { returnPaginatedChannelClaims } from './channelPagination.js';
const getChannelClaims = async (channelName, channelLongId, page) => {
logger.debug(`getChannelClaims: ${channelName}, ${channelLongId}, ${page}`);
@ -33,4 +33,4 @@ const getChannelClaims = async (channelName, channelLongId, page) => {
return returnPaginatedChannelClaims(channelName, channelShortId, processedChannelClaims, page);
};
module.exports = getChannelClaims;
export default getChannelClaims;

View file

@ -1,5 +1,5 @@
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
const getChannelClaims = require('./getChannelClaims.js');
import { handleErrorResponse } from '../../../utils/errorHandlers.js';
import getChannelClaims from './getChannelClaims.js';
const NO_CHANNEL = 'NO_CHANNEL';
@ -29,4 +29,4 @@ const channelClaims = ({ ip, originalUrl, body, params }, res) => {
});
};
module.exports = channelClaims;
export default channelClaims;

View file

@ -1,18 +1,25 @@
const db = require('server/models');
const chainquery = require('chainquery').default;
import db from 'server/models';
import chainquery from 'chainquery';
const getChannelData = async (channelName, channelClaimId) => {
let longChannelClaimId = await chainquery.claim.queries.getLongClaimId(channelName, channelClaimId).catch(() => false);
let longChannelClaimId = await chainquery.claim.queries
.getLongClaimId(channelName, channelClaimId)
.catch(() => false);
if (!longChannelClaimId) {
// Allow an error to throw here if this fails
longChannelClaimId = await db.Certificate.getLongChannelId(channelName, channelClaimId);
}
let shortChannelClaimId = await chainquery.claim.queries.getShortClaimIdFromLongClaimId(longChannelClaimId, channelName).catch(() => false);
let shortChannelClaimId = await chainquery.claim.queries
.getShortClaimIdFromLongClaimId(longChannelClaimId, channelName)
.catch(() => false);
if (!shortChannelClaimId) {
shortChannelClaimId = await db.Certificate.getShortChannelIdFromLongChannelId(longChannelClaimId, channelName);
shortChannelClaimId = await db.Certificate.getShortChannelIdFromLongChannelId(
longChannelClaimId,
channelName
);
}
return {
@ -22,4 +29,4 @@ const getChannelData = async (channelName, channelClaimId) => {
};
};
module.exports = getChannelData;
export default getChannelData;

View file

@ -1,7 +1,8 @@
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
const getChannelData = require('./getChannelData.js');
const isApprovedChannel = require('../../../../../utils/isApprovedChannel');
const { publishing: { serveOnlyApproved, approvedChannels } } = require('@config/siteConfig');
import { handleErrorResponse } from '../../../utils/errorHandlers.js';
import getChannelData from './getChannelData.js';
import isApprovedChannel from '@globalutils/isApprovedChannel';
import { publishing } from '@config/siteConfig';
const { serveOnlyApproved, approvedChannels } = publishing;
const NO_CHANNEL = 'NO_CHANNEL';
const LONG_ID = 'longId';
@ -20,7 +21,9 @@ const channelData = ({ ip, originalUrl, body, params }, res) => {
if (channelClaimId === 'none') channelClaimId = null;
const chanObj = {};
if (channelName) chanObj.name = channelName;
if (channelClaimId) chanObj[(channelClaimId.length === LONG_CLAIM_LENGTH ? LONG_ID : SHORT_ID)] = channelClaimId;
if (channelClaimId) {
chanObj[channelClaimId.length === LONG_CLAIM_LENGTH ? LONG_ID : SHORT_ID] = channelClaimId;
}
if (serveOnlyApproved && !isApprovedChannel(chanObj, approvedChannels)) {
return res.status(404).json({
success: false,
@ -46,4 +49,4 @@ const channelData = ({ ip, originalUrl, body, params }, res) => {
});
};
module.exports = channelData;
export default channelData;

View file

@ -1,6 +1,6 @@
const { handleErrorResponse } = require('server/controllers/utils/errorHandlers.js');
const db = require('server/models');
const chainquery = require('chainquery').default;
import { handleErrorResponse } from 'server/controllers/utils/errorHandlers.js';
import db from 'server/models';
import chainquery from 'chainquery';
/*
@ -10,7 +10,9 @@ route to get a short channel id from long channel Id
const channelShortIdRoute = async ({ ip, originalUrl, params }, res) => {
try {
let shortId = await chainquery.claim.queries.getShortClaimIdFromLongClaimId(params.longId, params.name).catch(() => false);
let shortId = await chainquery.claim.queries
.getShortClaimIdFromLongClaimId(params.longId, params.name)
.catch(() => false);
if (!shortId) {
shortId = await db.Certificate.getShortChannelIdFromLongChannelId(params.longId, params.name);
@ -22,4 +24,4 @@ const channelShortIdRoute = async ({ ip, originalUrl, params }, res) => {
}
};
module.exports = channelShortIdRoute;
export default channelShortIdRoute;

View file

@ -1,9 +1,9 @@
const logger = require('winston');
const db = require('server/models');
const chainquery = require('chainquery').default;
const { abandonClaim } = require('server/lbrynet');
const deleteFile = require('../publish/deleteFile.js');
const authenticateUser = require('../publish/authentication.js');
import logger from 'winston';
import db from 'server/models';
import chainquery from 'chainquery';
import { abandonClaim } from 'server/lbrynet';
import deleteFile from '../publish/deleteFile.js';
import authenticateUser from '../publish/authentication.js';
/*
route to abandon a claim through the daemon
@ -47,4 +47,4 @@ const claimAbandon = async (req, res) => {
}
};
module.exports = claimAbandon;
export default claimAbandon;

View file

@ -1,9 +1,11 @@
const chainquery = require('chainquery').default;
const { publishing: { primaryClaimAddress, additionalClaimAddresses } } = require('@config/siteConfig');
const Sequelize = require('sequelize');
import chainquery from 'chainquery';
import Sequelize from 'sequelize';
import { publishing } from '@config/siteConfig';
const { primaryClaimAddress, additionalClaimAddresses } = publishing;
const Op = Sequelize.Op;
const claimAvailability = async (name) => {
const claimAvailability = async name => {
const claimAddresses = additionalClaimAddresses || [];
claimAddresses.push(primaryClaimAddress);
// find any records where the name is used
@ -18,11 +20,11 @@ const claimAvailability = async (name) => {
},
})
.then(result => {
return (result.length <= 0);
return result.length <= 0;
})
.catch(error => {
throw error;
});
};
module.exports = claimAvailability;
export default claimAvailability;

View file

@ -1,6 +1,6 @@
const checkClaimAvailability = require('./checkClaimAvailability.js');
const { sendGATimingEvent } = require('../../../../utils/googleAnalytics.js');
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
import checkClaimAvailability from './checkClaimAvailability.js';
import { sendGATimingEvent } from '../../../../utils/googleAnalytics.js';
import { handleErrorResponse } from '../../../utils/errorHandlers.js';
/*
@ -29,4 +29,4 @@ const claimAvailability = ({ ip, originalUrl, params: { name } }, res) => {
});
};
module.exports = claimAvailability;
export default claimAvailability;

View file

@ -1,7 +1,7 @@
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
const getClaimData = require('server/utils/getClaimData');
const fetchClaimData = require('server/utils/fetchClaimData');
const logger = require('winston');
import { handleErrorResponse } from '../../../utils/errorHandlers.js';
import getClaimData from 'server/utils/getClaimData';
import fetchClaimData from 'server/utils/fetchClaimData';
import logger from 'winston';
/*
route to return data for a claim
@ -28,4 +28,4 @@ const claimData = async ({ ip, originalUrl, body, params }, res) => {
}
};
module.exports = claimData;
export default claimData;

View file

@ -1,13 +1,13 @@
import { createFileRecordDataAfterGet } from 'server/models/utils/createFileRecordData.js';
import { handleErrorResponse } from '../../../utils/errorHandlers.js';
import getClaimData from 'server/utils/getClaimData';
import chainquery from 'chainquery';
import db from 'server/models';
import logger from 'winston';
import awaitFileSize from 'server/utils/awaitFileSize';
import isBot from 'isbot';
import publishCache from 'server/utils/publishCache';
const { getClaim, resolveUri } = require('server/lbrynet');
const { createFileRecordDataAfterGet } = require('server/models/utils/createFileRecordData.js');
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
const getClaimData = require('server/utils/getClaimData');
const chainquery = require('chainquery').default;
const db = require('server/models');
const logger = require('winston');
const awaitFileSize = require('server/utils/awaitFileSize');
const isBot = require('isbot');
const publishCache = require('server/utils/publishCache');
const RETRY_MS = 250;
const TIMEOUT_MS = 15000;
@ -83,4 +83,4 @@ const claimGet = async ({ ip, originalUrl, params, headers }, res) => {
handleErrorResponse(originalUrl, ip, error, res);
}
};
module.exports = claimGet;
export default claimGet;

View file

@ -1,5 +1,5 @@
const { getClaimList } = require('../../../../lbrynet');
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
import { getClaimList } from 'server/lbrynet';
import { handleErrorResponse } from '../../../utils/errorHandlers.js';
/*
@ -17,4 +17,4 @@ const claimList = ({ ip, originalUrl, params }, res) => {
});
};
module.exports = claimList;
export default claimList;

View file

@ -1,10 +1,10 @@
const db = require('server/models');
const chainquery = require('chainquery').default;
const logger = require('winston');
const publishCache = require('server/utils/publishCache');
const { handleErrorResponse } = require('server/controllers/utils/errorHandlers.js');
import db from 'server/models';
import chainquery from 'chainquery';
import logger from 'winston';
import publishCache from 'server/utils/publishCache';
import { handleErrorResponse } from 'server/controllers/utils/errorHandlers.js';
const getClaimId = require('server/controllers/utils/getClaimId.js');
import getClaimId from 'server/controllers/utils/getClaimId.js';
const NO_CHANNEL = 'NO_CHANNEL';
const NO_CLAIM = 'NO_CLAIM';
@ -72,4 +72,4 @@ const claimLongId = ({ ip, originalUrl, body, params }, res) => {
});
};
module.exports = claimLongId;
export default claimLongId;

View file

@ -1,5 +1,5 @@
const logger = require('winston');
const db = require('../../../../models');
import logger from 'winston';
import db from 'server/models';
const authenticateChannelCredentials = (channelName, channelId, userPassword) => {
return new Promise((resolve, reject) => {
@ -10,8 +10,7 @@ const authenticateChannelCredentials = (channelName, channelId, userPassword) =>
if (channelName) channelFindParams['channelName'] = channelName;
if (channelId) channelFindParams['channelClaimId'] = channelId;
// find the channel
db.Channel
.findOne({
db.Channel.findOne({
where: channelFindParams,
})
.then(channel => {
@ -81,4 +80,4 @@ const authenticateUser = (channelName, channelId, channelPassword, user) => {
});
};
module.exports = authenticateUser;
export default authenticateUser;

View file

@ -1,5 +1,5 @@
const logger = require('winston');
const { details, publishing } = require('@config/siteConfig');
import logger from 'winston';
import { details, publishing } from '@config/siteConfig';
const createPublishParams = (
filePath,
name,
@ -59,4 +59,4 @@ const createPublishParams = (
return publishParams;
};
module.exports = createPublishParams;
export default createPublishParams;

View file

@ -1,5 +1,5 @@
const logger = require('winston');
const { details, publishing } = require('@config/siteConfig');
import logger from 'winston';
import { details, publishing } from '@config/siteConfig';
const createThumbnailPublishParams = (thumbnailFilePath, claimName, license, licenseUrl, nsfw) => {
if (!thumbnailFilePath) {
@ -26,4 +26,4 @@ const createThumbnailPublishParams = (thumbnailFilePath, claimName, license, lic
};
};
module.exports = createThumbnailPublishParams;
export default createThumbnailPublishParams;

View file

@ -1,7 +1,7 @@
const logger = require('winston');
const fs = require('fs');
import logger from 'winston';
import fs from 'fs';
const deleteFile = (filePath) => {
const deleteFile = filePath => {
fs.unlink(filePath, err => {
if (err) {
return logger.error(`error deleting temporary file ${filePath}`);
@ -10,4 +10,4 @@ const deleteFile = (filePath) => {
});
};
module.exports = deleteFile;
export default deleteFile;

View file

@ -1,30 +1,33 @@
const logger = require('winston');
import logger from 'winston';
import { sendGATimingEvent } from 'server/utils/googleAnalytics.js';
import { handleErrorResponse } from '../../../utils/errorHandlers.js';
import checkClaimAvailability from '../availability/checkClaimAvailability.js';
import publish from './publish.js';
import createPublishParams from './createPublishParams.js';
import createThumbnailPublishParams from './createThumbnailPublishParams.js';
import parsePublishApiRequestBody from './parsePublishApiRequestBody.js';
import parsePublishApiRequestFiles from './parsePublishApiRequestFiles.js';
import authenticateUser from './authentication.js';
import chainquery from 'chainquery';
import publishCache from 'server/utils/publishCache';
import isApprovedChannel from '@globalutils/isApprovedChannel';
import { details, publishing } from '@config/siteConfig';
import createCanonicalLink from '@globalutils/createCanonicalLink';
const { host } = details;
const {
details: { host },
publishing: { disabled, disabledMessage },
} = require('@config/siteConfig');
const { sendGATimingEvent } = require('server/utils/googleAnalytics.js');
const isApprovedChannel = require('@globalutils/isApprovedChannel');
const {
publishing: { publishOnlyApproved, approvedChannels, thumbnailChannel, thumbnailChannelId },
} = require('@config/siteConfig');
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
const checkClaimAvailability = require('../availability/checkClaimAvailability.js');
const publish = require('./publish.js');
const createPublishParams = require('./createPublishParams.js');
const createThumbnailPublishParams = require('./createThumbnailPublishParams.js');
const parsePublishApiRequestBody = require('./parsePublishApiRequestBody.js');
const parsePublishApiRequestFiles = require('./parsePublishApiRequestFiles.js');
const authenticateUser = require('./authentication.js');
const chainquery = require('chainquery').default;
const createCanonicalLink = require('@globalutils/createCanonicalLink');
const publishCache = require('server/utils/publishCache');
disabled,
disabledMessage,
publishOnlyApproved,
approvedChannels,
thumbnailChannel,
thumbnailChannelId,
} = publishing;
const CLAIM_TAKEN = 'CLAIM_TAKEN';
const UNAPPROVED_CHANNEL = 'UNAPPROVED_CHANNEL';
@ -218,4 +221,4 @@ const claimPublish = ({ body, files, headers, ip, originalUrl, user, tor }, res)
});
};
module.exports = claimPublish;
export default claimPublish;

View file

@ -36,4 +36,4 @@ const parsePublishApiRequestBody = ({
};
};
module.exports = parsePublishApiRequestBody;
export default parsePublishApiRequestBody;

View file

@ -1,4 +1,4 @@
const chai = require('chai');
import chai from 'chai';
const expect = chai.expect;
describe('#parsePublishApiRequestBody()', function() {

View file

@ -1,6 +1,6 @@
const path = require('path');
const validateFileTypeAndSize = require('./validateFileTypeAndSize.js');
const validateFileForPublish = require('./validateFileForPublish.js');
import path from 'path';
import validateFileTypeAndSize from './validateFileTypeAndSize.js';
import validateFileForPublish from './validateFileForPublish.js';
const parsePublishApiRequestFiles = ({ file, thumbnail }, isUpdate) => {
// make sure a file was provided
@ -59,4 +59,4 @@ const parsePublishApiRequestFiles = ({ file, thumbnail }, isUpdate) => {
return obj;
};
module.exports = parsePublishApiRequestFiles;
export default parsePublishApiRequestFiles;

View file

@ -1,4 +1,4 @@
const chai = require('chai');
import chai from 'chai';
const expect = chai.expect;
describe('#parsePublishApiRequestFiles()', function() {

View file

@ -1,11 +1,9 @@
const logger = require('winston');
const db = require('../../../../models');
const { publishClaim } = require('../../../../lbrynet');
const { createFileRecordDataAfterPublish } = require('server/models/utils/createFileRecordData.js');
const {
createClaimRecordDataAfterPublish,
} = require('server/models/utils/createClaimRecordData.js');
const deleteFile = require('./deleteFile.js');
import logger from 'winston';
import db from '../../../../models';
import { publishClaim } from '../../../../lbrynet';
import { createFileRecordDataAfterPublish } from '../../../../models/utils/createFileRecordData.js';
import { createClaimRecordDataAfterPublish } from '../../../../models/utils/createClaimRecordData.js';
import deleteFile from './deleteFile.js';
const publish = async (publishParams, fileName, fileType) => {
let publishResults;
@ -80,5 +78,4 @@ const publish = async (publishParams, fileName, fileType) => {
};
}
};
module.exports = publish;
export default publish;

View file

@ -1,6 +1,6 @@
const logger = require('winston');
import logger from 'winston';
const { publishing } = require('@config/siteConfig.json');
import { publishing } from '@config/siteConfig.json';
const { fileSizeLimits } = publishing;
@ -35,4 +35,4 @@ const validateFileForPublish = file => {
return file;
};
module.exports = validateFileForPublish;
export default validateFileForPublish;

View file

@ -1,4 +1,4 @@
const logger = require('winston');
import logger from 'winston';
const {
publishing: { maxSizeImage = 10000000, maxSizeGif = 50000000, maxSizeVideo = 50000000 },
@ -41,4 +41,4 @@ const validateFileTypeAndSize = file => {
return file;
};
module.exports = validateFileTypeAndSize;
export default validateFileTypeAndSize;

View file

@ -1,5 +1,5 @@
const { resolveUri } = require('../../../../lbrynet/index');
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
import { resolveUri } from 'server/lbrynet/index';
import { handleErrorResponse } from '../../../utils/errorHandlers.js';
/*
@ -17,4 +17,4 @@ const claimResolve = ({ headers, ip, originalUrl, params }, res) => {
});
};
module.exports = claimResolve;
export default claimResolve;

View file

@ -1,5 +1,5 @@
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
const chainquery = require('chainquery').default;
import { handleErrorResponse } from '../../../utils/errorHandlers.js';
import chainquery from 'chainquery';
/*
@ -22,4 +22,4 @@ const claimShortId = async ({ ip, originalUrl, body, params }, res) => {
}
};
module.exports = claimShortId;
export default claimShortId;

View file

@ -1,21 +1,20 @@
const logger = require('winston');
const db = require('server/models');
const {
details,
publishing: { disabled, disabledMessage, primaryClaimAddress },
} = require('@config/siteConfig');
const { resolveUri } = require('server/lbrynet');
const { sendGATimingEvent } = require('../../../../utils/googleAnalytics.js');
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
const publish = require('../publish/publish.js');
const parsePublishApiRequestBody = require('../publish/parsePublishApiRequestBody');
const parsePublishApiRequestFiles = require('../publish/parsePublishApiRequestFiles.js');
const authenticateUser = require('../publish/authentication.js');
const createThumbnailPublishParams = require('../publish/createThumbnailPublishParams.js');
const chainquery = require('chainquery').default;
const createCanonicalLink = require('@globalutils/createCanonicalLink');
const { getFileListFileByOutpoint } = require('server/lbrynet');
const publishCache = require('server/utils/publishCache');
import logger from 'winston';
import { sendGATimingEvent } from '@serverutils/googleAnalytics.js';
import { handleErrorResponse } from '../../../utils/errorHandlers.js';
import publish from '../publish/publish.js';
import parsePublishApiRequestBody from '../publish/parsePublishApiRequestBody';
import parsePublishApiRequestFiles from '../publish/parsePublishApiRequestFiles.js';
import authenticateUser from '../publish/authentication.js';
import createThumbnailPublishParams from '../publish/createThumbnailPublishParams.js';
import chainquery from 'chainquery';
import { getFileListFileByOutpoint } from 'server/lbrynet';
import publishCache from 'server/utils/publishCache';
import createCanonicalLink from '@globalutils/createCanonicalLink';
import isApprovedChannel from '@globalutils/isApprovedChannel';
import { details, publishing } from '@config/siteConfig';
const { disabled, disabledMessage, primaryClaimAddress } = publishing;
//, approvedChannels, thumbnailChannel, thumbnailChannelId
/*
route to update a claim through the daemon
@ -242,4 +241,4 @@ const claimUpdate = ({ body, files, headers, ip, originalUrl, user, tor }, res)
});
};
module.exports = claimUpdate;
export default claimUpdate;

View file

@ -1,5 +1,5 @@
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
const db = require('server/models');
import { handleErrorResponse } from '../../../utils/errorHandlers.js';
import db from 'server/models';
/*
@ -25,4 +25,4 @@ const claimViews = async ({ ip, originalUrl, body, params }, res) => {
}
};
module.exports = claimViews;
export default claimViews;

View file

@ -1,4 +1,5 @@
const { publishing: {
const {
publishing: {
primaryClaimAddress,
uploadDirectory,
thumbnailChannel,
@ -6,7 +7,8 @@ const { publishing: {
additionalClaimAddresses,
disabled,
disabledMessage,
} } = require('@config/siteConfig');
},
} = require('@config/siteConfig');
/*
@ -26,4 +28,4 @@ const publishingConfig = (req, res) => {
});
};
module.exports = publishingConfig;
export default publishingConfig;

View file

@ -1,10 +1,10 @@
const logger = require('winston');
import logger from 'winston';
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
const { getFileListFileByOutpoint } = require('server/lbrynet');
import { handleErrorResponse } from '../../../utils/errorHandlers.js';
import { getFileListFileByOutpoint } from 'server/lbrynet';
const chainquery = require('chainquery').default;
const publishCache = require('server/utils/publishCache');
import chainquery from 'chainquery';
import publishCache from 'server/utils/publishCache';
/*
@ -37,4 +37,4 @@ const fileAvailability = async ({ ip, originalUrl, params }, res) => {
}
};
module.exports = fileAvailability;
export default fileAvailability;

View file

@ -1,16 +1,13 @@
const db = require('../../../../models');
import db from 'server/models';
const getChannelData = (channelName, channelClaimId) => {
return new Promise((resolve, reject) => {
let longChannelClaimId;
// 1. get the long channel Id (make sure channel exists)
db.Certificate
.getLongChannelId(channelName, channelClaimId)
db.Certificate.getLongChannelId(channelName, channelClaimId)
.then(fullClaimId => {
longChannelClaimId = fullClaimId;
return db
.Certificate
.getShortChannelIdFromLongChannelId(fullClaimId, channelName);
return db.Certificate.getShortChannelIdFromLongChannelId(fullClaimId, channelName);
})
.then(shortChannelClaimId => {
resolve({
@ -25,4 +22,4 @@ const getChannelData = (channelName, channelClaimId) => {
});
};
module.exports = getChannelData;
export default getChannelData;

View file

@ -1,6 +1,6 @@
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
import { handleErrorResponse } from '../../../utils/errorHandlers.js';
const getChannelData = require('./getChannelData.js');
import getChannelData from './getChannelData.js';
const NO_CHANNEL = 'NO_CHANNEL';
@ -32,4 +32,4 @@ const channelData = ({ ip, originalUrl, body, params }, res) => {
});
};
module.exports = channelData;
export default channelData;

View file

@ -1,11 +1,10 @@
const logger = require('winston');
const db = require('../../../models');
const getClaimId = require('../../utils/getClaimId');
const publishCache = require('server/utils/publishCache');
import logger from 'winston';
import db from 'server/models';
import getClaimId from '../../utils/getClaimId';
import publishCache from 'server/utils/publishCache';
const {
details: { host, title: siteTitle },
} = require('@config/siteConfig');
import { details } from '@config/siteConfig';
const { host, title: siteTitle } = details;
const getOEmbedDataForAsset = (channelName, channelClaimId, claimName, claimId) => {
let fileData, claimData;
@ -66,4 +65,4 @@ const getOEmbedDataForAsset = (channelName, channelClaimId, claimName, claimId)
});
};
module.exports = getOEmbedDataForAsset;
export default getOEmbedDataForAsset;

View file

@ -1,21 +1,14 @@
const db = require('../../../models');
const {
details: {
host,
title: siteTitle,
},
} = require('@config/siteConfig');
import db from 'server/models';
import { details } from '@config/siteConfig';
const { host, title: siteTitle } = details;
const getOEmbedDataForChannel = (channelName, channelClaimId) => {
return db.Certificate
.findOne({
return db.Certificate.findOne({
where: {
name: channelName,
claimId: channelClaimId,
},
})
.then(certificateRecord => {
}).then(certificateRecord => {
const certificateData = certificateRecord.dataValues;
return {
version: 1.0,
@ -30,4 +23,4 @@ const getOEmbedDataForChannel = (channelName, channelClaimId) => {
});
};
module.exports = getOEmbedDataForChannel;
export default getOEmbedDataForChannel;

View file

@ -1,12 +1,14 @@
const logger = require('winston');
const lbryUri = require('../../../../utils/lbryUri');
import logger from 'winston';
import lbryUri from '@globalutils/lbryUri';
const getOEmbedDataForChannel = require('./getOEmbedDataForChannel');
const getOEmbedDataForAsset = require('./getOEmbedDataForAsset');
const parseSpeechUrl = require('./parseSpeechUrl');
import getOEmbedDataForChannel from './getOEmbedDataForChannel';
import getOEmbedDataForAsset from './getOEmbedDataForAsset';
import parseSpeechUrl from './parseSpeechUrl';
const getOEmbedData = (req, res) => {
const { query: { url, format } } = req;
const {
query: { url, format },
} = req;
logger.debug('req url', url);
logger.debug('req format', format);
@ -36,7 +38,7 @@ const getOEmbedData = (req, res) => {
return res.status(200).json(data);
}
})
.catch((error) => {
.catch(error => {
return res.status(404).json({
success: false,
message: error,
@ -54,7 +56,7 @@ const getOEmbedData = (req, res) => {
return res.status(200).json(data);
}
})
.catch((error) => {
.catch(error => {
return res.status(404).json({
success: false,
message: error,
@ -63,4 +65,4 @@ const getOEmbedData = (req, res) => {
}
};
module.exports = getOEmbedData;
export default getOEmbedData;

View file

@ -1,17 +1,10 @@
const logger = require('winston');
import logger from 'winston';
const parseSpeechUrl = (url) => {
const parseSpeechUrl = url => {
const componentsRegex = new RegExp(
'([^:/?#]+://)' +
'([^/?#]*)' +
'(/)' +
'([^/?#]*)' +
'(/)' +
'([^/?#]*)'
'([^:/?#]+://)' + '([^/?#]*)' + '(/)' + '([^/?#]*)' + '(/)' + '([^/?#]*)'
);
const [, , , , paramOne, , paramTwo] = componentsRegex
.exec(url)
.map(match => match || null);
const [, , , , paramOne, , paramTwo] = componentsRegex.exec(url).map(match => match || null);
logger.debug(`params from speech url: ${paramOne} ${paramTwo}`);
@ -21,4 +14,4 @@ const parseSpeechUrl = (url) => {
};
};
module.exports = parseSpeechUrl;
export default parseSpeechUrl;

View file

@ -1,6 +1,6 @@
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
const db = require('server/models');
const getClaimData = require('server/utils/getClaimData');
import { handleErrorResponse } from '../../../utils/errorHandlers.js';
import db from 'server/models';
import getClaimData from 'server/utils/getClaimData';
/*
@ -9,14 +9,11 @@ const getClaimData = require('server/utils/getClaimData');
*/
const channelClaims = async ({ ip, originalUrl, body, params }, res) => {
const {
name,
page,
} = params;
const { name, page } = params;
if (name === 'trending') {
const result = await db.Trending.getTrendingClaims();
const claims = await Promise.all(result.map((claim) => getClaimData(claim)));
const claims = await Promise.all(result.map(claim => getClaimData(claim)));
return res.status(200).json({
success: true,
data: {
@ -39,4 +36,4 @@ const channelClaims = async ({ ip, originalUrl, body, params }, res) => {
handleErrorResponse(originalUrl, ip, 'Feature endpoint not found', res);
};
module.exports = channelClaims;
export default channelClaims;

View file

@ -1,5 +1,5 @@
const logger = require('winston');
const db = require('../../../models');
import logger from 'winston';
import db from 'server/models';
/*
@ -13,7 +13,7 @@ const getTorList = (req, res) => {
logger.debug('number of records', result.length);
res.status(200).json(result);
})
.catch((error) => {
.catch(error => {
logger.error(error);
res.status(500).json({
success: false,
@ -22,4 +22,4 @@ const getTorList = (req, res) => {
});
};
module.exports = getTorList;
export default getTorList;

View file

@ -1,7 +1,7 @@
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
const logger = require('winston');
const db = require('../../../../models');
const { masterPassword } = require('@private/authConfig.json');
import { handleErrorResponse } from '../../../utils/errorHandlers.js';
import logger from 'winston';
import db from 'server/models';
import { masterPassword } from '@private/authConfig.json';
/*
route to update a password
@ -22,7 +22,8 @@ const updateUserPassword = ({ ip, originalUrl, body }, res) => {
if (!userName || !oldPassword || !newPassword) {
return res.status(400).json({
success: false,
message: 'body should include userName (channel name without the @), oldPassword, & newPassword',
message:
'body should include userName (channel name without the @), oldPassword, & newPassword',
});
}
@ -60,9 +61,9 @@ const updateUserPassword = ({ ip, originalUrl, body }, res) => {
newPassword,
});
})
.catch((error) => {
.catch(error => {
handleErrorResponse(originalUrl, ip, error, res);
});
};
module.exports = updateUserPassword;
export default updateUserPassword;

View file

@ -1,5 +1,5 @@
const { sendGAServeEvent } = require('../../../utils/googleAnalytics');
const getClaimIdAndServeAsset = require('../utils/getClaimIdAndServeAsset.js');
import { sendGAServeEvent } from '@serverutils/googleAnalytics';
import getClaimIdAndServeAsset from '../utils/getClaimIdAndServeAsset.js';
/*
@ -14,4 +14,4 @@ const serveAsset = ({ headers, ip, originalUrl, params: { claimName, claimId } }
getClaimIdAndServeAsset(null, null, claimName, claimId, originalUrl, ip, res, headers);
};
module.exports = serveAsset;
export default serveAsset;

View file

@ -1,14 +1,14 @@
const logger = require('winston');
import logger from 'winston';
const { sendGAServeEvent } = require('../../../utils/googleAnalytics');
const handleShowRender = require('../../../render/handleShowRender').default;
import { sendGAServeEvent } from '@serverutils/googleAnalytics';
import handleShowRender from 'server/render/handleShowRender';
const lbryUri = require('../../../../utils/lbryUri.js');
import lbryUri from '@globalutils/lbryUri.js';
const determineRequestType = require('../utils/determineRequestType.js');
const getClaimIdAndServeAsset = require('../utils/getClaimIdAndServeAsset.js');
import determineRequestType from '../utils/determineRequestType.js';
import getClaimIdAndServeAsset from '../utils/getClaimIdAndServeAsset.js';
const { SHOW } = require('../constants/request_types.js');
import { SHOW } from '../constants/request_types.js';
/*
@ -45,4 +45,4 @@ const serveByClaim = (req, res) => {
}
};
module.exports = serveByClaim;
export default serveByClaim;

View file

@ -1,15 +1,15 @@
const logger = require('winston');
import logger from 'winston';
const { sendGAServeEvent } = require('../../../utils/googleAnalytics');
const handleShowRender = require('../../../render/handleShowRender').default;
import { sendGAServeEvent } from 'server/utils/googleAnalytics';
import handleShowRender from 'server/render/handleShowRender';
const lbryUri = require('../../../../utils/lbryUri.js');
import lbryUri from '@globalutils/lbryUri.js';
const determineRequestType = require('../utils/determineRequestType.js');
const getClaimIdAndServeAsset = require('../utils/getClaimIdAndServeAsset.js');
const flipClaimNameAndId = require('../utils/flipClaimNameAndId.js');
import determineRequestType from '../utils/determineRequestType.js';
import getClaimIdAndServeAsset from '../utils/getClaimIdAndServeAsset.js';
import flipClaimNameAndId from '../utils/flipClaimNameAndId.js';
const { SHOW } = require('../constants/request_types.js');
import { SHOW } from '../constants/request_types.js';
/*
@ -30,7 +30,9 @@ const serverByIdentifierAndClaim = (req, res) => {
}
({ claimName } = lbryUri.parseClaim(params.claim));
({ isChannel, channelName, channelClaimId, claimId } = lbryUri.parseIdentifier(params.identifier));
({ isChannel, channelName, channelClaimId, claimId } = lbryUri.parseIdentifier(
params.identifier
));
if (!isChannel) {
[claimId, claimName] = flipClaimNameAndId(claimId, claimName);
@ -47,7 +49,16 @@ const serverByIdentifierAndClaim = (req, res) => {
claimId,
});
getClaimIdAndServeAsset(channelName, channelClaimId, claimName, claimId, originalUrl, ip, res, headers);
getClaimIdAndServeAsset(
channelName,
channelClaimId,
claimName,
claimId,
originalUrl,
ip,
res,
headers
);
sendGAServeEvent(headers, ip, originalUrl);
} catch (error) {
@ -55,4 +66,4 @@ const serverByIdentifierAndClaim = (req, res) => {
}
};
module.exports = serverByIdentifierAndClaim;
export default serverByIdentifierAndClaim;

View file

@ -1,4 +1,4 @@
const { SERVE, SHOW } = require('../constants/request_types.js');
import { SERVE, SHOW } from '../constants/request_types.js';
function clientWantsAsset({ accept, range }) {
const imageIsWanted = accept && accept.match(/image\/.*/) && !accept.match(/text\/html/);
@ -13,4 +13,4 @@ const determineRequestType = (hasFileExtension, headers) => {
return SHOW;
};
module.exports = determineRequestType;
export default determineRequestType;

View file

@ -1,14 +1,14 @@
function isValidClaimId(claimId) {
return ((claimId.length === 40) && !/[^A-Za-z0-9]/g.test(claimId));
};
return claimId.length === 40 && !/[^A-Za-z0-9]/g.test(claimId);
}
function isValidShortId(claimId) {
return claimId.length === 1; // it should really evaluate the short url itself
};
}
function isValidShortIdOrClaimId(input) {
return (isValidClaimId(input) || isValidShortId(input));
};
return isValidClaimId(input) || isValidShortId(input);
}
const flipClaimNameAndId = (identifier, name) => {
// this is a patch for backwards compatibility with '/name/claimId' url format
@ -20,4 +20,4 @@ const flipClaimNameAndId = (identifier, name) => {
return [identifier, name];
};
module.exports = flipClaimNameAndId;
export default flipClaimNameAndId;

View file

@ -1,16 +1,20 @@
const logger = require('winston');
import logger from 'winston';
const db = require('server/models');
const chainquery = require('chainquery').default;
const isApprovedChannel = require('@globalutils/isApprovedChannel');
const { getFileListFileByOutpoint, getClaim } = require('server/lbrynet');
const getClaimId = require('../../utils/getClaimId.js');
const { handleErrorResponse } = require('../../utils/errorHandlers.js');
const awaitFileSize = require('server/utils/awaitFileSize');
const serveFile = require('./serveFile.js');
const parseQueryString = require('server/utils/parseQuerystring');
const publishCache = require('server/utils/publishCache');
const isBot = require('isbot');
import db from 'server/models';
import chainquery from 'chainquery';
import { getFileListFileByOutpoint, getClaim } from 'server/lbrynet';
import getClaimId from '../../utils/getClaimId.js';
import { handleErrorResponse } from '../../utils/errorHandlers.js';
import awaitFileSize from 'server/utils/awaitFileSize';
import serveFile from './serveFile.js';
import parseQueryString from 'server/utils/parseQuerystring';
import publishCache from 'server/utils/publishCache';
import isBot from 'isbot';
import isApprovedChannel from '@globalutils/isApprovedChannel';
import { publishing } from '@config/siteConfig';
const { serveOnlyApproved, approvedChannels } = publishing;
const NO_CHANNEL = 'NO_CHANNEL';
const NO_CLAIM = 'NO_CLAIM';
@ -22,10 +26,6 @@ const RETRY_MS = 250;
const TIMEOUT_MS = 15000;
const MIN_BYTES = 15000000;
const {
publishing: { serveOnlyApproved, approvedChannels },
} = require('@config/siteConfig');
const getClaimIdAndServeAsset = async (
channelName,
channelClaimId,
@ -125,4 +125,4 @@ const getClaimIdAndServeAsset = async (
}
};
module.exports = getClaimIdAndServeAsset;
export default getClaimIdAndServeAsset;

View file

@ -1,10 +1,9 @@
const db = require('../../../models');
import db from 'server/models';
const NO_FILE = 'NO_FILE';
const getLocalFileRecord = (claimId, name) => {
return db.File.findOne({where: {claimId, name}})
.then(file => {
return db.File.findOne({ where: { claimId, name } }).then(file => {
if (!file) {
return NO_FILE;
}
@ -12,4 +11,4 @@ const getLocalFileRecord = (claimId, name) => {
});
};
module.exports = getLocalFileRecord;
export default getLocalFileRecord;

View file

@ -1,4 +1,4 @@
const logger = require('winston');
import logger from 'winston';
const logRequestData = (responseType, claimName, channelName, claimId) => {
logger.debug('responseType ===', responseType);
@ -7,4 +7,4 @@ const logRequestData = (responseType, claimName, channelName, claimId) => {
logger.debug('claim id ===', claimId);
};
module.exports = logRequestData;
export default logRequestData;

View file

@ -1,10 +1,10 @@
const logger = require('winston');
const transformImage = require('./transformImage');
const parseQueryString = require('server/utils/parseQuerystring');
const isValidQueryObject = require('server/utils/isValidQueryObj');
const {
serving: { dynamicFileSizing },
} = require('@config/siteConfig');
import logger from 'winston';
import transformImage from './transformImage';
import parseQueryString from 'server/utils/parseQuerystring';
import isValidQueryObject from 'server/utils/isValidQueryObj';
import { serving } from '@config/siteConfig';
const { dynamicFileSizing } = serving;
const { enabled: dynamicEnabled } = dynamicFileSizing;
const serveFile = async (
@ -57,4 +57,4 @@ const serveFile = async (
}
};
module.exports = serveFile;
export default serveFile;

View file

@ -1,9 +1,9 @@
const gm = require('gm');
const logger = require('winston');
import gm from 'gm';
import logger from 'winston';
import { getImageHeightAndWidth } from '@serverutils/imageProcessing';
const imageMagick = gm.subClass({ imageMagick: true });
const { getImageHeightAndWidth } = require('../../../utils/imageProcessing');
module.exports = function transformImage(path, queryObj) {
export default function transformImage(path, queryObj) {
return new Promise((resolve, reject) => {
let { h: cHeight = null } = queryObj;
let { w: cWidth = null } = queryObj;
@ -39,7 +39,7 @@ module.exports = function transformImage(path, queryObj) {
reject(e);
}
});
};
}
function _cropCenter(path, cropWidth, cropHeight, originalWidth, originalHeight) {
let oAspect = originalWidth / originalHeight;

View file

@ -1,4 +1,4 @@
const speechPassport = require('../../../speechPassport/index');
import speechPassport from 'server/speechPassport/index';
const login = (req, res, next) => {
speechPassport.authenticate('local-login', (err, user, info) => {
@ -11,7 +11,7 @@ const login = (req, res, next) => {
message: info.message,
});
}
req.logIn(user, (err) => {
req.logIn(user, err => {
if (err) {
return next(err);
}
@ -25,4 +25,4 @@ const login = (req, res, next) => {
})(req, res, next);
};
module.exports = login;
export default login;

View file

@ -7,4 +7,4 @@ const logout = (req, res) => {
res.status(200).json(responseObject);
};
module.exports = logout;
export default logout;

View file

@ -7,4 +7,4 @@ const signup = (req, res) => {
});
};
module.exports = signup;
export default signup;

View file

@ -6,4 +6,4 @@ const user = (req, res) => {
res.status(200).json(responseObject);
};
module.exports = user;
export default user;

View file

@ -1,8 +1,6 @@
const {
assetDefaults: { thumbnail },
details: { host },
} = require('@config/siteConfig');
import { assetDefaults, details } from '@config/siteConfig';
const { thumbnail } = assetDefaults;
const { host } = details;
const padSizes = {
small: 'padSmall',
medium: 'padMedium',
@ -10,24 +8,24 @@ const padSizes = {
};
const argumentProcessors = {
'bottom': async (config) => {
bottom: async config => {
config.classNames.push('bottom');
},
'right': async (config) => {
right: async config => {
config.classNames.push('right');
},
'pad': async (config, val) => {
pad: async (config, val) => {
config.classNames.push(padSizes[val]);
},
'logoClaim': async (config, val) => {
logoClaim: async (config, val) => {
config.logoUrl = `${host}/${val}`;
},
'link': async (config, val) => {
link: async (config, val) => {
config.logoLink = val;
},
};
const parseLogoConfigParam = async (rawConfig) => {
const parseLogoConfigParam = async rawConfig => {
if (rawConfig) {
let parsedConfig = {
classNames: ['logoLink'],
@ -64,11 +62,7 @@ const parseLogoConfigParam = async (rawConfig) => {
};
const sendVideoEmbedPage = async ({ params }, res) => {
let {
claimId,
config,
name,
} = params;
let { claimId, config, name } = params;
// if channel then swap name and claimId for order
if (name[0] === '@' && name.includes(':')) {
@ -86,4 +80,4 @@ const sendVideoEmbedPage = async ({ params }, res) => {
res.status(200).render('embed', { host, claimId, name, logoConfig });
};
module.exports = sendVideoEmbedPage;
export default sendVideoEmbedPage;

View file

@ -1,14 +1,12 @@
const logger = require('winston');
import logger from 'winston';
module.exports = {
handleErrorResponse: function (originalUrl, ip, error, res) {
logger.error(`Error on ${originalUrl}`, module.exports.useObjectPropertiesIfNoKeys(error));
const [status, message] = module.exports.returnErrorMessageAndStatus(error);
res
.status(status)
.json(module.exports.createErrorResponsePayload(status, message));
},
returnErrorMessageAndStatus: function (error) {
export function handleErrorResponse(originalUrl, ip, error, res) {
logger.error(`Error on ${originalUrl}`, useObjectPropertiesIfNoKeys(error));
const [status, message] = returnErrorMessageAndStatus(error);
res.status(status).json(createErrorResponsePayload(status, message));
}
export function returnErrorMessageAndStatus(error) {
let status, message;
// check for daemon being turned off
if (error.code === 'ECONNREFUSED') {
@ -24,22 +22,23 @@ module.exports = {
}
}
return [status, message];
},
useObjectPropertiesIfNoKeys: function (err) {
}
export function useObjectPropertiesIfNoKeys(err) {
if (Object.keys(err).length === 0) {
let newErrorObject = {};
Object.getOwnPropertyNames(err).forEach((key) => {
Object.getOwnPropertyNames(err).forEach(key => {
newErrorObject[key] = err[key];
});
return newErrorObject;
}
return err;
},
createErrorResponsePayload (status, message) {
}
function createErrorResponsePayload(status, message) {
return {
status,
success: false,
message,
};
},
};
}
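
With errorHandlers.js rewritten as named exports, callers pull in only the helpers they need, and createErrorResponsePayload (no longer exported) becomes private to the module. A usage sketch matching the import specifier used elsewhere in this commit:

import { handleErrorResponse } from 'server/controllers/utils/errorHandlers.js';

// inside an Express handler, assuming req and res are in scope
handleErrorResponse(req.originalUrl, req.ip, new Error('something failed'), res);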

View file

@ -1,8 +1,8 @@
const logger = require('winston');
const db = require('../../models');
const chainquery = require('chainquery').default;
const publishCache = require('server/utils/publishCache');
const createCanonicalLink = require('@globalutils/createCanonicalLink');
import logger from 'winston';
import db from 'server/models';
import chainquery from 'chainquery';
import publishCache from 'server/utils/publishCache';
import createCanonicalLink from '@globalutils/createCanonicalLink';
const getClaimIdByChannel = async (channelName, channelClaimId, claimName) => {
logger.debug(`getClaimIdByChannel(${channelName}, ${channelClaimId}, ${claimName})`);
@ -36,4 +36,4 @@ const getClaimId = async (channelName, channelClaimId, name, claimId) => {
}
};
module.exports = getClaimId;
export default getClaimId;

View file

@ -1,7 +1,7 @@
const redirect = (route) => {
const redirect = route => {
return (req, res) => {
res.status(301).redirect(route);
};
};
module.exports = redirect;
export default redirect;

View file

@ -1,36 +1,33 @@
// load modules
const express = require('express');
const bodyParser = require('body-parser');
const expressHandlebars = require('express-handlebars');
const helmet = require('helmet');
const cookieSession = require('cookie-session');
const http = require('http');
const logger = require('winston');
const Path = require('path');
const httpContext = require('express-http-context');
import express from 'express';
import bodyParser from 'body-parser';
import expressHandlebars from 'express-handlebars';
import helmet from 'helmet';
import cookieSession from 'cookie-session';
import http from 'http';
import logger from 'winston';
import Path from 'path';
import httpContext from 'express-http-context';
// load local modules
const db = require('./models');
const requestLogger = require('./middleware/requestLogger');
const createDatabaseIfNotExists = require('./models/utils/createDatabaseIfNotExists');
const { getAccountBalance } = require('./lbrynet/index');
const configureLogging = require('./utils/configureLogging');
const configureSlack = require('./utils/configureSlack');
const { setupBlockList } = require('./utils/blockList');
const speechPassport = require('./speechPassport');
const processTrending = require('./utils/processTrending');
import db from './models';
import requestLogger from './middleware/requestLogger';
import createDatabaseIfNotExists from './models/utils/createDatabaseIfNotExists';
import { getAccountBalance } from './lbrynet/index';
import configureLogging from './utils/configureLogging';
import configureSlack from './utils/configureSlack';
import { setupBlockList } from './utils/blockList';
import speechPassport from './speechPassport';
import processTrending from './utils/processTrending';
const {
import {
logMetricsMiddleware,
setRouteDataInContextMiddleware,
} = require('./middleware/logMetricsMiddleware');
const {
details: { port: PORT, blockListEndpoint },
startup: { performChecks, performUpdates },
} = require('@config/siteConfig');
const { sessionKey } = require('@private/authConfig.json');
} from './middleware/logMetricsMiddleware';
import { sessionKey } from '@private/authConfig.json';
import { details, startup } from '@config/siteConfig';
const { port: PORT, blockListEndpoint } = details;
const { performChecks, performUpdates } = startup;
// configure.js doesn't handle new keys in config.json files yet. Make sure it doesn't break.
let finalBlockListEndpoint;
@ -246,4 +243,4 @@ function Server() {
};
}
module.exports = Server;
export default Server;

View file

@ -1,14 +1,14 @@
const axios = require('axios');
const logger = require('winston');
const { apiHost, apiPort, getTimeout } = require('@config/lbryConfig');
const lbrynetUri = 'http://' + apiHost + ':' + apiPort;
const db = require('../models');
const { chooseGaLbrynetPublishLabel, sendGATimingEvent } = require('../utils/googleAnalytics.js');
const handleLbrynetResponse = require('./utils/handleLbrynetResponse.js');
const { publishing } = require('@config/siteConfig');
import axios from 'axios';
import logger from 'winston';
import db from 'server/models';
import { handleLbrynetResponse } from './utils/handleLbrynetResponse.js';
import { apiHost, apiPort, getTimeout } from '@config/lbryConfig';
import { chooseGaLbrynetPublishLabel, sendGATimingEvent } from '../utils/googleAnalytics.js';
import { publishing } from '@config/siteConfig';
module.exports = {
publishClaim(publishParams) {
const lbrynetUri = 'http://' + apiHost + ':' + apiPort;
export function publishClaim(publishParams) {
logger.debug(`lbryApi >> Publishing claim to "${publishParams.name}"`);
const gaStartTime = Date.now();
return new Promise((resolve, reject) => {
@ -31,8 +31,9 @@ module.exports = {
reject(error);
});
});
},
getClaim(uri) {
}
export function getClaim(uri) {
logger.debug(`lbryApi >> Getting Claim for "${uri}"`);
const gaStartTime = Date.now();
return new Promise((resolve, reject) => {
@ -52,8 +53,25 @@ module.exports = {
reject(error);
});
});
},
getFileListFileByOutpoint(outpoint) {
}
export async function abandonClaim({ claimId }) {
logger.debug(`lbryApi >> Abandon claim "${claimId}"`);
const gaStartTime = Date.now();
try {
const abandon = await axios.post(lbrynetUri, {
method: 'claim_abandon',
params: { claim_id: claimId },
});
sendGATimingEvent('lbrynet', 'abandonClaim', 'ABANDON_CLAIM', gaStartTime, Date.now());
return abandon.data;
} catch (error) {
logger.error(error);
return error;
}
}
export function getFileListFileByOutpoint(outpoint) {
logger.debug(`lbryApi >> Getting File_List for "${outpoint}"`);
const gaStartTime = Date.now();
return new Promise((resolve, reject) => {
@@ -72,23 +90,9 @@ module.exports = {
reject(error);
});
});
},
async abandonClaim({ claimId }) {
logger.debug(`lbryApi >> Abandon claim "${claimId}"`);
const gaStartTime = Date.now();
try {
const abandon = await axios.post(lbrynetUri, {
method: 'claim_abandon',
params: { claim_id: claimId },
});
sendGATimingEvent('lbrynet', 'abandonClaim', 'ABANDON_CLAIM', gaStartTime, Date.now());
return abandon.data;
} catch (error) {
logger.error(error);
return error;
}
},
getClaimList(claimName) {
export function getClaimList(claimName) {
logger.debug(`lbryApi >> Getting claim_list for "${claimName}"`);
const gaStartTime = Date.now();
return new Promise((resolve, reject) => {
@@ -105,8 +109,8 @@ module.exports = {
reject(error);
});
});
},
resolveUri(uri) {
}
export function resolveUri(uri) {
logger.debug(`lbryApi >> Resolving URI for "${uri}"`);
const gaStartTime = Date.now();
return new Promise((resolve, reject) => {
@@ -135,8 +139,8 @@ module.exports = {
reject(error);
});
});
},
getDownloadDirectory() {
}
export function getDownloadDirectory() {
logger.debug('lbryApi >> Retrieving the download directory path from lbry daemon...');
const gaStartTime = Date.now();
return new Promise((resolve, reject) => {
@@ -153,7 +157,7 @@ module.exports = {
Date.now()
);
if (data.result) {
resolve(data.result.download_dir);
resolve(data.result.download_directory);
} else {
return new Error(
'Successfully connected to lbry daemon, but unable to retrieve the download directory.'
@@ -165,8 +169,8 @@ module.exports = {
resolve('/home/lbry/Downloads/');
});
});
},
createChannel(name) {
}
export function createChannel(name) {
logger.debug(`lbryApi >> Creating channel for ${name}...`);
const gaStartTime = Date.now();
return new Promise((resolve, reject) => {
@@ -186,8 +190,8 @@ module.exports = {
reject(error);
});
});
},
getAccountBalance() {
}
export function getAccountBalance() {
const gaStartTime = Date.now();
return new Promise((resolve, reject) => {
axios
@@ -195,18 +199,11 @@ module.exports = {
method: 'account_balance',
})
.then(response => {
sendGATimingEvent(
'lbrynet',
'getAccountBalance',
'SETTINGS_GET',
gaStartTime,
Date.now()
);
sendGATimingEvent('lbrynet', 'getAccountBalance', 'SETTINGS_GET', gaStartTime, Date.now());
handleLbrynetResponse(response, resolve, reject);
})
.catch(error => {
reject(error);
});
});
},
};
}
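The lbrynet wrapper now exposes each daemon call as a named export, so callers import only what they need (elsewhere in this diff the module is imported as 'server/lbrynet'). An illustrative caller, with a made-up claim URI:
import { getClaim } from 'server/lbrynet';
getClaim('lbry://example#0123456789abcdef') // URI is illustrative only
  .then(result => console.log(result))
  .catch(error => console.error(error));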


@@ -1,6 +1,6 @@
const logger = require('winston');
import logger from 'winston';
const handleLbrynetResponse = ({ data }, resolve, reject) => {
export const handleLbrynetResponse = ({ data }, resolve, reject) => {
logger.debug('lbry api data:', data);
if (data) {
// check for an error
@@ -15,5 +15,3 @@ const handleLbrynetResponse = ({ data }, resolve, reject) => {
// fallback in case it just timed out
reject(JSON.stringify(data));
};
module.exports = handleLbrynetResponse;


@@ -1,9 +1,8 @@
const fs = require('fs');
import fs from 'fs';
const logger = require('winston');
const {
publishing: { publishingChannelWhitelist },
} = require('@config/siteConfig');
import logger from 'winston';
import { publishing } from '@config/siteConfig';
const { publishingChannelWhitelist } = publishing;
const ipBanFile = './site/config/ipBan.txt';
const forbiddenMessage =
'<h1>Forbidden</h1>If you are seeing this by mistake, please contact us using <a href="https://chat.lbry.com/">https://chat.lbry.com/</a>';
@@ -23,7 +22,7 @@ if (fs.existsSync(ipBanFile)) {
});
}
const autoblockPublishMiddleware = (req, res, next) => {
export const autoblockPublishMiddleware = (req, res, next) => {
let ip = (req.headers['x-forwarded-for'] || req.connection.remoteAddress).split(/,\s?/)[0];
if (blockedAddresses.indexOf(ip) !== -1) {
@@ -56,7 +55,7 @@ const autoblockPublishMiddleware = (req, res, next) => {
}
};
const autoblockPublishBodyMiddleware = (req, res, next) => {
export const autoblockPublishBodyMiddleware = (req, res, next) => {
if (req.body && publishingChannelWhitelist) {
let ip = (req.headers['x-forwarded-for'] || req.connection.remoteAddress).split(/,\s?/)[0];
const { channelName } = req.body;
@@ -67,8 +66,3 @@ const autoblockPublishBodyMiddleware = (req, res, next) => {
}
next();
};
module.exports = {
autoblockPublishMiddleware,
autoblockPublishBodyMiddleware,
};


@@ -1,8 +1,8 @@
const logger = require('winston');
const db = require('../models');
const httpContext = require('express-http-context');
import logger from 'winston';
import db from '../models';
import httpContext from 'express-http-context';
function logMetricsMiddleware (req, res, next) {
export function logMetricsMiddleware(req, res, next) {
res.on('finish', () => {
const userAgent = req.get('user-agent');
const routePath = httpContext.get('routePath');
@@ -43,15 +43,10 @@ function logMetricsMiddleware (req, res, next) {
next();
}
function setRouteDataInContextMiddleware (routePath, routeData) {
export function setRouteDataInContextMiddleware(routePath, routeData) {
return function(req, res, next) {
httpContext.set('routePath', routePath);
httpContext.set('routeData', routeData);
next();
};
}
module.exports = {
logMetricsMiddleware,
setRouteDataInContextMiddleware,
};


@@ -1,5 +1,6 @@
const multipart = require('connect-multiparty');
const { publishing: { uploadDirectory } } = require('@config/siteConfig');
import multipart from 'connect-multiparty';
import { publishing } from '@config/siteConfig';
const { uploadDirectory } = publishing;
const multipartMiddleware = multipart({ uploadDir: uploadDirectory });
module.exports = multipartMiddleware;
export default multipartMiddleware;


@@ -1,8 +1,8 @@
const logger = require('winston');
import logger from 'winston';
const requestLogger = (req, res, next) => { // custom logging middleware to log all incoming http requests
const requestLogger = (req, res, next) => {
// custom logging middleware to log all incoming http requests
logger.debug(`Request on ${req.originalUrl} from ${req.ip}`);
next();
};
module.exports = requestLogger;
export default requestLogger;


@@ -1,11 +1,10 @@
const logger = require('winston');
const db = require('../models');
import logger from 'winston';
import db from 'server/models';
const torCheck = (req, res, next) => {
const { ip } = req;
logger.debug(`tor check for: ${ip}`);
return db.Tor.findAll(
{
return db.Tor.findAll({
where: {
address: ip,
},
@@ -16,7 +15,8 @@ const torCheck = (req, res, next) => {
logger.info('Tor request blocked:', ip);
const failureResponse = {
success: false,
message: 'Unfortunately this api route is not currently available for tor users. We are working on a solution that will allow tor users to use this endpoint in the future.',
message:
'Unfortunately this api route is not currently available for tor users. We are working on a solution that will allow tor users to use this endpoint in the future.',
};
res.status(403).json(failureResponse);
} else {
@@ -27,5 +27,4 @@ const torCheck = (req, res, next) => {
logger.error(error);
});
};
module.exports = torCheck;
export default torCheck;


@@ -1,7 +1,7 @@
const logger = require('winston');
import logger from 'winston';
const BLOCKED_CLAIM = 'BLOCKED_CLAIM';
module.exports = (sequelize, { STRING }) => {
export default (sequelize, { STRING }) => {
const Blocked = sequelize.define(
'Blocked',
{
@@ -18,8 +18,9 @@ module.exports = (sequelize, { STRING }) => {
Blocked.getBlockList = function() {
logger.debug('returning full block list');
return new Promise((resolve, reject) => {
this.findAll()
.then(list => { return resolve(list) });
this.findAll().then(list => {
return resolve(list);
});
});
};


@@ -1,17 +1,17 @@
const logger = require('winston');
const returnShortId = require('./utils/returnShortId.js');
import logger from 'winston';
import returnShortId from './utils/returnShortId';
const NO_CHANNEL = 'NO_CHANNEL';
function isLongChannelId(channelId) {
return (channelId && (channelId.length === 40));
return channelId && channelId.length === 40;
}
function isShortChannelId(channelId) {
return (channelId && (channelId.length < 40));
return channelId && channelId.length < 40;
}
module.exports = (sequelize, { STRING, BOOLEAN, INTEGER, TEXT, DECIMAL }) => {
export default (sequelize, { STRING, BOOLEAN, INTEGER, TEXT, DECIMAL }) => {
const Certificate = sequelize.define(
'Certificate',
{
@@ -112,8 +112,7 @@ module.exports = (sequelize, { STRING, BOOLEAN, INTEGER, TEXT, DECIMAL }) => {
Certificate.getShortChannelIdFromLongChannelId = function(longChannelId, channelName) {
logger.debug(`getShortChannelIdFromLongChannelId ${channelName}:${longChannelId}`);
return new Promise((resolve, reject) => {
this
.findAll({
this.findAll({
where: { name: channelName },
order: [['height', 'ASC']],
})
@@ -153,8 +152,7 @@ module.exports = (sequelize, { STRING, BOOLEAN, INTEGER, TEXT, DECIMAL }) => {
Certificate.getLongChannelIdFromShortChannelId = function(channelName, channelClaimId) {
logger.debug(`getLongChannelIdFromShortChannelId(${channelName}, ${channelClaimId})`);
return new Promise((resolve, reject) => {
this
.findAll({
this.findAll({
where: {
name: channelName,
claimId: {
@@ -181,8 +179,7 @@ module.exports = (sequelize, { STRING, BOOLEAN, INTEGER, TEXT, DECIMAL }) => {
Certificate.getLongChannelIdFromChannelName = function(channelName) {
logger.debug(`getLongChannelIdFromChannelName(${channelName})`);
return new Promise((resolve, reject) => {
this
.findAll({
this.findAll({
where: { name: channelName },
order: [['effectiveAmount', 'DESC'], ['height', 'ASC']],
})


@@ -1,4 +1,4 @@
module.exports = (sequelize, { STRING }) => {
export default (sequelize, { STRING }) => {
const Channel = sequelize.define(
'Channel',
{


@@ -1,13 +1,10 @@
const logger = require('winston');
const returnShortId = require('./utils/returnShortId.js');
const isApprovedChannel = require('../../utils/isApprovedChannel');
const {
assetDefaults: { thumbnail: defaultThumbnail },
details: { host },
} = require('@config/siteConfig');
const {
publishing: { serveOnlyApproved, approvedChannels },
} = require('@config/siteConfig');
import logger from 'winston';
import returnShortId from './utils/returnShortId.js';
import isApprovedChannel from '@globalutils/isApprovedChannel';
import { assetDefaults, details, publishing } from '@config/siteConfig';
const { thumbnail: defaultThumbnail } = assetDefaults;
const { host } = details;
const { serveOnlyApproved, approvedChannels } = publishing;
const NO_CLAIM = 'NO_CLAIM';
@@ -53,7 +50,7 @@ function isShortClaimId(claimId) {
return claimId && claimId.length < 40;
}
module.exports = (sequelize, { STRING, BOOLEAN, INTEGER, TEXT, DECIMAL }) => {
export default (sequelize, { STRING, BOOLEAN, INTEGER, TEXT, DECIMAL }) => {
const Claim = sequelize.define(
'Claim',
{


@@ -1,4 +1,4 @@
module.exports = (sequelize, { STRING, BOOLEAN, INTEGER }) => {
export default (sequelize, { STRING, BOOLEAN, INTEGER }) => {
const File = sequelize.define(
'File',
{


@@ -1,22 +1,18 @@
const Sequelize = require('sequelize');
const logger = require('winston');
import Sequelize from 'sequelize';
import logger from 'winston';
const Blocked = require('./blocked');
const Certificate = require('./certificate');
const Channel = require('./channel');
const Claim = require('./claim');
const File = require('./file');
const Metrics = require('./metrics');
const Tor = require('./tor');
const Trending = require('./trending');
const User = require('./user');
const Views = require('./views');
import Blocked from './blocked';
import Certificate from './certificate';
import Channel from './channel';
import Claim from './claim';
import File from './file';
import Metrics from './metrics';
import Tor from './tor';
import Trending from './trending';
import User from './user';
import Views from './views';
const {
database,
username,
password,
} = require('@config/mysqlConfig');
import { database, username, password } from '@config/mysqlConfig';
if (!database || !username || !password) {
logger.warn('missing database, user, or password from mysqlConfig');
@@ -77,15 +73,16 @@ db.sequelize = sequelize;
db.Sequelize = Sequelize;
// add an 'upsert' method to the db object
db.upsert = (Model, values, condition, tableName) => {
return Model
.findOne({
return Model.findOne({
where: condition,
})
.then(obj => {
if (obj) { // update
if (obj) {
// update
logger.debug(`updating record in db.${tableName}`);
return obj.update(values);
} else { // insert
} else {
// insert
logger.debug(`creating record in db.${tableName}`);
return Model.create(values);
}
@@ -96,4 +93,4 @@ db.upsert = (Model, values, condition, tableName) => {
});
};
module.exports = db;
export default db;
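The upsert helper on the db object wraps the find-then-update-or-create flow shown above and returns a promise. An illustrative call from another server module (model and values are made up):
import db from 'server/models';
// update the Tor record for this address if one exists, otherwise create it
db.upsert(db.Tor, { address: '1.2.3.4' }, { address: '1.2.3.4' }, 'Tor')
  .then(() => console.log('Tor record upserted'))
  .catch(error => console.error(error));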


@@ -1,4 +1,4 @@
module.exports = (sequelize, { BOOLEAN, DATE, STRING }) => {
export default (sequelize, { BOOLEAN, DATE, STRING }) => {
const Metrics = sequelize.define(
'Metrics',
{


@@ -1,7 +1,8 @@
const logger = require('winston');
const { details: { ipAddress } } = require('@config/siteConfig');
import logger from 'winston';
import { details } from '@config/siteConfig';
const { ipAddress } = details;
module.exports = (sequelize, { STRING }) => {
export default (sequelize, { STRING }) => {
const Tor = sequelize.define(
'Tor',
{


@@ -1,10 +1,11 @@
const chainquery = require('chainquery').default;
import chainquery from 'chainquery';
module.exports = (sequelize, { BOOLEAN, DATE, FLOAT, INTEGER, STRING }) => {
export default (sequelize, { BOOLEAN, DATE, FLOAT, INTEGER, STRING }) => {
const Trending = sequelize.define(
'Trending',
{
time: { /* TODO: Historical analysis and log roll */
time: {
/* TODO: Historical analysis and log roll */
type: DATE(6),
defaultValue: sequelize.NOW,
},
@@ -52,16 +53,15 @@ module.exports = (sequelize, { BOOLEAN, DATE, FLOAT, INTEGER, STRING }) => {
}
);
Trending.getTrendingWeightData = async ({
hours = 2,
minutes = 0,
limit = 20,
} = {}) => {
Trending.getTrendingWeightData = async ({ hours = 2, minutes = 0, limit = 20 } = {}) => {
let time = new Date();
time.setHours(time.getHours() - hours);
time.setMinutes(time.getMinutes() - minutes);
const sqlTime = time.toISOString().slice(0, 19).replace('T', ' ');
const sqlTime = time
.toISOString()
.slice(0, 19)
.replace('T', ' ');
const selectString = 'DISTINCT(claimId), weight';
const whereString = `isChannel = false and time > '${sqlTime}'`;
@@ -89,7 +89,7 @@ module.exports = (sequelize, { BOOLEAN, DATE, FLOAT, INTEGER, STRING }) => {
},
});
return claimData.map((claimData) => {
return claimData.map(claimData => {
return Object.assign(trendingClaims[claimData.claim_id], claimData.dataValues);
});
};


@@ -1,8 +1,8 @@
'use strict';
const bcrypt = require('bcrypt');
const logger = require('winston');
import bcrypt from 'bcrypt';
import logger from 'winston';
module.exports = (sequelize, { STRING }) => {
export default (sequelize, { STRING }) => {
const User = sequelize.define(
'User',
{
@@ -46,8 +46,7 @@ module.exports = (sequelize, { STRING }) => {
return;
}
// replace the current password with the new hash
this
.update({password: hash})
this.update({ password: hash })
.then(() => {
resolve();
})


@@ -1,26 +1,23 @@
const db = require('../index.js');
import db from 'server/models';
const createClaimRecordDataAfterPublish = (certificateId, channelName, fileName, fileType, publishParams, publishResults) => {
export const createClaimRecordDataAfterPublish = (
certificateId,
channelName,
fileName,
fileType,
publishParams,
publishResults
) => {
const {
name,
metadata: {
title,
description,
thumbnail,
nsfw,
},
metadata: { title, description, thumbnail, nsfw },
claim_address: address,
bid: amount,
} = publishParams;
const {
claim_id: claimId,
txid,
nout,
} = publishResults;
const { claim_id: claimId, txid, nout } = publishResults;
return db.Claim.getCurrentHeight()
.then(height => {
return db.Claim.getCurrentHeight().then(height => {
return {
name,
claimId,
@@ -38,7 +35,3 @@ const createClaimRecordDataAfterPublish = (certificateId, channelName, fileName,
};
});
};
module.exports = {
createClaimRecordDataAfterPublish,
};


@@ -1,5 +1,5 @@
const Sequelize = require('sequelize');
const {database, username, password} = require('@config/mysqlConfig');
import Sequelize from 'sequelize';
import { database, username, password } from '@config/mysqlConfig';
const createDatabaseIfNotExists = () => {
const sequelize = new Sequelize('', username, password, {
@@ -8,7 +8,8 @@ const createDatabaseIfNotExists = () => {
operatorsAliases: false,
});
return new Promise((resolve, reject) => {
sequelize.query(`CREATE DATABASE IF NOT EXISTS ${database};`)
sequelize
.query(`CREATE DATABASE IF NOT EXISTS ${database};`)
.then(() => {
resolve();
})
@@ -18,4 +19,4 @@ const createDatabaseIfNotExists = () => {
});
};
module.exports = createDatabaseIfNotExists;
export default createDatabaseIfNotExists;


@@ -1,22 +1,11 @@
const getMediaDimensions = require('../../utils/getMediaDimensions.js');
import getMediaDimensions from 'server/utils/getMediaDimensions.js';
async function createFileRecordDataAfterGet (resolveResult, getResult) {
const {
name,
claimId,
outpoint,
contentType: fileType,
} = resolveResult;
export async function createFileRecordDataAfterGet(resolveResult, getResult) {
const { name, claimId, outpoint, contentType: fileType } = resolveResult;
const {
file_name: fileName,
download_path: filePath,
} = getResult;
const { file_name: fileName, download_path: filePath } = getResult;
const {
height: fileHeight,
width: fileWidth,
} = await getMediaDimensions(fileType, filePath);
const { height: fileHeight, width: fileWidth } = await getMediaDimensions(fileType, filePath);
return {
name,
@@ -30,22 +19,17 @@ async function createFileRecordDataAfterGet (resolveResult, getResult) {
};
}
async function createFileRecordDataAfterPublish (fileName, fileType, publishParams, publishResults) {
const {
name,
file_path: filePath,
} = publishParams;
export async function createFileRecordDataAfterPublish(
fileName,
fileType,
publishParams,
publishResults
) {
const { name, file_path: filePath } = publishParams;
const {
claim_id: claimId,
txid,
nout,
} = publishResults;
const { claim_id: claimId, txid, nout } = publishResults;
const {
height: fileHeight,
width: fileWidth,
} = await getMediaDimensions(fileType, filePath);
const { height: fileHeight, width: fileWidth } = await getMediaDimensions(fileType, filePath);
return {
name,
@@ -58,8 +42,3 @@ async function createFileRecordDataAfterPublish (fileName, fileType, publishPara
fileType,
};
}
module.exports = {
createFileRecordDataAfterGet,
createFileRecordDataAfterPublish,
};


@@ -16,10 +16,10 @@ const returnShortId = (claimsArray, longId) => {
shortIdLength += 1;
shortId = longId.substring(0, shortIdLength);
possibleMatches = possibleMatches.filter(element => {
return (element.claimId && (element.claimId.substring(0, shortIdLength) === shortId));
return element.claimId && element.claimId.substring(0, shortIdLength) === shortId;
});
}
return shortId;
};
module.exports = returnShortId;
export default returnShortId;


@@ -1,4 +1,4 @@
const chai = require('chai');
import chai from 'chai';
const expect = chai.expect;
describe('#parsePublishApiRequestBody()', function() {


@@ -5,7 +5,7 @@ const MAX_P_PRECISION = Math.exp(-16); // Rough estimation of V8 precision, -16
const MIN_P = -6.44357455534; // v8 float 0.0...0
const MAX_P = 6.44357455534; // v8 float 1.0...0
const getMean = (numArr) => {
const getMean = numArr => {
let total = 0;
let length = numArr.length; // store local to reduce potential prop lookups
@@ -17,12 +17,10 @@ const getMean = (numArr) => {
};
const getStandardDeviation = (numArr, mean) => {
return Math.sqrt(numArr.reduce((sq, n) => (
sq + Math.pow(n - mean, 2)
), 0) / (numArr.length - 1));
return Math.sqrt(numArr.reduce((sq, n) => sq + Math.pow(n - mean, 2), 0) / (numArr.length - 1));
};
const getInformationFromValues = (numArr) => {
export const getInformationFromValues = numArr => {
let mean = getMean(numArr);
return {
@@ -31,9 +29,10 @@ const getInformationFromValues = (numArr) => {
};
};
const getZScore = (value, mean, sDeviation) => (sDeviation !== 0 ? (value - mean) / sDeviation : 0);
export const getZScore = (value, mean, sDeviation) =>
sDeviation !== 0 ? (value - mean) / sDeviation : 0;
const getFastPValue = (zScore) => {
export const getFastPValue = zScore => {
if (zScore <= MIN_P) {
return 0;
}
@@ -47,7 +46,10 @@ const getFastPValue = (zScore) => {
let term = 1;
while (Math.abs(term) > MAX_P_PRECISION) {
term = ONE_DIV_SQRT_2PI * Math.pow(-1, k) * Math.pow(zScore, k) / (2 * k + 1) / Math.pow(2, k) * Math.pow(zScore, k + 1) / factorialK;
term =
(((ONE_DIV_SQRT_2PI * Math.pow(-1, k) * Math.pow(zScore, k)) / (2 * k + 1) / Math.pow(2, k)) *
Math.pow(zScore, k + 1)) /
factorialK;
sum += term;
k++;
factorialK *= k;
@@ -57,11 +59,4 @@ const getFastPValue = (zScore) => {
return sum;
};
const getWeight = (zScore, pValue) => (zScore * pValue);
module.exports = {
getInformationFromValues,
getZScore,
getFastPValue,
getWeight,
};
export const getWeight = (zScore, pValue) => zScore * pValue;
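Taken together, these exports turn a raw value into a trending weight: a z-score against the sample mean and standard deviation, a fast p-value approximation for that z-score, and weight = zScore * pValue. A worked sketch with made-up numbers (the module path is assumed):
import { getZScore, getFastPValue, getWeight } from './trendingAnalysis';
const mean = 10; // sample mean of recent values (illustrative)
const sDeviation = 4; // sample standard deviation (illustrative)
const zScore = getZScore(22, mean, sDeviation); // (22 - 10) / 4 = 3
const pValue = getFastPValue(zScore); // series approximation, close to 1 for z = 3
const weight = getWeight(zScore, pValue); // zScore * pValue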


@@ -1,4 +1,4 @@
module.exports = (sequelize, { BOOLEAN, DATE, STRING }) => {
export default (sequelize, { BOOLEAN, DATE, STRING }) => {
const Views = sequelize.define(
'Views',
{
@@ -34,15 +34,15 @@ module.exports = (sequelize, { BOOLEAN, DATE, STRING }) => {
}
);
Views.getUniqueViews = ({
hours = 0,
minutes = 30,
} = {}) => {
Views.getUniqueViews = ({ hours = 0, minutes = 30 } = {}) => {
let time = new Date();
time.setHours(time.getHours() - hours);
time.setMinutes(time.getMinutes() - minutes);
const sqlTime = time.toISOString().slice(0, 19).replace('T', ' ');
const sqlTime = time
.toISOString()
.slice(0, 19)
.replace('T', ' ');
const selectString = 'claimId, publisherId, isChannel, COUNT(DISTINCT ip) as views';
const groupString = 'claimId, publisherId, isChannel';
@@ -53,7 +53,7 @@ module.exports = (sequelize, { BOOLEAN, DATE, STRING }) => {
);
};
Views.getGetUniqueViewsbByClaimId = (claimId) => {
Views.getGetUniqueViewsbByClaimId = claimId => {
return Views.count({
where: {
claimId,


@@ -1,11 +1,11 @@
const md5File = require('md5-file');
const path = require('path');
import md5File from 'md5-file';
import path from 'path';
const bundlePath = path.resolve('./public/bundle/bundle.js');
const bundleHash = md5File.sync(bundlePath);
const shortBundleHash = bundleHash.substring(0, 4);
module.exports = (helmet, html, preloadedState) => {
export default (helmet, html, preloadedState) => {
// take the html and preloadedState and return the full page
return `
<!DOCTYPE html>


@@ -1,30 +1,33 @@
// middleware
const { autoblockPublishMiddleware, autoblockPublishBodyMiddleware } = require('../../middleware/autoblockPublishMiddleware');
const multipartMiddleware = require('../../middleware/multipartMiddleware');
const torCheckMiddleware = require('../../middleware/torCheckMiddleware');
import multipartMiddleware from 'server/middleware/multipartMiddleware';
import torCheckMiddleware from 'server/middleware/torCheckMiddleware';
// route handlers
const channelAvailability = require('../../controllers/api/channel/availability');
const channelClaims = require('../../controllers/api/channel/claims');
const channelData = require('../../controllers/api/channel/data');
const channelShortId = require('../../controllers/api/channel/shortId');
const claimAvailability = require('../../controllers/api/claim/availability');
const claimData = require('../../controllers/api/claim/data/');
const claimGet = require('../../controllers/api/claim/get');
const claimList = require('../../controllers/api/claim/list');
const claimLongId = require('../../controllers/api/claim/longId');
const claimPublish = require('../../controllers/api/claim/publish');
const claimAbandon = require('../../controllers/api/claim/abandon');
const claimUpdate = require('../../controllers/api/claim/update');
const claimResolve = require('../../controllers/api/claim/resolve');
const claimShortId = require('../../controllers/api/claim/shortId');
const claimViews = require('../../controllers/api/claim/views');
const fileAvailability = require('../../controllers/api/file/availability');
const specialClaims = require('../../controllers/api/special/claims');
const userPassword = require('../../controllers/api/user/password');
const publishingConfig = require('../../controllers/api/config/site/publishing');
const getTorList = require('../../controllers/api/tor');
const getBlockedList = require('../../controllers/api/blocked');
const getOEmbedData = require('../../controllers/api/oEmbed');
import channelAvailability from 'server/controllers/api/channel/availability';
import channelClaims from 'server/controllers/api/channel/claims';
import channelData from 'server/controllers/api/channel/data';
import channelShortId from 'server/controllers/api/channel/shortId';
import claimAvailability from 'server/controllers/api/claim/availability';
import claimData from 'server/controllers/api/claim/data/';
import claimGet from 'server/controllers/api/claim/get';
import claimList from 'server/controllers/api/claim/list';
import claimLongId from 'server/controllers/api/claim/longId';
import claimPublish from 'server/controllers/api/claim/publish';
import claimAbandon from 'server/controllers/api/claim/abandon';
import claimUpdate from 'server/controllers/api/claim/update';
import claimResolve from 'server/controllers/api/claim/resolve';
import claimShortId from 'server/controllers/api/claim/shortId';
import claimViews from 'server/controllers/api/claim/views';
import fileAvailability from 'server/controllers/api/file/availability';
import specialClaims from 'server/controllers/api/special/claims';
import userPassword from 'server/controllers/api/user/password';
import publishingConfig from 'server/controllers/api/config/site/publishing';
import getTorList from 'server/controllers/api/tor';
import getBlockedList from 'server/controllers/api/blocked';
import getOEmbedData from 'server/controllers/api/oEmbed';
const {
autoblockPublishMiddleware,
autoblockPublishBodyMiddleware,
} = require('server/middleware/autoblockPublishMiddleware');
export default {
// homepage routes
@@ -32,8 +35,12 @@ export default {
// channel routes
'/api/channel/availability/:name': { controller: [torCheckMiddleware, channelAvailability] },
'/api/channel/short-id/:longId/:name': { controller: [torCheckMiddleware, channelShortId] },
'/api/channel/data/:channelName/:channelClaimId' : { controller: [ torCheckMiddleware, channelData ] },
'/api/channel/claims/:channelName/:channelClaimId/:page': { controller: [ torCheckMiddleware, channelClaims ] },
'/api/channel/data/:channelName/:channelClaimId': {
controller: [torCheckMiddleware, channelData],
},
'/api/channel/claims/:channelName/:channelClaimId/:page': {
controller: [torCheckMiddleware, channelClaims],
},
// special routes
'/api/special/:name/:page': { controller: [torCheckMiddleware, specialClaims] },
@@ -44,9 +51,24 @@ export default {
'/api/claim/get/:name/:claimId': { controller: [torCheckMiddleware, claimGet] },
'/api/claim/list/:name': { controller: [torCheckMiddleware, claimList] },
'/api/claim/long-id': { method: 'post', controller: [torCheckMiddleware, claimLongId] }, // note: should be a 'get'
'/api/claim/publish' : { method: 'post', controller: [ torCheckMiddleware, autoblockPublishMiddleware, multipartMiddleware, autoblockPublishBodyMiddleware, claimPublish ] },
'/api/claim/update' : { method: 'post', controller: [ torCheckMiddleware, multipartMiddleware, claimUpdate ] },
'/api/claim/abandon' : { method: 'post', controller: [ torCheckMiddleware, multipartMiddleware, claimAbandon ] },
'/api/claim/publish': {
method: 'post',
controller: [
torCheckMiddleware,
autoblockPublishMiddleware,
multipartMiddleware,
autoblockPublishBodyMiddleware,
claimPublish,
],
},
'/api/claim/update': {
method: 'post',
controller: [torCheckMiddleware, multipartMiddleware, claimUpdate],
},
'/api/claim/abandon': {
method: 'post',
controller: [torCheckMiddleware, multipartMiddleware, claimAbandon],
},
'/api/claim/resolve/:name/:claimId': { controller: [torCheckMiddleware, claimResolve] },
'/api/claim/short-id/:longId/:name': { controller: [torCheckMiddleware, claimShortId] },
'/api/claim/views/:claimId': { controller: [torCheckMiddleware, claimViews] },
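Each entry in this route table maps a path to an object with an optional method, a controller (a single handler or an array of middleware plus a handler), and, for the page and asset routes, an action and saga used during server-side rendering. The code that consumes these tables is not shown in this diff; purely as a sketch, a hypothetical mounting step over such a table could look like:
import express from 'express';
import apiRoutes from 'server/routes/api'; // path assumed for the table above
const app = express();
Object.entries(apiRoutes).forEach(([route, { method = 'get', controller }]) => {
  app[method](route, controller); // Express accepts an array of handlers here
});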


@@ -1,11 +1,19 @@
const serveByClaim = require('../../controllers/assets/serveByClaim');
const serveByIdentifierAndClaim = require('../../controllers/assets/serveByIdentifierAndClaim');
import serveByClaim from 'server/controllers/assets/serveByClaim';
import serveByIdentifierAndClaim from 'server/controllers/assets/serveByIdentifierAndClaim';
// TODO: Adjust build & sources to use import/export everywhere
const Actions = require('@actions').default;
const Sagas = require('@sagas').default;
export default {
'/:identifier/:claim': { controller: serveByIdentifierAndClaim, action: Actions.onHandleShowPageUri, saga: Sagas.handleShowPageUri },
'/:claim' : { controller: serveByClaim, action: Actions.onHandleShowPageUri, saga: Sagas.handleShowPageUri },
'/:identifier/:claim': {
controller: serveByIdentifierAndClaim,
action: Actions.onHandleShowPageUri,
saga: Sagas.handleShowPageUri,
},
'/:claim': {
controller: serveByClaim,
action: Actions.onHandleShowPageUri,
saga: Sagas.handleShowPageUri,
},
};


@@ -1,11 +1,14 @@
const speechPassport = require('../../speechPassport');
const handleSignupRequest = require('../../controllers/auth/signup');
const handleLoginRequest = require('../../controllers/auth/login');
const handleLogoutRequest = require('../../controllers/auth/logout');
const handleUserRequest = require('../../controllers/auth/user');
import speechPassport from '../../speechPassport';
import handleSignupRequest from '../../controllers/auth/signup';
import handleLoginRequest from '../../controllers/auth/login';
import handleLogoutRequest from '../../controllers/auth/logout';
import handleUserRequest from '../../controllers/auth/user';
export default {
'/signup': { method: 'post', controller: [ speechPassport.authenticate('local-signup'), handleSignupRequest ] },
'/signup': {
method: 'post',
controller: [speechPassport.authenticate('local-signup'), handleSignupRequest],
},
'/auth': { method: 'post', controller: handleLoginRequest },
'/logout': { controller: handleLogoutRequest },
'/user': { controller: handleUserRequest },


@@ -1,13 +1,17 @@
import handlePageRequest from '../../controllers/pages/sendReactApp';
const handleVideoEmbedRequest = require('../../controllers/pages/sendVideoEmbedPage');
const redirect = require('../../controllers/utils/redirect');
import handleVideoEmbedRequest from '../../controllers/pages/sendVideoEmbedPage';
import redirect from '../../controllers/utils/redirect';
// TODO: Adjust build & sources to use import/export everywhere
const Actions = require('@actions').default;
const Sagas = require('@sagas').default;
export default {
'/' : { controller: handlePageRequest, action: Actions.onHandleShowHomepage, saga: Sagas.handleShowHomepage },
'/': {
controller: handlePageRequest,
action: Actions.onHandleShowHomepage,
saga: Sagas.handleShowHomepage,
},
'/login': { controller: handlePageRequest },
'/about': { controller: handlePageRequest },
'/tos': { controller: handlePageRequest },


@@ -1,12 +1,12 @@
const passport = require('passport');
const localLoginStrategy = require('./utils/local-login.js');
const localSignupStrategy = require('./utils/local-signup.js');
const serializeUser = require('./utils/serializeUser.js');
const deserializeUser = require('./utils/deserializeUser.js');
import passport from 'passport';
import localLoginStrategy from './utils/local-login.js';
import localSignupStrategy from './utils/local-signup.js';
import serializeUser from './utils/serializeUser.js';
import deserializeUser from './utils/deserializeUser.js';
passport.deserializeUser(deserializeUser);
passport.serializeUser(serializeUser);
passport.use('local-login', localLoginStrategy);
passport.use('local-signup', localSignupStrategy);
module.exports = passport;
export default passport;


@@ -3,4 +3,4 @@ const deserializeUser = (user, done) => {
done(null, user);
};
module.exports = deserializeUser;
export default deserializeUser;


@@ -1,8 +1,8 @@
import db from 'server/models';
const PassportLocalStrategy = require('passport-local').Strategy;
const logger = require('winston');
const db = require('../../models');
const returnUserAndChannelInfo = (userInstance) => {
const returnUserAndChannelInfo = userInstance => {
return new Promise((resolve, reject) => {
let userInfo = {};
userInfo['id'] = userInstance.id;
@@ -24,14 +24,13 @@ const returnUserAndChannelInfo = (userInstance) => {
});
};
module.exports = new PassportLocalStrategy(
const Strategy = new PassportLocalStrategy(
{
usernameField: 'username',
passwordField: 'password',
},
(username, password, done) => {
return db.User
.findOne({
return db.User.findOne({
where: { userName: username },
})
.then(user => {
@@ -39,7 +38,8 @@ module.exports = new PassportLocalStrategy(
logger.debug('no user found');
return done(null, false, { message: 'Incorrect username or password' });
}
return user.comparePassword(password)
return user
.comparePassword(password)
.then(isMatch => {
if (!isMatch) {
logger.debug('incorrect password');
@@ -63,3 +63,5 @@ module.exports = new PassportLocalStrategy(
});
}
);
export default Strategy;


@@ -1,10 +1,12 @@
import { createChannel } from 'server/lbrynet';
const PassportLocalStrategy = require('passport-local').Strategy;
const { createChannel } = require('../../lbrynet');
const logger = require('winston');
const db = require('../../models');
const { publishing: { closedRegistration } } = require('@config/siteConfig');
const db = require('server/models');
const {
publishing: { closedRegistration },
} = require('@config/siteConfig');
module.exports = new PassportLocalStrategy(
const Strategy = new PassportLocalStrategy(
{
usernameField: 'username',
passwordField: 'password',
@@ -40,7 +42,11 @@ module.exports = new PassportLocalStrategy(
};
logger.verbose('certificateData >', certificateData);
// save user and certificate to db
return Promise.all([db.User.create(userData), db.Channel.create(channelData), db.Certificate.create(certificateData)]);
return Promise.all([
db.User.create(userData),
db.Channel.create(channelData),
db.Certificate.create(certificateData),
]);
})
.then(([newUser, newChannel, newCertificate]) => {
logger.verbose('user and certificate successfully created');
@@ -54,7 +60,10 @@ module.exports = new PassportLocalStrategy(
})
.then(() => {
logger.verbose('user and certificate successfully associated');
return db.Certificate.getShortChannelIdFromLongChannelId(userInfo.channelClaimId, userInfo.channelName);
return db.Certificate.getShortChannelIdFromLongChannelId(
userInfo.channelClaimId,
userInfo.channelName
);
})
.then(shortChannelId => {
userInfo['shortChannelId'] = shortChannelId;
@@ -66,3 +75,4 @@ module.exports = new PassportLocalStrategy(
});
}
);
export default Strategy;


@@ -3,4 +3,4 @@ const serializeUser = (user, done) => {
done(null, user);
};
module.exports = serializeUser;
export default serializeUser;


@@ -1,12 +1,13 @@
// load dependencies
const logger = require('winston');
const db = require('../models');
import logger from 'winston';
import db from 'server/models';
require('../helpers/configureLogger.js')(logger);
let totalClaims = 0;
let totalClaimsNoCertificate = 0;
db.sequelize.sync() // sync sequelize
db.sequelize
.sync() // sync sequelize
.then(() => {
logger.info('finding claims with no channels');
return db.Claim.findAll({
@@ -21,8 +22,7 @@ db.sequelize.sync() // sync sequelize
.then(claimsArray => {
totalClaims = claimsArray.length;
const claimsUpdatePromises = claimsArray.map(claim => {
return db.Certificate
.findOne({
return db.Certificate.findOne({
where: { claimId: claim.get('certificateId') },
})
.then(certificate => {
@@ -49,6 +49,6 @@ db.sequelize.sync() // sync sequelize
logger.info('total claims found with no matching certificate record', totalClaimsNoCertificate);
logger.debug('all done');
})
.catch((error) => {
.catch(error => {
logger.error(error);
});

Some files were not shown because too many files have changed in this diff.