Merge pull request #655 from lbryio/trending-metrics

Add zScore, pValue, specials, trending content
Shawn K 2018-10-24 15:47:47 -05:00 committed by GitHub
commit 3831f9661b
24 changed files with 558 additions and 63 deletions

View file

@@ -1,5 +1,10 @@
import * as actions from '../constants/show_action_types';
import { CHANNEL, ASSET_LITE, ASSET_DETAILS } from '../constants/show_request_types';
import {
ASSET_DETAILS,
ASSET_LITE,
CHANNEL,
SPECIAL_ASSET,
} from '../constants/show_request_types';
// basic request parsing
export function onHandleShowPageUri (params, url) {
@@ -38,6 +43,15 @@ export function onNewChannelRequest (channelName, channelId) {
};
}
export function onNewSpecialAssetRequest (name) {
const requestType = SPECIAL_ASSET;
const requestId = `sar#${name}`;
return {
type: actions.SPECIAL_ASSET_REQUEST_NEW,
data: { requestType, requestId, name, channelName: name, channelId: name },
};
}
export function onNewAssetRequest (name, id, channelName, channelId, extension) {
const requestType = extension ? ASSET_LITE : ASSET_DETAILS;
const requestId = `ar#${name}#${id}#${channelName}#${channelId}`;

View file

@@ -0,0 +1,7 @@
import Request from '../utils/request';
export function getSpecialAssetClaims(host, name, page) {
if (!page) page = 1;
const url = `${host}/api/special/${name}/${page}`;
return Request(url);
}
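For context, a minimal usage sketch (not part of this diff) of the helper above; it assumes Request resolves to the parsed JSON body and that the response has the { success, data } shape produced by the special-claims controller added later in this PR:

import { getSpecialAssetClaims } from '../api/specialAssetApi';

// Hypothetical client-side usage: fetch page 1 of the 'trending' special asset.
getSpecialAssetClaims(window.location.origin, 'trending', 1)
  .then(({ success, data }) => {
    if (success) {
      console.log(`received ${data.claims.length} trending claims`);
    }
  })
  .catch(error => console.error('special asset request failed:', error));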

View file

@@ -9,6 +9,7 @@ import LoginPage from '@pages/LoginPage';
import ContentPageWrapper from '@pages/ContentPageWrapper';
import FourOhFourPage from '@pages/FourOhFourPage';
import MultisitePage from '@pages/MultisitePage';
import PopularPage from '@pages/PopularPage';
const App = () => {
return (
@@ -19,6 +20,7 @@ const App = () => {
<Route exact path='/faq' component={FaqPage} />
<Route exact path='/login' component={LoginPage} />
<Route exact path='/multisite' component={MultisitePage} />
<Route exact path='/popular' component={PopularPage} />
<Route exact path='/:identifier/:claim' component={ContentPageWrapper} />
<Route exact path='/:claim' component={ContentPageWrapper} />
<Route component={FourOhFourPage} />

View file

@@ -5,6 +5,7 @@ export const REQUEST_ERROR = 'REQUEST_ERROR';
export const REQUEST_UPDATE = 'REQUEST_UPDATE';
export const ASSET_REQUEST_NEW = 'ASSET_REQUEST_NEW';
export const CHANNEL_REQUEST_NEW = 'CHANNEL_REQUEST_NEW';
export const SPECIAL_ASSET_REQUEST_NEW = 'SPECIAL_ASSET_REQUEST_NEW';
export const REQUEST_LIST_ADD = 'REQUEST_LIST_ADD';
// asset actions

View file

@@ -1,3 +1,4 @@
export const CHANNEL = 'CHANNEL';
export const ASSET_LITE = 'ASSET_LITE';
export const ASSET_DETAILS = 'ASSET_DETAILS';
export const SPECIAL_ASSET = 'SPECIAL_ASSET';

View file

@@ -5,7 +5,12 @@ import ShowAssetDetails from '@pages/ShowAssetDetails';
import ShowChannel from '@pages/ShowChannel';
import { withRouter } from 'react-router-dom';
import { CHANNEL, ASSET_LITE, ASSET_DETAILS } from '../../constants/show_request_types';
import {
CHANNEL,
ASSET_LITE,
ASSET_DETAILS,
SPECIAL_ASSET,
} from '../../constants/show_request_types';
class ContentPageWrapper extends React.Component {
componentDidMount () {
@@ -31,6 +36,8 @@ class ContentPageWrapper extends React.Component {
return <ShowAssetLite />;
case ASSET_DETAILS:
return <ShowAssetDetails />;
case SPECIAL_ASSET:
return <ShowChannel />;
default:
return <p>loading...</p>;
}

View file

@@ -0,0 +1,17 @@
import { connect } from 'react-redux';
import { onHandleShowHomepage } from '../../actions/show';
import View from './view';
const mapStateToProps = ({ show, site, channel }) => {
return {
error : show.request.error,
requestType: show.request.type,
homeChannel: 'special:trending',
};
};
const mapDispatchToProps = {
onHandleShowHomepage,
};
export default connect(mapStateToProps, mapDispatchToProps)(View);

View file

@@ -0,0 +1,23 @@
import React from 'react';
import ContentPageWrapper from '@pages/ContentPageWrapper';
class PopularPage extends React.Component {
componentDidMount () {
this.props.onHandleShowHomepage(this.props.match.params);
}
componentWillReceiveProps (nextProps) {
if (nextProps.match.params !== this.props.match.params) {
this.props.onHandleShowHomepage(nextProps.match.params);
}
}
render () {
const { homeChannel } = this.props;
return (
<ContentPageWrapper homeChannel={homeChannel} />
)
}
};
export default PopularPage;

View file

@@ -6,6 +6,7 @@ const mapStateToProps = ({ show, site, channel }) => {
const requestId = show.request.id;
// select request
const previousRequest = show.requestList[requestId] || null;
// select channel
let thisChannel;
if (previousRequest) {

View file

@@ -2,6 +2,7 @@ import { all } from 'redux-saga/effects';
import { watchHandleShowPageUri, watchHandleShowHomepage } from './show_uri';
import { watchNewAssetRequest } from './show_asset';
import { watchNewChannelRequest, watchUpdateChannelClaims } from './show_channel';
import { watchNewSpecialAssetRequest } from './show_special';
import { watchFileIsRequested } from './file';
import { watchPublishStart } from './publish';
import { watchUpdateClaimAvailability } from './updateClaimAvailability';
@@ -16,6 +17,7 @@ export function * rootSaga () {
watchHandleShowHomepage(),
watchNewAssetRequest(),
watchNewChannelRequest(),
watchNewSpecialAssetRequest(),
watchUpdateChannelClaims(),
watchFileIsRequested(),
watchPublishStart(),

View file

@@ -0,0 +1,46 @@
import {call, put, select, takeLatest} from 'redux-saga/effects';
import * as actions from '../constants/show_action_types';
import { addNewChannelToChannelList, addRequestToRequestList, onRequestError, onRequestUpdate, updateChannelClaims } from '../actions/show';
//import { getChannelClaims, getChannelData } from '../api/channelApi';
import { getSpecialAssetClaims } from '../api/specialAssetApi';
import { selectShowState } from '../selectors/show';
import { selectSiteHost } from '../selectors/site';
export function * newSpecialAssetRequest (action) {
const { requestType, requestId, name } = action.data;
let claimsData;
// put an action to update the request in redux
yield put(onRequestUpdate(requestType, requestId));
// is this an existing request?
// If this uri is in the request list, it's already been fetched
const state = yield select(selectShowState);
const host = yield select(selectSiteHost);
if (state.requestList[requestId]) {
return null;
}
// store the request in the channel requests list
const channelKey = `sar#${name}`;
yield put(addRequestToRequestList(requestId, null, channelKey));
// If this channel is in the channel list, it's already been fetched
if (state.channelList[channelKey]) {
return null;
}
// get channel claims data
try {
({ data: claimsData } = yield call(getSpecialAssetClaims, host, name, 1));
} catch (error) {
return yield put(onRequestError(error.message));
}
// store the channel data in the channel list
yield put(addNewChannelToChannelList(channelKey, name, null, null, claimsData));
// clear any request errors
yield put(onRequestError(null));
}
export function * watchNewSpecialAssetRequest () {
yield takeLatest(actions.SPECIAL_ASSET_REQUEST_NEW, newSpecialAssetRequest);
}

View file

@@ -1,8 +1,14 @@
import { call, put, takeLatest } from 'redux-saga/effects';
import * as actions from '../constants/show_action_types';
import { onRequestError, onNewChannelRequest, onNewAssetRequest } from '../actions/show';
import {
onRequestError,
onNewChannelRequest,
onNewAssetRequest,
onNewSpecialAssetRequest,
} from '../actions/show';
import { newAssetRequest } from '../sagas/show_asset';
import { newChannelRequest } from '../sagas/show_channel';
import { newSpecialAssetRequest } from '../sagas/show_special';
import lbryUri from '../../../utils/lbryUri';
function * parseAndUpdateIdentifierAndClaim (modifier, claim) {
@@ -24,27 +30,32 @@ function * parseAndUpdateIdentifierAndClaim (modifier, claim) {
}
function * parseAndUpdateClaimOnly (claim) {
// this could be a request for an asset or a channel page
// claim could be an asset claim or a channel claim
let isChannel, channelName, channelClaimId;
try {
({ isChannel, channelName, channelClaimId } = lbryUri.parseIdentifier(claim));
} catch (error) {
return yield put(onRequestError(error.message));
if(/^special\:/.test(claim) === true) {
const assetName = /special\:(.*)/.exec(claim)[1];
return yield call(newSpecialAssetRequest, onNewSpecialAssetRequest(assetName));
} else {
// this could be a request for an asset or a channel page
// claim could be an asset claim or a channel claim
let isChannel, channelName, channelClaimId;
try {
({ isChannel, channelName, channelClaimId } = lbryUri.parseIdentifier(claim));
} catch (error) {
return yield put(onRequestError(error.message));
}
// trigger a new action to update the store
// return early if this request is for a channel
if (isChannel) {
return yield call(newChannelRequest, onNewChannelRequest(channelName, channelClaimId));
}
// if not for a channel, parse the claim request
let claimName, extension;
try {
({claimName, extension} = lbryUri.parseClaim(claim));
} catch (error) {
return yield put(onRequestError(error.message));
}
yield call(newAssetRequest, onNewAssetRequest(claimName, null, null, null, extension));
}
// trigger a new action to update the store
// return early if this request is for a channel
if (isChannel) {
return yield call(newChannelRequest, onNewChannelRequest(channelName, channelClaimId));
}
// if not for a channel, parse the claim request
let claimName, extension;
try {
({claimName, extension} = lbryUri.parseClaim(claim));
} catch (error) {
return yield put(onRequestError(error.message));
}
yield call(newAssetRequest, onNewAssetRequest(claimName, null, null, null, extension));
}
export function * handleShowPageUri (action) {
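To make the new special-claim branch concrete, a small illustrative walkthrough (not part of the diff); the 'special:trending' value matches the homeChannel set in the new page container earlier in this PR:

// Illustrative only: what the branch above extracts from a homepage request.
const claim = 'special:trending';
if (/^special:/.test(claim)) {
  const assetName = /special:(.*)/.exec(claim)[1]; // 'trending'
  // the saga then runs newSpecialAssetRequest(onNewSpecialAssetRequest('trending')),
  // which fetches /api/special/trending/1 and stores the result as a pseudo-channel
}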

View file

@@ -1,7 +1,7 @@
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
const getClaimData = require('server/utils/getClaimData');
const chainquery = require('chainquery');
const db = require('../../../../models');
const db = require('server/models');
/*
@@ -16,7 +16,7 @@ const claimData = async ({ ip, originalUrl, body, params }, res) => {
try {
let resolvedClaim = await chainquery.claim.queries.resolveClaim(claimName, claimId).catch(() => {});
if(!resolvedClaim) {
resolvedClaim = await db.Claim.resolveClaim(claimName, claimId);
}

View file

@@ -0,0 +1,42 @@
const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
const db = require('server/models');
const getClaimData = require('server/utils/getClaimData');
/*
route to get all claims for special
*/
const channelClaims = async ({ ip, originalUrl, body, params }, res) => {
const {
name,
page,
} = params;
if(name === 'trending') {
const result = await db.Trending.getTrendingClaims();
const claims = await Promise.all(result.map((claim) => getClaimData(claim)));
return res.status(200).json({
success: true,
data: {
channelName: name,
claims,
longChannelClaimId: name,
currentPage: 1,
nextPage: null,
previousPage: null,
totalPages: 1,
totalResults: claims.length,
}
});
}
res.status(404).json({
success: false,
message: 'Feature endpoint not found',
});
handleErrorResponse(originalUrl, ip, 'Feature endpoint not found', res);
};
module.exports = channelClaims;

View file

@@ -37,11 +37,26 @@ const getClaimIdAndServeAsset = (channelName, channelClaimId, claimName, claimId
return claim;
})
.then(claim => {
if (serveOnlyApproved && !isApprovedChannel({ longId: claim.dataValues.publisher_id }, approvedChannels)) {
let claimDataValues = claim.dataValues;
if (serveOnlyApproved && !isApprovedChannel({ longId: claimDataValues.publisher_id || claimDataValues.certificateId }, approvedChannels)) {
throw new Error(CONTENT_UNAVAILABLE);
}
logger.debug('Outpoint:', claim.dataValues.outpoint);
return db.Blocked.isNotBlocked(claim.dataValues.outpoint);
let outpoint = claimDataValues.outpoint || `${claimDataValues.transaction_hash_id}:${claimDataValues.vout}`;
logger.debug('Outpoint:', outpoint);
return db.Blocked.isNotBlocked(outpoint).then(() => {
// If content was found, is approved, and not blocked - log a view.
db.Views.create({
time: Date.now(),
isChannel: false,
claimId: claimDataValues.claim_id || claimDataValues.claimId,
publisherId: claimDataValues.publisher_id || claimDataValues.certificateId,
ip,
});
return;
});
})
.then(() => {
return db.File.findOne({

View file

@@ -11,12 +11,18 @@ const httpContext = require('express-http-context');
// load local modules
const db = require('./models');
const requestLogger = require('./middleware/requestLogger.js');
const createDatabaseIfNotExists = require('./models/utils/createDatabaseIfNotExists.js');
const requestLogger = require('./middleware/requestLogger');
const createDatabaseIfNotExists = require('./models/utils/createDatabaseIfNotExists');
const { getWalletBalance } = require('./lbrynet/index');
const configureLogging = require('./utils/configureLogging.js');
const configureSlack = require('./utils/configureSlack.js');
const speechPassport = require('./speechPassport/index');
const configureLogging = require('./utils/configureLogging');
const configureSlack = require('./utils/configureSlack');
const speechPassport = require('./speechPassport');
const processTrending = require('./utils/processTrending');
const {
logMetricsMiddleware,
setRouteDataInContextMiddleware,
} = require('./middleware/logMetricsMiddleware');
const {
details: { port: PORT },
@@ -27,36 +33,6 @@ const {
},
} = require('@config/siteConfig');
function logMetricsMiddleware(req, res, next) {
res.on('finish', () => {
const userAgent = req.get('user-agent');
const routePath = httpContext.get('routePath');
db.Metrics.create({
isInternal: /node\-fetch/.test(userAgent),
isChannel: res.isChannel,
claimId: res.claimId,
routePath: httpContext.get('routePath'),
params: JSON.stringify(req.params),
ip: req.headers['x-forwarded-for'] || req.connection.remoteAddress,
request: req.url,
routeData: JSON.stringify(httpContext.get('routeData')),
referrer: req.get('referrer'),
userAgent,
});
});
next();
}
function setRouteDataInContextMiddleware(routePath, routeData) {
return function (req, res, next) {
httpContext.set('routePath', routePath);
httpContext.set('routeData', routeData);
next();
};
}
function Server () {
this.initialize = () => {
// configure logging
@@ -200,6 +176,8 @@ function Server () {
})
.then(() => {
logger.info('Spee.ch startup is complete');
setInterval(processTrending, 30 * 60000) // 30 minutes
})
.catch(error => {
if (error.code === 'ECONNREFUSED') {

View file

@@ -0,0 +1,38 @@
const db = require('../models');
const httpContext = require('express-http-context');
function logMetricsMiddleware(req, res, next) {
res.on('finish', () => {
const userAgent = req.get('user-agent');
const routePath = httpContext.get('routePath');
db.Metrics.create({
time: Date.now(),
isInternal: /node\-fetch/.test(userAgent),
isChannel: res.isChannel,
claimId: res.claimId,
routePath: httpContext.get('routePath'),
params: JSON.stringify(req.params),
ip: req.headers['x-forwarded-for'] || req.connection.remoteAddress,
request: req.url,
routeData: JSON.stringify(httpContext.get('routeData')),
referrer: req.get('referrer'),
userAgent,
});
});
next();
}
function setRouteDataInContextMiddleware(routePath, routeData) {
return function (req, res, next) {
httpContext.set('routePath', routePath);
httpContext.set('routeData', routeData);
next();
};
}
module.exports = {
logMetricsMiddleware,
setRouteDataInContextMiddleware,
};
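A sketch (not part of the PR) of how these two exports might be mounted on an Express route; the route path and routeData payload are illustrative, and express-http-context's middleware must be installed for httpContext.get/set to work:

const express = require('express');
const httpContext = require('express-http-context');
const {
  logMetricsMiddleware,
  setRouteDataInContextMiddleware,
} = require('./middleware/logMetricsMiddleware');

const app = express();
app.use(httpContext.middleware); // required so routePath/routeData survive across async calls

app.get(
  '/api/special/:name/:page', // example route added in this PR
  setRouteDataInContextMiddleware('/api/special/:name/:page', { controller: 'specialClaims' }), // routeData value is illustrative
  logMetricsMiddleware,
  (req, res) => res.status(200).json({ success: true })
);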

View file

@@ -8,7 +8,9 @@ const Claim = require('./claim');
const File = require('./file');
const Metrics = require('./metrics');
const Tor = require('./tor');
const Trending = require('./trending');
const User = require('./user');
const Views = require('./views');
const {
database,
@@ -56,7 +58,9 @@ db['Claim'] = sequelize.import('Claim', Claim);
db['File'] = sequelize.import('File', File);
db['Metrics'] = sequelize.import('Metrics', Metrics);
db['Tor'] = sequelize.import('Tor', Tor);
db['Trending'] = sequelize.import('Trending', Trending);
db['User'] = sequelize.import('User', User);
db['Views'] = sequelize.import('Views', Views);
// run model.association for each model in the db object that has an association
logger.info('associating db models...');

server/models/trending.js (new file, 98 lines)
View file

@@ -0,0 +1,98 @@
const chainquery = require('chainquery');
module.exports = (sequelize, { BOOLEAN, DATE, FLOAT, INTEGER, STRING }) => {
const Trending = sequelize.define(
'Trending',
{
time: { /* TODO: Historical analysis and log roll */
type: DATE(6),
defaultValue: sequelize.NOW,
},
isChannel: {
type: BOOLEAN,
defaultValue: false,
},
claimId: {
type: STRING,
defaultValue: null,
},
publisherId: {
type: STRING,
defaultValue: null,
},
intervalViews: {
type: INTEGER,
defaultValue: 0,
},
weight: {
type: FLOAT,
defaultValue: 0,
},
zScore: {
type: FLOAT,
defaultValue: 0,
},
pValue: {
type: FLOAT,
defaultValue: 0,
},
// TODO: Calculate t-statistics
},
{
freezeTableName: true,
timestamps: false, // don't use default timestamps columns
indexes: [
{
fields: ['claimId'],
},
{
fields: ['time', 'isChannel', 'claimId', 'publisherId', 'weight'],
},
],
}
);
Trending.getTrendingWeightData = async ({
hours = 2,
minutes = 0,
limit = 20
} = {}) => {
let time = new Date();
time.setHours(time.getHours() - hours);
time.setMinutes(time.getMinutes() - minutes);
const sqlTime = time.toISOString().slice(0, 19).replace('T', ' ');
const selectString = 'DISTINCT(claimId), weight';
const whereString = `isChannel = false and time > '${sqlTime}'`;
const query = `SELECT ${selectString} FROM trending WHERE ${whereString} ORDER BY weight DESC LIMIT ${limit}`
return await sequelize.query(query, { type: sequelize.QueryTypes.SELECT });
};
Trending.getTrendingClaims = async () => {
const trendingWeightData = await Trending.getTrendingWeightData();
const trendingClaimIds = [];
const trendingClaims = trendingWeightData.reduce((claims, trendingData) => {
trendingClaimIds.push(trendingData.claimId);
claims[trendingData.claimId] = {
...trendingData
};
return claims;
}, {});
const claimData = await chainquery.claim.findAll({
where: {
claim_id: { [sequelize.Op.in]: trendingClaimIds },
},
});
return claimData.map((claimData) => {
return Object.assign(trendingClaims[claimData.claim_id], claimData.dataValues);
});
};
return Trending;
};
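A brief usage sketch (not in the diff) of the two model helpers above, assuming the models have been initialised through server/models:

const db = require('server/models');

// Top 20 claims by weight over the last 2 hours (the helper's defaults).
db.Trending.getTrendingWeightData({ hours: 2, limit: 20 })
  .then(rows => rows.forEach(({ claimId, weight }) => console.log(claimId, weight)));

// The same data merged with chainquery claim records, as served by /api/special/trending.
db.Trending.getTrendingClaims()
  .then(claims => console.log(`trending claims: ${claims.length}`));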

View file

@@ -0,0 +1,68 @@
const ZSCORE_CRITICAL_THRESHOLD = 1.96; // z critical value for 95% confidence (two-tailed)
const ZSCORE_NINETYNINTH = 2.326347875; // z value at the 99th percentile
const ONE_DIV_SQRT_2PI = 0.3989422804014327; // V8 float of 1/SQRT(2 * PI)
const MAX_P_PRECISION = Math.exp(-16); // rough estimate of V8 precision; exp(-16) is 1.1253517471925912e-7
const MIN_P = -6.44357455534; // z-scores below this give a p-value that underflows to 0
const MAX_P = 6.44357455534; // z-scores above this give a p-value that saturates to 1
const getMean = (numArr) => {
let total = 0;
let length = numArr.length; // store local to reduce potential prop lookups
for(let i = 0; i < length; i++) {
total += numArr[i];
}
return total / length;
};
const getStandardDeviation = (numArr, mean) => {
return Math.sqrt(numArr.reduce((sq, n) => (
sq + Math.pow(n - mean, 2)
), 0) / (numArr.length - 1));
};
const getInformationFromValues = (numArr) => {
let mean = getMean(numArr);
return {
mean,
standardDeviation: getStandardDeviation(numArr, mean),
}
};
const getZScore = (value, mean, sDeviation) => ( sDeviation !== 0 ? (value - mean) / sDeviation : 0 );
const getFastPValue = (zScore) => {
if(zScore <= MIN_P) {
return 0;
}
if(zScore >= MAX_P) {
return 1;
}
let factorialK = 1;
let k = 0;
let sum = 0;
let term = 1;
while(Math.abs(term) > MAX_P_PRECISION) {
term = ONE_DIV_SQRT_2PI * Math.pow(-1 , k) * Math.pow(zScore , k) / (2 * k + 1) / Math.pow(2 , k) * Math.pow(zScore, k + 1) / factorialK;
sum += term;
k++;
factorialK *= k;
}
sum += 0.5;
return sum;
};
const getWeight = (zScore, pValue) => (zScore * pValue);
module.exports = {
getInformationFromValues,
getZScore,
getFastPValue,
getWeight,
};
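A worked example (made-up view counts, not part of the diff) showing how these helpers combine into a trending weight, mirroring what processTrending does later in this PR:

const {
  getInformationFromValues,
  getZScore,
  getFastPValue,
  getWeight,
} = require('server/models/utils/trendingAnalysis');

// Unique views per claim over one interval (illustrative numbers).
const views = [3, 5, 4, 40, 6];
const { mean, standardDeviation } = getInformationFromValues(views); // mean 11.6, sd ~15.9
const zScore = getZScore(40, mean, standardDeviation); // ~1.78: how unusual 40 views is
const pValue = getFastPValue(zScore);                  // ~0.96: approximates P(Z <= zScore)
const weight = getWeight(zScore, pValue);              // ~1.72: the score stored on a Trending row
console.log({ mean, standardDeviation, zScore, pValue, weight });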

server/models/views.js (new file, 57 lines)
View file

@@ -0,0 +1,57 @@
module.exports = (sequelize, { BOOLEAN, DATE, STRING }) => {
const Views = sequelize.define(
'Views',
{
time: {
type: DATE(6),
defaultValue: sequelize.NOW,
},
isChannel: {
type: BOOLEAN,
defaultValue: false,
},
claimId: {
type: STRING,
defaultValue: null,
},
publisherId: {
type: STRING,
defaultValue: null,
},
ip: {
type: STRING,
defaultValue: null,
},
},
{
freezeTableName: true,
timestamps: false, // don't use default timestamps columns
indexes: [
{
fields: ['time', 'isChannel', 'claimId', 'publisherId', 'ip'],
},
],
}
);
Views.getUniqueViews = ({
hours = 0,
minutes = 30,
} = {}) => {
let time = new Date();
time.setHours(time.getHours() - hours);
time.setMinutes(time.getMinutes() - minutes);
const sqlTime = time.toISOString().slice(0, 19).replace('T', ' ');
const selectString = 'claimId, publisherId, isChannel, COUNT(DISTINCT ip) as views';
const groupString = 'claimId, publisherId, isChannel';
return sequelize.query(
`SELECT ${selectString} FROM views where time > '${sqlTime}' GROUP BY ${groupString}`,
{ type: sequelize.QueryTypes.SELECT }
);
}
return Views;
};
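A small sketch (not in the diff) of calling getUniqueViews; the example row values are hypothetical, but the columns match the SELECT above:

const db = require('server/models');

// Distinct-IP view counts per claim over the last 30 minutes (the helper's defaults).
db.Views.getUniqueViews({ hours: 0, minutes: 30 }).then(rows => {
  // e.g. [{ claimId: 'abc123', publisherId: 'def456', isChannel: 0, views: 12 }, ...]
  rows.forEach(({ claimId, views }) => console.log(claimId, views));
});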

View file

@@ -17,6 +17,7 @@ const claimPublish = require('../../controllers/api/claim/publish');
const claimResolve = require('../../controllers/api/claim/resolve');
const claimShortId = require('../../controllers/api/claim/shortId');
const fileAvailability = require('../../controllers/api/file/availability');
const specialClaims = require('../../controllers/api/special/claims');
const userPassword = require('../../controllers/api/user/password');
const publishingConfig = require('../../controllers/api/config/site/publishing');
const getTorList = require('../../controllers/api/tor');
@@ -83,6 +84,10 @@ module.exports = {
'/api/channel/data/:channelName/:channelClaimId': { controller: [ torCheckMiddleware, channelData ] },
'/api/channel/claims/:channelName/:channelClaimId/:page': { controller: [ torCheckMiddleware, channelClaims ] },
// special routes
'/api/special/:name/:page': { controller: [ torCheckMiddleware, specialClaims ] },
// claim routes
'/api/claim/availability/:name': { controller: [ torCheckMiddleware, claimAvailability ] },
'/api/claim/data/:claimName/:claimId': { controller: [ torCheckMiddleware, claimData ] },
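For reference, a hypothetical request against the new special route; the host is illustrative and the response shape is taken from the special-claims controller above:

// e.g. GET /api/special/trending/1
fetch('https://example.com/api/special/trending/1')
  .then(res => res.json())
  .then(({ success, data }) => {
    // data: { channelName: 'trending', claims: [...], currentPage: 1, totalPages: 1, ... }
    console.log(success, data.totalResults);
  });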

View file

@@ -0,0 +1,6 @@
module.exports = function(req) {
let reqIp = req.connection.remoteAddress;
let host = req.get('host');
return reqIp === '127.0.0.1' || reqIp === '::ffff:127.0.0.1' || reqIp === '::1' || host.indexOf('localhost') !== -1;
}

View file

@@ -0,0 +1,52 @@
const db = require('server/models');
const {
getInformationFromValues,
getZScore,
getFastPValue,
getWeight,
} = require('server/models/utils/trendingAnalysis');
module.exports = async () => {
const claims = await db.Trending.getTrendingClaims();
const claimViews = await db.Views.getUniqueViews();
if(claimViews.length <= 1) {
return;
}
const time = Date.now();
// Must create statistical analytics before we can process zScores, etc
const viewsNumArray = claimViews.map((claimViewsEntry) => claimViewsEntry.views);
const {
mean,
standardDeviation,
} = getInformationFromValues(viewsNumArray);
for(let i = 0; i < claimViews.length; i++) {
let claimViewsEntry = claimViews[i];
const {
isChannel,
claimId,
publisherId,
} = claimViewsEntry;
const zScore = getZScore(claimViewsEntry.views, mean, standardDeviation);
const pValue = getFastPValue(zScore);
const weight = getWeight(zScore, pValue);
const trendingData = {
time,
isChannel: claimViewsEntry.isChannel,
claimId: claimViewsEntry.claimId,
publisherId: claimViewsEntry.publisherId,
intervalViews: claimViewsEntry.views,
weight,
zScore,
pValue,
};
db.Trending.create(trendingData);
}
}
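Finally, a sketch of how this job is driven; the setInterval call mirrors the server/index.js change earlier in this PR, while the immediate run is only illustrative:

const processTrending = require('./utils/processTrending');

processTrending();                        // hypothetical one-off run, e.g. for local testing
setInterval(processTrending, 30 * 60000); // every 30 minutes, as wired up during server startup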