Fix view logging exceptions and log errors when processing trending data
This commit is contained in:
parent
828e8dce70
commit
af8e7e7044
2 changed files with 54 additions and 35 deletions
|
@@ -1,3 +1,4 @@
|
|||
const logger = require('winston');
|
||||
const db = require('../models');
|
||||
const httpContext = require('express-http-context');
|
||||
|
||||
|
@@ -6,6 +7,18 @@ function logMetricsMiddleware(req, res, next) {
|
|||
const userAgent = req.get('user-agent');
|
||||
const routePath = httpContext.get('routePath');
|
||||
|
||||
let referrer = req.get('referrer');
|
||||
|
||||
if(referrer.length > 255) {
|
||||
// Attempt to "safely" clamp long URLs
|
||||
referrer = /(.*?)#.*/.exec(referrer)[1];
|
||||
|
||||
if(referrer.length > 255) {
|
||||
logger.warn('Request referrer exceeds 255 characters:', referrer);
|
||||
referrer = referrer.substring(0, 255);
|
||||
}
|
||||
}
|
||||
|
||||
db.Metrics.create({
|
||||
time: Date.now(),
|
||||
isInternal: /node\-fetch/.test(userAgent),
|
||||
|
@@ -16,7 +29,7 @@ function logMetricsMiddleware(req, res, next) {
|
|||
ip: req.headers['x-forwarded-for'] || req.connection.remoteAddress,
|
||||
request: req.url,
|
||||
routeData: JSON.stringify(httpContext.get('routeData')),
|
||||
referrer: req.get('referrer'),
|
||||
referrer,
|
||||
userAgent,
|
||||
});
|
||||
});
|
||||
|
|
|
@@ -6,47 +6,53 @@ const {
|
|||
getWeight,
|
||||
} = require('server/models/utils/trendingAnalysis');
|
||||
|
||||
const logger = require('winston');
|
||||
|
||||
module.exports = async () => {
|
||||
const claims = await db.Trending.getTrendingClaims();
|
||||
const claimViews = await db.Views.getUniqueViews();
|
||||
try {
|
||||
const claims = await db.Trending.getTrendingClaims();
|
||||
const claimViews = await db.Views.getUniqueViews();
|
||||
|
||||
if(claimViews.length <= 1) {
|
||||
return;
|
||||
}
|
||||
if(claimViews.length <= 1) {
|
||||
return;
|
||||
}
|
||||
|
||||
const time = Date.now();
|
||||
|
||||
// Must create statistical analytics before we can process zScores, etc
|
||||
const viewsNumArray = claimViews.map((claimViewsEntry) => claimViewsEntry.views);
|
||||
const {
|
||||
mean,
|
||||
standardDeviation,
|
||||
} = getInformationFromValues(viewsNumArray);
|
||||
|
||||
for(let i = 0; i < claimViews.length; i++) {
|
||||
let claimViewsEntry = claimViews[i];
|
||||
const time = Date.now();
|
||||
|
||||
// Must create statistical analytics before we can process zScores, etc
|
||||
const viewsNumArray = claimViews.map((claimViewsEntry) => claimViewsEntry.views);
|
||||
const {
|
||||
isChannel,
|
||||
claimId,
|
||||
publisherId,
|
||||
} = claimViewsEntry;
|
||||
mean,
|
||||
standardDeviation,
|
||||
} = getInformationFromValues(viewsNumArray);
|
||||
|
||||
const zScore = getZScore(claimViewsEntry.views, mean, standardDeviation);
|
||||
const pValue = getFastPValue(zScore);
|
||||
const weight = getWeight(zScore, pValue);
|
||||
for(let i = 0; i < claimViews.length; i++) {
|
||||
let claimViewsEntry = claimViews[i];
|
||||
|
||||
const trendingData = {
|
||||
time,
|
||||
isChannel: claimViewsEntry.isChannel,
|
||||
claimId: claimViewsEntry.claimId,
|
||||
publisherId: claimViewsEntry.publisherId,
|
||||
intervalViews: claimViewsEntry.views,
|
||||
weight,
|
||||
zScore,
|
||||
pValue,
|
||||
};
|
||||
const {
|
||||
isChannel,
|
||||
claimId,
|
||||
publisherId,
|
||||
} = claimViewsEntry;
|
||||
|
||||
db.Trending.create(trendingData);
|
||||
const zScore = getZScore(claimViewsEntry.views, mean, standardDeviation);
|
||||
const pValue = getFastPValue(zScore);
|
||||
const weight = getWeight(zScore, pValue);
|
||||
|
||||
const trendingData = {
|
||||
time,
|
||||
isChannel: claimViewsEntry.isChannel,
|
||||
claimId: claimViewsEntry.claimId,
|
||||
publisherId: claimViewsEntry.publisherId,
|
||||
intervalViews: claimViewsEntry.views,
|
||||
weight,
|
||||
zScore,
|
||||
pValue,
|
||||
};
|
||||
|
||||
db.Trending.create(trendingData);
|
||||
}
|
||||
} catch(e) {
|
||||
logger.error('Error processing trending content:', e);
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Reference in a new issue