Fix view logging exceptions and log errors when processing trending data
parent 828e8dce70
commit af8e7e7044
2 changed files with 54 additions and 35 deletions

File 1 of 2: metrics logging middleware

@@ -1,3 +1,4 @@
+const logger = require('winston');
 const db = require('../models');
 const httpContext = require('express-http-context');
 
@@ -6,6 +7,18 @@ function logMetricsMiddleware(req, res, next) {
     const userAgent = req.get('user-agent');
     const routePath = httpContext.get('routePath');
 
+    let referrer = req.get('referrer');
+
+    if(referrer.length > 255) {
+      // Attempt to "safely" clamp long URLs
+      referrer = /(.*?)#.*/.exec(referrer)[1];
+
+      if(referrer.length > 255) {
+        logger.warn('Request refferer exceeds 255 characters:', referrer);
+        referrer = referrer.substring(0, 255);
+      }
+    }
+
     db.Metrics.create({
       time: Date.now(),
       isInternal: /node\-fetch/.test(userAgent),
@@ -16,7 +29,7 @@ function logMetricsMiddleware(req, res, next) {
       ip: req.headers['x-forwarded-for'] || req.connection.remoteAddress,
       request: req.url,
       routeData: JSON.stringify(httpContext.get('routeData')),
-      referrer: req.get('referrer'),
+      referrer,
       userAgent,
     });
   });
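
The clamp added above strips everything from the first '#' in an over-long referrer and only hard-truncates to 255 characters if the URL is still too long. Two edge cases are worth noting: req.get('referrer') is undefined when the header is absent, and /(.*?)#.*/ only matches when a fragment is present, so .exec() returns null for a long fragment-free URL. A more defensive variant could look like the following sketch, where clampReferrer is a hypothetical helper and not part of this commit:

// Hypothetical, more defensive variant of the clamp above; illustrative only,
// not part of this commit. Assumes the same 255-character limit on the
// Metrics referrer column.
function clampReferrer(rawReferrer) {
  if (!rawReferrer || rawReferrer.length <= 255) {
    return rawReferrer;
  }
  // Drop any URL fragment first, since it carries no routing information.
  const withoutFragment = rawReferrer.split('#')[0];
  // Hard-truncate only if the URL is still too long.
  return withoutFragment.length > 255
    ? withoutFragment.substring(0, 255)
    : withoutFragment;
}

The middleware could then assign referrer = clampReferrer(req.get('referrer')) before building the Metrics row.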

File 2 of 2: trending data processing

@@ -6,47 +6,53 @@ const {
   getWeight,
 } = require('server/models/utils/trendingAnalysis');
 
+const logger = require('winston');
+
 module.exports = async () => {
-  const claims = await db.Trending.getTrendingClaims();
-  const claimViews = await db.Views.getUniqueViews();
+  try {
+    const claims = await db.Trending.getTrendingClaims();
+    const claimViews = await db.Views.getUniqueViews();
 
-  if(claimViews.length <= 1) {
-    return;
-  }
+    if(claimViews.length <= 1) {
+      return;
+    }
 
-  const time = Date.now();
+    const time = Date.now();
 
-  // Must create statistical analytics before we can process zScores, etc
-  const viewsNumArray = claimViews.map((claimViewsEntry) => claimViewsEntry.views);
-  const {
-    mean,
-    standardDeviation,
-  } = getInformationFromValues(viewsNumArray);
+    // Must create statistical analytics before we can process zScores, etc
+    const viewsNumArray = claimViews.map((claimViewsEntry) => claimViewsEntry.views);
+    const {
+      mean,
+      standardDeviation,
+    } = getInformationFromValues(viewsNumArray);
 
-  for(let i = 0; i < claimViews.length; i++) {
-    let claimViewsEntry = claimViews[i];
+    for(let i = 0; i < claimViews.length; i++) {
+      let claimViewsEntry = claimViews[i];
 
-    const {
-      isChannel,
-      claimId,
-      publisherId,
-    } = claimViewsEntry;
+      const {
+        isChannel,
+        claimId,
+        publisherId,
+      } = claimViewsEntry;
 
-    const zScore = getZScore(claimViewsEntry.views, mean, standardDeviation);
-    const pValue = getFastPValue(zScore);
-    const weight = getWeight(zScore, pValue);
+      const zScore = getZScore(claimViewsEntry.views, mean, standardDeviation);
+      const pValue = getFastPValue(zScore);
+      const weight = getWeight(zScore, pValue);
 
-    const trendingData = {
-      time,
-      isChannel: claimViewsEntry.isChannel,
-      claimId: claimViewsEntry.claimId,
-      publisherId: claimViewsEntry.publisherId,
-      intervalViews: claimViewsEntry.views,
-      weight,
-      zScore,
-      pValue,
-    };
+      const trendingData = {
+        time,
+        isChannel: claimViewsEntry.isChannel,
+        claimId: claimViewsEntry.claimId,
+        publisherId: claimViewsEntry.publisherId,
+        intervalViews: claimViewsEntry.views,
+        weight,
+        zScore,
+        pValue,
+      };
 
-    db.Trending.create(trendingData);
-  }
+      db.Trending.create(trendingData);
+    }
+  } catch(e) {
+    logger.error('Error processing trending content:', e);
+  }
 }
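
Aside from re-indentation, the change in this file is the new winston require and the try/catch around the whole task body. Because the module exports an async function, an error thrown while reading views or creating Trending rows otherwise becomes a rejected promise that a fire-and-forget caller never sees. The commit handles this inside the module; a caller-side alternative would be to catch on each invocation, roughly as sketched below (the require path and interval are assumptions, not code from this repository):

// Illustrative only: the repository's real scheduling code is not part of this
// commit, so the require path and interval below are assumptions.
const logger = require('winston');
const processTrending = require('./processTrending');

setInterval(() => {
  processTrending().catch((e) => {
    // Mirrors the logging this commit adds inside the task's own catch block.
    logger.error('Error processing trending content:', e);
  });
}, 10 * 60 * 1000); // run every ten minutes (assumed schedule)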