diff --git a/server/middleware/logMetricsMiddleware.js b/server/middleware/logMetricsMiddleware.js
index 7845aec5..2ad4c5fa 100644
--- a/server/middleware/logMetricsMiddleware.js
+++ b/server/middleware/logMetricsMiddleware.js
@@ -1,3 +1,4 @@
+const logger = require('winston');
 const db = require('../models');
 const httpContext = require('express-http-context');
 
@@ -6,6 +7,22 @@ function logMetricsMiddleware(req, res, next) {
   const userAgent = req.get('user-agent');
   const routePath = httpContext.get('routePath');
 
+  // Clamp over-long referrer URLs so they fit the 255-char DB column.
+  // NOTE: req.get() returns undefined when the header is absent, so guard
+  // before reading .length to avoid a TypeError on header-less requests.
+  let referrer = req.get('referrer');
+
+  if(referrer && referrer.length > 255) {
+    // First try dropping the URL fragment, which keeps the URL valid.
+    // exec() returns null when there is no '#', so check before indexing.
+    const fragmentMatch = /(.*?)#.*/.exec(referrer);
+    if(fragmentMatch) {
+      referrer = fragmentMatch[1];
+    }
+
+    if(referrer.length > 255) {
+      logger.warn('Request referrer exceeds 255 characters:', referrer);
+      referrer = referrer.substring(0, 255);
+    }
+  }
+
   db.Metrics.create({
     time: Date.now(),
     isInternal: /node\-fetch/.test(userAgent),
@@ -16,7 +33,7 @@ function logMetricsMiddleware(req, res, next) {
     ip: req.headers['x-forwarded-for'] || req.connection.remoteAddress,
     request: req.url,
     routeData: JSON.stringify(httpContext.get('routeData')),
-    referrer: req.get('referrer'),
+    referrer,
     userAgent,
   });
 });
diff --git a/server/utils/processTrending.js b/server/utils/processTrending.js
index f84fe057..c401f14f 100644
--- a/server/utils/processTrending.js
+++ b/server/utils/processTrending.js
@@ -6,47 +6,55 @@ const {
   getWeight,
 } = require('server/models/utils/trendingAnalysis');
 
+const logger = require('winston');
+
 module.exports = async () => {
-  const claims = await db.Trending.getTrendingClaims();
-  const claimViews = await db.Views.getUniqueViews();
-
-  if(claimViews.length <= 1) {
-    return;
-  }
-
-  const time = Date.now();
-
-  // Must create statistical analytics before we can process zScores, etc
-  const viewsNumArray = claimViews.map((claimViewsEntry) => claimViewsEntry.views);
-  const {
-    mean,
-    standardDeviation,
-  } = getInformationFromValues(viewsNumArray);
-
-  for(let i = 0; i < claimViews.length; i++) {
-    let claimViewsEntry = claimViews[i];
-
-    const {
-      isChannel,
-      claimId,
-      publisherId,
-    } = claimViewsEntry;
-
-    const zScore = getZScore(claimViewsEntry.views, mean, standardDeviation);
-    const pValue = getFastPValue(zScore);
-    const weight = getWeight(zScore, pValue);
-
-    const trendingData = {
-      time,
-      isChannel: claimViewsEntry.isChannel,
-      claimId: claimViewsEntry.claimId,
-      publisherId: claimViewsEntry.publisherId,
-      intervalViews: claimViewsEntry.views,
-      weight,
-      zScore,
-      pValue,
-    };
-
-    db.Trending.create(trendingData);
-  }
+  try {
+    const claims = await db.Trending.getTrendingClaims();
+    const claimViews = await db.Views.getUniqueViews();
+
+    if(claimViews.length <= 1) {
+      return;
+    }
+
+    const time = Date.now();
+
+    // Must create statistical analytics before we can process zScores, etc
+    const viewsNumArray = claimViews.map((claimViewsEntry) => claimViewsEntry.views);
+    const {
+      mean,
+      standardDeviation,
+    } = getInformationFromValues(viewsNumArray);
+
+    for(let i = 0; i < claimViews.length; i++) {
+      let claimViewsEntry = claimViews[i];
+
+      const {
+        isChannel,
+        claimId,
+        publisherId,
+      } = claimViewsEntry;
+
+      const zScore = getZScore(claimViewsEntry.views, mean, standardDeviation);
+      const pValue = getFastPValue(zScore);
+      const weight = getWeight(zScore, pValue);
+
+      const trendingData = {
+        time,
+        isChannel: claimViewsEntry.isChannel,
+        claimId: claimViewsEntry.claimId,
+        publisherId: claimViewsEntry.publisherId,
+        intervalViews: claimViewsEntry.views,
+        weight,
+        zScore,
+        pValue,
+      };
+
+      // Await so failed inserts are caught by the surrounding try/catch
+      // instead of escaping as unhandled promise rejections.
+      await db.Trending.create(trendingData);
+    }
+  } catch(e) {
+    logger.error('Error processing trending content:', e);
+  }
 }