diff --git a/client/src/actions/show.js b/client/src/actions/show.js
index b5d6d5c0..bdffb1df 100644
--- a/client/src/actions/show.js
+++ b/client/src/actions/show.js
@@ -1,5 +1,10 @@
 import * as actions from '../constants/show_action_types';
-import { CHANNEL, ASSET_LITE, ASSET_DETAILS } from '../constants/show_request_types';
+import {
+  ASSET_DETAILS,
+  ASSET_LITE,
+  CHANNEL,
+  SPECIAL_ASSET,
+} from '../constants/show_request_types';
 
 // basic request parsing
 export function onHandleShowPageUri (params, url) {
@@ -38,6 +43,15 @@ export function onNewChannelRequest (channelName, channelId) {
   };
 }
 
+export function onNewSpecialAssetRequest (name) {
+  const requestType = SPECIAL_ASSET;
+  const requestId = `sar#${name}`;
+  return {
+    type: actions.SPECIAL_ASSET_REQUEST_NEW,
+    data: { requestType, requestId, name, channelName: name, channelId: name },
+  };
+}
+
 export function onNewAssetRequest (name, id, channelName, channelId, extension) {
   const requestType = extension ? ASSET_LITE : ASSET_DETAILS;
   const requestId = `ar#${name}#${id}#${channelName}#${channelId}`;
diff --git a/client/src/api/specialAssetApi.js b/client/src/api/specialAssetApi.js
new file mode 100644
index 00000000..7771ac8b
--- /dev/null
+++ b/client/src/api/specialAssetApi.js
@@ -0,0 +1,7 @@
+import Request from '../utils/request';
+
+export function getSpecialAssetClaims(host, name, page) {
+  if (!page) page = 1;
+  const url = `${host}/api/special/${name}/${page}`;
+  return Request(url);
+}
diff --git a/client/src/app.js b/client/src/app.js
index e02c9a95..c4456ffe 100644
--- a/client/src/app.js
+++ b/client/src/app.js
@@ -9,6 +9,7 @@ import LoginPage from '@pages/LoginPage';
 import ContentPageWrapper from '@pages/ContentPageWrapper';
 import FourOhFourPage from '@pages/FourOhFourPage';
 import MultisitePage from '@pages/MultisitePage';
+import PopularPage from '@pages/PopularPage';
 
 const App = () => {
   return (
@@ -19,6 +20,7 @@ const App = () => {
       <Route exact path='/faq' component={FaqPage} />
       <Route exact path='/login' component={LoginPage} />
       <Route exact path='/multisite' component={MultisitePage} />
+      <Route exact path='/popular' component={PopularPage} />
       <Route exact path='/:identifier/:claim' component={ContentPageWrapper} />
       <Route exact path='/:claim' component={ContentPageWrapper} />
       <Route component={FourOhFourPage} />
diff --git a/client/src/constants/show_action_types.js b/client/src/constants/show_action_types.js
index 8bb2eca4..02c79813 100644
--- a/client/src/constants/show_action_types.js
+++ b/client/src/constants/show_action_types.js
@@ -5,6 +5,7 @@ export const REQUEST_ERROR = 'REQUEST_ERROR';
 export const REQUEST_UPDATE = 'REQUEST_UPDATE';
 export const ASSET_REQUEST_NEW = 'ASSET_REQUEST_NEW';
 export const CHANNEL_REQUEST_NEW = 'CHANNEL_REQUEST_NEW';
+export const SPECIAL_ASSET_REQUEST_NEW = 'SPECIAL_ASSET_REQUEST_NEW';
 export const REQUEST_LIST_ADD = 'REQUEST_LIST_ADD';
 
 // asset actions
diff --git a/client/src/constants/show_request_types.js b/client/src/constants/show_request_types.js
index d5fbed67..ca93c4e4 100644
--- a/client/src/constants/show_request_types.js
+++ b/client/src/constants/show_request_types.js
@@ -1,3 +1,4 @@
 export const CHANNEL = 'CHANNEL';
 export const ASSET_LITE = 'ASSET_LITE';
 export const ASSET_DETAILS = 'ASSET_DETAILS';
+export const SPECIAL_ASSET = 'SPECIAL_ASSET';
diff --git a/client/src/pages/ContentPageWrapper/view.jsx b/client/src/pages/ContentPageWrapper/view.jsx
index 9bb8e8cf..9a1cdb3f 100644
--- a/client/src/pages/ContentPageWrapper/view.jsx
+++ b/client/src/pages/ContentPageWrapper/view.jsx
@@ -5,7 +5,12 @@ import ShowAssetDetails from '@pages/ShowAssetDetails';
 import ShowChannel from '@pages/ShowChannel';
 import { withRouter } from 'react-router-dom';
 
-import { CHANNEL, ASSET_LITE, ASSET_DETAILS } from '../../constants/show_request_types';
+import {
+  CHANNEL,
+  ASSET_LITE,
+  ASSET_DETAILS,
+  SPECIAL_ASSET,
+} from '../../constants/show_request_types';
 
 class ContentPageWrapper extends React.Component {
   componentDidMount () {
@@ -31,6 +36,8 @@ class ContentPageWrapper extends React.Component {
         return <ShowAssetLite />;
       case ASSET_DETAILS:
         return <ShowAssetDetails />;
+      case SPECIAL_ASSET:
+        return <ShowChannel />;
       default:
         return <p>loading...</p>;
     }
diff --git a/client/src/pages/PopularPage/index.jsx b/client/src/pages/PopularPage/index.jsx
new file mode 100644
index 00000000..c1f3ba6b
--- /dev/null
+++ b/client/src/pages/PopularPage/index.jsx
@@ -0,0 +1,17 @@
+import { connect } from 'react-redux';
+import { onHandleShowHomepage } from '../../actions/show';
+import View from './view';
+
+const mapStateToProps = ({ show, site, channel }) => {
+  return {
+    error      : show.request.error,
+    requestType: show.request.type,
+    homeChannel: 'special:trending',
+  };
+};
+
+const mapDispatchToProps = {
+  onHandleShowHomepage,
+};
+
+export default connect(mapStateToProps, mapDispatchToProps)(View);
diff --git a/client/src/pages/PopularPage/view.jsx b/client/src/pages/PopularPage/view.jsx
new file mode 100644
index 00000000..e30b9149
--- /dev/null
+++ b/client/src/pages/PopularPage/view.jsx
@@ -0,0 +1,23 @@
+import React from 'react';
+import ContentPageWrapper from '@pages/ContentPageWrapper';
+
+class PopularPage extends React.Component {
+  componentDidMount () {
+    this.props.onHandleShowHomepage(this.props.match.params);
+  }
+
+  componentWillReceiveProps (nextProps) {
+    if (nextProps.match.params !== this.props.match.params) {
+      this.props.onHandleShowHomepage(nextProps.match.params);
+    }
+  }
+
+  render () {
+    const { homeChannel } = this.props;
+    return (
+      <ContentPageWrapper homeChannel={homeChannel} />
+    )
+  }
+};
+
+export default PopularPage;
diff --git a/client/src/pages/ShowChannel/index.js b/client/src/pages/ShowChannel/index.js
index e741d3da..e2f5ada0 100644
--- a/client/src/pages/ShowChannel/index.js
+++ b/client/src/pages/ShowChannel/index.js
@@ -6,6 +6,7 @@ const mapStateToProps = ({ show, site, channel }) => {
   const requestId = show.request.id;
   // select request
   const previousRequest = show.requestList[requestId] || null;
+
   // select channel
   let thisChannel;
   if (previousRequest) {
diff --git a/client/src/sagas/rootSaga.js b/client/src/sagas/rootSaga.js
index a83bda57..8bdc07d3 100644
--- a/client/src/sagas/rootSaga.js
+++ b/client/src/sagas/rootSaga.js
@@ -2,6 +2,7 @@ import { all } from 'redux-saga/effects';
 import { watchHandleShowPageUri, watchHandleShowHomepage } from './show_uri';
 import { watchNewAssetRequest } from './show_asset';
 import { watchNewChannelRequest, watchUpdateChannelClaims } from './show_channel';
+import { watchNewSpecialAssetRequest } from './show_special';
 import { watchFileIsRequested } from './file';
 import { watchPublishStart } from './publish';
 import { watchUpdateClaimAvailability } from './updateClaimAvailability';
@@ -16,6 +17,7 @@ export function * rootSaga () {
     watchHandleShowHomepage(),
     watchNewAssetRequest(),
     watchNewChannelRequest(),
+    watchNewSpecialAssetRequest(),
     watchUpdateChannelClaims(),
     watchFileIsRequested(),
     watchPublishStart(),
diff --git a/client/src/sagas/show_special.js b/client/src/sagas/show_special.js
new file mode 100644
index 00000000..eedaac53
--- /dev/null
+++ b/client/src/sagas/show_special.js
@@ -0,0 +1,46 @@
+import {call, put, select, takeLatest} from 'redux-saga/effects';
+import * as actions from '../constants/show_action_types';
+import { addNewChannelToChannelList, addRequestToRequestList, onRequestError, onRequestUpdate, updateChannelClaims } from '../actions/show';
+// channelApi helpers are not needed here; special assets fetch claims via specialAssetApi
+import { getSpecialAssetClaims } from '../api/specialAssetApi';
+import { selectShowState } from '../selectors/show';
+import { selectSiteHost } from '../selectors/site';
+
+export function * newSpecialAssetRequest (action) {
+  const { requestType, requestId, name } = action.data;
+  let claimsData;
+  // put an action to update the request in redux
+  yield put(onRequestUpdate(requestType, requestId));
+  // is this an existing request?
+  // If this uri is in the request list, it's already been fetched
+  const state = yield select(selectShowState);
+  const host = yield select(selectSiteHost);
+  if (state.requestList[requestId]) {
+    return null;
+  }
+
+  // store the request in the channel requests list
+  const channelKey = `sar#${name}`;
+  yield put(addRequestToRequestList(requestId, null, channelKey));
+
+  // If this channel is in the channel list, it's already been fetched
+  if (state.channelList[channelKey]) {
+    return null;
+  }
+  // get channel claims data
+  try {
+    ({ data: claimsData } = yield call(getSpecialAssetClaims, host, name, 1));
+  } catch (error) {
+    return yield put(onRequestError(error.message));
+  }
+
+  // store the channel data in the channel list
+  yield put(addNewChannelToChannelList(channelKey, name, null, null, claimsData));
+
+  // clear any request errors
+  yield put(onRequestError(null));
+}
+
+export function * watchNewSpecialAssetRequest () {
+  yield takeLatest(actions.SPECIAL_ASSET_REQUEST_NEW, newSpecialAssetRequest);
+}
diff --git a/client/src/sagas/show_uri.js b/client/src/sagas/show_uri.js
index 84467a5b..c8525009 100644
--- a/client/src/sagas/show_uri.js
+++ b/client/src/sagas/show_uri.js
@@ -1,8 +1,14 @@
 import { call, put, takeLatest } from 'redux-saga/effects';
 import * as actions from '../constants/show_action_types';
-import { onRequestError, onNewChannelRequest, onNewAssetRequest } from '../actions/show';
+import {
+  onRequestError,
+  onNewChannelRequest,
+  onNewAssetRequest,
+  onNewSpecialAssetRequest,
+} from '../actions/show';
 import { newAssetRequest } from '../sagas/show_asset';
 import { newChannelRequest } from '../sagas/show_channel';
+import { newSpecialAssetRequest } from '../sagas/show_special';
 import lbryUri from '../../../utils/lbryUri';
 
 function * parseAndUpdateIdentifierAndClaim (modifier, claim) {
@@ -24,27 +30,32 @@ function * parseAndUpdateIdentifierAndClaim (modifier, claim) {
 }
 
 function * parseAndUpdateClaimOnly (claim) {
-  // this could be a request for an asset or a channel page
-  // claim could be an asset claim or a channel claim
-  let isChannel, channelName, channelClaimId;
-  try {
-    ({ isChannel, channelName, channelClaimId } = lbryUri.parseIdentifier(claim));
-  } catch (error) {
-    return yield put(onRequestError(error.message));
+  if(/^special\:/.test(claim) === true) {
+    const assetName = /special\:(.*)/.exec(claim)[1];
+    return yield call(newSpecialAssetRequest, onNewSpecialAssetRequest(assetName));
+  } else {
+    // this could be a request for an asset or a channel page
+    // claim could be an asset claim or a channel claim
+    let isChannel, channelName, channelClaimId;
+    try {
+      ({ isChannel, channelName, channelClaimId } = lbryUri.parseIdentifier(claim));
+    } catch (error) {
+      return yield put(onRequestError(error.message));
+    }
+    // trigger an new action to update the store
+    // return early if this request is for a channel
+    if (isChannel) {
+      return yield call(newChannelRequest, onNewChannelRequest(channelName, channelClaimId));
+    }
+    // if not for a channel, parse the claim request
+    let claimName, extension;
+    try {
+      ({claimName, extension} = lbryUri.parseClaim(claim));
+    } catch (error) {
+      return yield put(onRequestError(error.message));
+    }
+    yield call(newAssetRequest, onNewAssetRequest(claimName, null, null, null, extension));
   }
-  // trigger an new action to update the store
-  // return early if this request is for a channel
-  if (isChannel) {
-    return yield call(newChannelRequest, onNewChannelRequest(channelName, channelClaimId));
-  }
-  // if not for a channel, parse the claim request
-  let claimName, extension;
-  try {
-    ({claimName, extension} = lbryUri.parseClaim(claim));
-  } catch (error) {
-    return yield put(onRequestError(error.message));
-  }
-  yield call(newAssetRequest, onNewAssetRequest(claimName, null, null, null, extension));
 }
 
 export function * handleShowPageUri (action) {
diff --git a/server/controllers/api/claim/data/index.js b/server/controllers/api/claim/data/index.js
index b72751a4..6efe4e83 100644
--- a/server/controllers/api/claim/data/index.js
+++ b/server/controllers/api/claim/data/index.js
@@ -1,7 +1,7 @@
 const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
 const getClaimData = require('server/utils/getClaimData');
 const chainquery = require('chainquery');
-const db = require('../../../../models');
+const db = require('server/models');
 
 /*
 
@@ -16,7 +16,7 @@ const claimData = async ({ ip, originalUrl, body, params }, res) => {
 
   try {
     let resolvedClaim = await chainquery.claim.queries.resolveClaim(claimName, claimId).catch(() => {});
-    
+
     if(!resolvedClaim) {
       resolvedClaim = await db.Claim.resolveClaim(claimName, claimId);
     }
diff --git a/server/controllers/api/special/claims/index.js b/server/controllers/api/special/claims/index.js
new file mode 100644
index 00000000..4feec2da
--- /dev/null
+++ b/server/controllers/api/special/claims/index.js
@@ -0,0 +1,42 @@
+const { handleErrorResponse } = require('../../../utils/errorHandlers.js');
+const db = require('server/models');
+const getClaimData = require('server/utils/getClaimData');
+
+/*
+
+  route to get all claims for special
+
+*/
+
+const channelClaims = async ({ ip, originalUrl, body, params }, res) => {
+  const {
+    name,
+    page,
+  } = params;
+
+  if(name === 'trending') {
+    const result = await db.Trending.getTrendingClaims();
+    const claims = await Promise.all(result.map((claim) => getClaimData(claim)));
+    return res.status(200).json({
+      success: true,
+      data: {
+        channelName: name,
+        claims,
+        longChannelClaimId: name,
+        currentPage: 1,
+        nextPage: null,
+        previousPage: null,
+        totalPages: 1,
+        totalResults: claims.length,
+      }
+    });
+  }
+
+  res.status(404).json({
+    success: false,
+    message: 'Feature endpoint not found',
+  });
+  handleErrorResponse(originalUrl, ip, 'Feature endpoint not found', res);
+};
+
+module.exports = channelClaims;
diff --git a/server/controllers/assets/utils/getClaimIdAndServeAsset.js b/server/controllers/assets/utils/getClaimIdAndServeAsset.js
index 37c2cfd4..26c6805d 100644
--- a/server/controllers/assets/utils/getClaimIdAndServeAsset.js
+++ b/server/controllers/assets/utils/getClaimIdAndServeAsset.js
@@ -37,11 +37,26 @@ const getClaimIdAndServeAsset = (channelName, channelClaimId, claimName, claimId
       return claim;
     })
     .then(claim => {
-      if (serveOnlyApproved && !isApprovedChannel({ longId: claim.dataValues.publisher_id }, approvedChannels)) {
+      let claimDataValues = claim.dataValues;
+
+      if (serveOnlyApproved && !isApprovedChannel({ longId: claimDataValues.publisher_id || claimDataValues.certificateId }, approvedChannels)) {
         throw new Error(CONTENT_UNAVAILABLE);
       }
-      logger.debug('Outpoint:', claim.dataValues.outpoint);
-      return db.Blocked.isNotBlocked(claim.dataValues.outpoint);
+
+      let outpoint = claimDataValues.outpoint || `${claimDataValues.transaction_hash_id}:${claimDataValues.vout}`;
+      logger.debug('Outpoint:', outpoint);
+      return db.Blocked.isNotBlocked(outpoint).then(() => {
+        // If content was found, is approved, and not blocked - log a view.
+        db.Views.create({
+          time: Date.now(),
+          isChannel: false,
+          claimId: claimDataValues.claim_id || claimDataValues.claimId,
+          publisherId: claimDataValues.publisher_id || claimDataValues.certificateId,
+          ip,
+        });
+
+        return;
+      });
     })
     .then(() => {
       return db.File.findOne({
diff --git a/server/index.js b/server/index.js
index 011aac96..c42faa80 100644
--- a/server/index.js
+++ b/server/index.js
@@ -11,12 +11,18 @@ const httpContext = require('express-http-context');
 
 // load local modules
 const db = require('./models');
-const requestLogger = require('./middleware/requestLogger.js');
-const createDatabaseIfNotExists = require('./models/utils/createDatabaseIfNotExists.js');
+const requestLogger = require('./middleware/requestLogger');
+const createDatabaseIfNotExists = require('./models/utils/createDatabaseIfNotExists');
 const { getWalletBalance } = require('./lbrynet/index');
-const configureLogging = require('./utils/configureLogging.js');
-const configureSlack = require('./utils/configureSlack.js');
-const speechPassport = require('./speechPassport/index');
+const configureLogging = require('./utils/configureLogging');
+const configureSlack = require('./utils/configureSlack');
+const speechPassport = require('./speechPassport');
+const processTrending = require('./utils/processTrending');
+
+const {
+  logMetricsMiddleware,
+  setRouteDataInContextMiddleware,
+} = require('./middleware/logMetricsMiddleware');
 
 const {
   details: { port: PORT },
@@ -27,36 +33,6 @@ const {
   },
 } = require('@config/siteConfig');
 
-function logMetricsMiddleware(req, res, next) {
-  res.on('finish', () => {
-    const userAgent = req.get('user-agent');
-    const routePath = httpContext.get('routePath');
-
-    db.Metrics.create({
-      isInternal: /node\-fetch/.test(userAgent),
-      isChannel: res.isChannel,
-      claimId: res.claimId,
-      routePath: httpContext.get('routePath'),
-      params: JSON.stringify(req.params),
-      ip: req.headers['x-forwarded-for'] || req.connection.remoteAddress,
-      request: req.url,
-      routeData: JSON.stringify(httpContext.get('routeData')),
-      referrer: req.get('referrer'),
-      userAgent,
-    });
-  });
-
-  next();
-}
-
-function setRouteDataInContextMiddleware(routePath, routeData) {
-  return function (req, res, next) {
-    httpContext.set('routePath', routePath);
-    httpContext.set('routeData', routeData);
-    next();
-  };
-}
-
 function Server () {
   this.initialize = () => {
     // configure logging
@@ -200,6 +176,8 @@ function Server () {
       })
       .then(() => {
         logger.info('Spee.ch startup is complete');
+
+        setInterval(processTrending, 30 * 60000) // 30 minutes
       })
       .catch(error => {
         if (error.code === 'ECONNREFUSED') {
diff --git a/server/middleware/logMetricsMiddleware.js b/server/middleware/logMetricsMiddleware.js
new file mode 100644
index 00000000..7845aec5
--- /dev/null
+++ b/server/middleware/logMetricsMiddleware.js
@@ -0,0 +1,38 @@
+const db = require('../models');
+const httpContext = require('express-http-context');
+
+function logMetricsMiddleware(req, res, next) {
+  res.on('finish', () => {
+    const userAgent = req.get('user-agent');
+    const routePath = httpContext.get('routePath');
+
+    db.Metrics.create({
+      time: Date.now(),
+      isInternal: /node\-fetch/.test(userAgent),
+      isChannel: res.isChannel,
+      claimId: res.claimId,
+      routePath: httpContext.get('routePath'),
+      params: JSON.stringify(req.params),
+      ip: req.headers['x-forwarded-for'] || req.connection.remoteAddress,
+      request: req.url,
+      routeData: JSON.stringify(httpContext.get('routeData')),
+      referrer: req.get('referrer'),
+      userAgent,
+    });
+  });
+
+  next();
+}
+
+function setRouteDataInContextMiddleware(routePath, routeData) {
+  return function (req, res, next) {
+    httpContext.set('routePath', routePath);
+    httpContext.set('routeData', routeData);
+    next();
+  };
+}
+
+module.exports = {
+  logMetricsMiddleware,
+  setRouteDataInContextMiddleware,
+};
diff --git a/server/models/index.js b/server/models/index.js
index 54461da6..14c460b0 100644
--- a/server/models/index.js
+++ b/server/models/index.js
@@ -8,7 +8,9 @@ const Claim = require('./claim');
 const File = require('./file');
 const Metrics = require('./metrics');
 const Tor = require('./tor');
+const Trending = require('./trending');
 const User = require('./user');
+const Views = require('./views');
 
 const {
   database,
@@ -56,7 +58,9 @@ db['Claim'] = sequelize.import('Claim', Claim);
 db['File'] = sequelize.import('File', File);
 db['Metrics'] = sequelize.import('Metrics', Metrics);
 db['Tor'] = sequelize.import('Tor', Tor);
+db['Trending'] = sequelize.import('Trending', Trending);
 db['User'] = sequelize.import('User', User);
+db['Views'] = sequelize.import('Views', Views);
 
 // run model.association for each model in the db object that has an association
 logger.info('associating db models...');
diff --git a/server/models/trending.js b/server/models/trending.js
new file mode 100644
index 00000000..183257e8
--- /dev/null
+++ b/server/models/trending.js
@@ -0,0 +1,98 @@
+const chainquery = require('chainquery');
+
+module.exports = (sequelize, { BOOLEAN, DATE, FLOAT, INTEGER, STRING }) => {
+  const Trending = sequelize.define(
+    'Trending',
+    {
+      time: { /* TODO: Historical analysis and log roll */
+        type: DATE(6),
+        defaultValue: sequelize.NOW,
+      },
+      isChannel: {
+        type: BOOLEAN,
+        defaultValue: false,
+      },
+      claimId: {
+        type: STRING,
+        defaultValue: null,
+      },
+      publisherId: {
+        type: STRING,
+        defaultValue: null,
+      },
+      intervalViews: {
+        type: INTEGER,
+        defaultValue: 0,
+      },
+      weight: {
+        type: FLOAT,
+        defaultValue: 0,
+      },
+      zScore: {
+        type: FLOAT,
+        defaultValue: 0,
+      },
+      pValue: {
+        type: FLOAT,
+        defaultValue: 0,
+      },
+      // TODO: Calculate t-statistics
+    },
+    {
+      freezeTableName: true,
+      timestamps: false, // don't use default timestamps columns
+      indexes: [
+        {
+          fields: ['claimId'],
+        },
+        {
+          fields: ['time', 'isChannel', 'claimId', 'publisherId', 'weight'],
+        },
+      ],
+    }
+  );
+
+  Trending.getTrendingWeightData = async ({
+    hours = 2,
+    minutes = 0,
+    limit = 20
+  } = {}) => {
+    let time = new Date();
+    time.setHours(time.getHours() - hours);
+    time.setMinutes(time.getMinutes() - minutes);
+
+    const sqlTime = time.toISOString().slice(0, 19).replace('T', ' ');
+
+    const selectString = 'DISTINCT(claimId), weight';
+    const whereString = `isChannel = false and time > '${sqlTime}'`;
+    const query = `SELECT ${selectString} FROM trending WHERE ${whereString} ORDER BY weight DESC LIMIT ${limit}`
+
+    return await sequelize.query(query, { type: sequelize.QueryTypes.SELECT });
+  };
+
+  Trending.getTrendingClaims = async () => {
+    const trendingWeightData = await Trending.getTrendingWeightData();
+
+    const trendingClaimIds = [];
+    const trendingClaims = trendingWeightData.reduce((claims, trendingData) => {
+      trendingClaimIds.push(trendingData.claimId);
+      claims[trendingData.claimId] = {
+        ...trendingData
+      };
+
+      return claims;
+    }, {});
+
+    const claimData = await chainquery.claim.findAll({
+      where: {
+        claim_id: { [sequelize.Op.in]: trendingClaimIds },
+      },
+    });
+
+    return claimData.map((claimData) => {
+      return Object.assign(trendingClaims[claimData.claim_id], claimData.dataValues);
+    });
+  };
+
+  return Trending;
+};
diff --git a/server/models/utils/trendingAnalysis.js b/server/models/utils/trendingAnalysis.js
new file mode 100644
index 00000000..47d48e37
--- /dev/null
+++ b/server/models/utils/trendingAnalysis.js
@@ -0,0 +1,68 @@
+const ZSCORE_CRITICAL_THRESHOLD = 1.96; // 95-percentile
+const ZSCORE_NINETYNINTH = 2.326347875; // 99-percentile
+const ONE_DIV_SQRT_2PI = 0.3989422804014327; // V8 float of 1/SQRT(2 * PI)
+const MAX_P_PRECISION = Math.exp(-16); // Rough estimation of V8 precision, -16 is 1.1253517471925912e-7
+const MIN_P = -6.44357455534; // v8 float 0.0...0
+const MAX_P = 6.44357455534; // v8 float 1.0...0
+
+const getMean = (numArr) => {
+  let total = 0;
+  let length = numArr.length; // store local to reduce potential prop lookups
+
+  for(let i = 0; i < length; i++) {
+    total += numArr[i];
+  }
+
+  return total / length;
+};
+
+const getStandardDeviation = (numArr, mean) => {
+  return Math.sqrt(numArr.reduce((sq, n) => (
+    sq + Math.pow(n - mean, 2)
+  ), 0) / (numArr.length - 1));
+};
+
+const getInformationFromValues = (numArr) => {
+  let mean = getMean(numArr);
+
+  return {
+    mean,
+    standardDeviation: getStandardDeviation(numArr, mean),
+  }
+};
+
+const getZScore = (value, mean, sDeviation) => ( sDeviation !== 0 ? (value - mean) / sDeviation : 0 );
+
+const getFastPValue = (zScore) => {
+  if(zScore <= MIN_P) {
+   return 0;
+  }
+  if(zScore >= MAX_P) {
+   return 1;
+  }
+
+  let factorialK = 1;
+  let k = 0;
+  let sum = 0;
+  let term = 1;
+
+  while(Math.abs(term) > MAX_P_PRECISION) {
+    term = ONE_DIV_SQRT_2PI * Math.pow(-1 , k) * Math.pow(zScore , k) / (2 * k + 1) / Math.pow(2 , k) * Math.pow(zScore, k + 1) / factorialK;
+    sum += term;
+    k++;
+    factorialK *= k;
+  }
+  sum += 0.5;
+
+  return sum;
+};
+
+
+const getWeight = (zScore, pValue) => (zScore * pValue);
+
+module.exports = {
+  getInformationFromValues,
+  getZScore,
+  getFastPValue,
+  getWeight,
+};
diff --git a/server/models/views.js b/server/models/views.js
new file mode 100644
index 00000000..9c6b15f0
--- /dev/null
+++ b/server/models/views.js
@@ -0,0 +1,57 @@
+module.exports = (sequelize, { BOOLEAN, DATE, STRING }) => {
+  const Views = sequelize.define(
+    'Views',
+    {
+      time: {
+        type: DATE(6),
+        defaultValue: sequelize.NOW,
+      },
+      isChannel: {
+        type: BOOLEAN,
+        defaultValue: false,
+      },
+      claimId: {
+        type: STRING,
+        defaultValue: null,
+      },
+      publisherId: {
+        type: STRING,
+        defaultValue: null,
+      },
+      ip: {
+        type: STRING,
+        defaultValue: null,
+      },
+    },
+    {
+      freezeTableName: true,
+      timestamps: false, // don't use default timestamps columns
+      indexes: [
+        {
+          fields: ['time', 'isChannel', 'claimId', 'publisherId', 'ip'],
+        },
+      ],
+    }
+  );
+
+  Views.getUniqueViews = ({
+    hours = 0,
+    minutes = 30,
+  } = {}) => {
+    let time = new Date();
+    time.setHours(time.getHours() - hours);
+    time.setMinutes(time.getMinutes() - minutes);
+
+    const sqlTime = time.toISOString().slice(0, 19).replace('T', ' ');
+
+    const selectString = 'claimId, publisherId, isChannel, COUNT(DISTINCT ip) as views';
+    const groupString = 'claimId, publisherId, isChannel';
+
+    return sequelize.query(
+      `SELECT ${selectString} FROM views where time > '${sqlTime}' GROUP BY ${groupString}`,
+      { type: sequelize.QueryTypes.SELECT }
+    );
+  }
+
+  return Views;
+};
diff --git a/server/routes/api/index.js b/server/routes/api/index.js
index 7b5e9806..150a61c9 100644
--- a/server/routes/api/index.js
+++ b/server/routes/api/index.js
@@ -17,6 +17,7 @@ const claimPublish = require('../../controllers/api/claim/publish');
 const claimResolve = require('../../controllers/api/claim/resolve');
 const claimShortId = require('../../controllers/api/claim/shortId');
 const fileAvailability = require('../../controllers/api/file/availability');
+const specialClaims = require('../../controllers/api/special/claims');
 const userPassword = require('../../controllers/api/user/password');
 const publishingConfig = require('../../controllers/api/config/site/publishing');
 const getTorList = require('../../controllers/api/tor');
@@ -83,6 +84,10 @@ module.exports = {
   '/api/channel/data/:channelName/:channelClaimId': { controller: [ torCheckMiddleware, channelData ] },
   '/api/channel/data/:channelName/:channelClaimId': { controller: [ torCheckMiddleware, channelData ] },
   '/api/channel/claims/:channelName/:channelClaimId/:page': { controller: [ torCheckMiddleware, channelClaims ] },
+
+  // special routes
+  '/api/special/:name/:page': { controller: [ torCheckMiddleware, specialClaims ] },
+
   // claim routes
   '/api/claim/availability/:name': { controller: [ torCheckMiddleware, claimAvailability ] },
   '/api/claim/data/:claimName/:claimId': { controller: [ torCheckMiddleware, claimData ] },
diff --git a/server/utils/isRequestLocal.js b/server/utils/isRequestLocal.js
new file mode 100644
index 00000000..65e399b7
--- /dev/null
+++ b/server/utils/isRequestLocal.js
@@ -0,0 +1,6 @@
+module.exports = function(req) {
+    let reqIp = req.connection.remoteAddress;
+    let host = req.get('host');
+
+    return reqIp === '127.0.0.1' || reqIp === '::ffff:127.0.0.1' || reqIp === '::1' || host.indexOf('localhost') !== -1;
+}
diff --git a/server/utils/processTrending.js b/server/utils/processTrending.js
new file mode 100644
index 00000000..f84fe057
--- /dev/null
+++ b/server/utils/processTrending.js
@@ -0,0 +1,52 @@
+const db = require('server/models');
+const {
+  getInformationFromValues,
+  getZScore,
+  getFastPValue,
+  getWeight,
+} = require('server/models/utils/trendingAnalysis');
+
+module.exports = async () => {
+  const claims = await db.Trending.getTrendingClaims();
+  const claimViews = await db.Views.getUniqueViews();
+
+  if(claimViews.length <= 1) {
+    return;
+  }
+
+  const time = Date.now();
+
+  // Must create statistical analytics before we can process zScores, etc
+  const viewsNumArray = claimViews.map((claimViewsEntry) => claimViewsEntry.views);
+  const {
+    mean,
+    standardDeviation,
+  } = getInformationFromValues(viewsNumArray);
+
+  for(let i = 0; i < claimViews.length; i++) {
+    let claimViewsEntry = claimViews[i];
+
+    const {
+      isChannel,
+      claimId,
+      publisherId,
+    } = claimViewsEntry;
+
+    const zScore = getZScore(claimViewsEntry.views, mean, standardDeviation);
+    const pValue = getFastPValue(zScore);
+    const weight = getWeight(zScore, pValue);
+
+    const trendingData = {
+      time,
+      isChannel: claimViewsEntry.isChannel,
+      claimId: claimViewsEntry.claimId,
+      publisherId: claimViewsEntry.publisherId,
+      intervalViews: claimViewsEntry.views,
+      weight,
+      zScore,
+      pValue,
+    };
+
+    db.Trending.create(trendingData);
+  }
+}