replace claim_list_by_channel with claim_search
parent bb6be90560
commit 9994c9ba24
3 changed files with 93 additions and 92 deletions
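claim_list_by_channel resolved a channel by its full URI and returned the result keyed by that URI, with the claims under claims_in_channel; claim_search instead takes the channel's claim ID and returns the matching claims as a flat items array. A condensed before/after sketch of the call shape, using only names that appear in the hunks below (Lbry, parseURI, PAGE_SIZE, subscriptionUri); illustrative only, not standalone code:

    // Before: results come back keyed by the requested channel URI.
    Lbry.claim_list_by_channel({ uri: subscriptionUri, page: 1, page_size: PAGE_SIZE }).then(
      claimListByChannel => {
        const { claims_in_channel: claimsInChannel } = claimListByChannel[subscriptionUri] || {};
      }
    );

    // After: resolve the channel's claim ID from the URI, then search by channel_id;
    // the claims come back as result.items.
    const { claimId } = parseURI(subscriptionUri);
    Lbry.claim_search({ channel_id: claimId, page: 1, page_size: PAGE_SIZE }).then(result => {
      const { items: claimsInChannel } = result;
    });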
dist/bundle.es.js (vendored): 16 changed lines
@@ -1544,18 +1544,20 @@ const doCheckSubscription = (subscriptionUri, shouldNotify) => (dispatch, getSta
 
   if (!savedSubscription) {
     throw Error(`Trying to find new content for ${subscriptionUri} but it doesn't exist in your subscriptions`);
-  } // We may be duplicating calls here. Can this logic be baked into doFetchClaimsByChannel?
+  }
 
+  const {
+    claimId
+  } = lbryRedux.parseURI(subscriptionUri); // We may be duplicating calls here. Can this logic be baked into doFetchClaimsByChannel?
 
-  lbryRedux.Lbry.claim_list_by_channel({
-    uri: subscriptionUri,
+  lbryRedux.Lbry.claim_search({
+    channel_id: claimId,
     page: 1,
     page_size: PAGE_SIZE
-  }).then(claimListByChannel => {
-    const claimResult = claimListByChannel[subscriptionUri] || {};
+  }).then(result => {
     const {
-      claims_in_channel: claimsInChannel
-    } = claimResult; // may happen if subscribed to an abandoned channel or an empty channel
+      items: claimsInChannel
+    } = result; // may happen if subscribed to an abandoned channel or an empty channel
 
     if (!claimsInChannel || !claimsInChannel.length) {
       return;
dist/bundle.js (vendored): 26 changed lines
@@ -2527,16 +2527,18 @@ var doCheckSubscription = function doCheckSubscription(subscriptionUri, shouldNo
 
     if (!savedSubscription) {
       throw Error("Trying to find new content for ".concat(subscriptionUri, " but it doesn't exist in your subscriptions"));
-    } // We may be duplicating calls here. Can this logic be baked into doFetchClaimsByChannel?
+    }
 
+    var _parseURI = Object(lbry_redux__WEBPACK_IMPORTED_MODULE_3__["parseURI"])(subscriptionUri),
+        claimId = _parseURI.claimId; // We may be duplicating calls here. Can this logic be baked into doFetchClaimsByChannel?
+
 
-    lbry_redux__WEBPACK_IMPORTED_MODULE_3__["Lbry"].claim_list_by_channel({
-      uri: subscriptionUri,
+    lbry_redux__WEBPACK_IMPORTED_MODULE_3__["Lbry"].claim_search({
+      channel_id: claimId,
       page: 1,
       page_size: constants_claim__WEBPACK_IMPORTED_MODULE_0__["PAGE_SIZE"]
-    }).then(function (claimListByChannel) {
-      var claimResult = claimListByChannel[subscriptionUri] || {};
-      var claimsInChannel = claimResult.claims_in_channel; // may happen if subscribed to an abandoned channel or an empty channel
+    }).then(function (result) {
+      var claimsInChannel = result.items; // may happen if subscribed to an abandoned channel or an empty channel
 
       if (!claimsInChannel || !claimsInChannel.length) {
         return;
@@ -2617,8 +2619,8 @@ var doChannelSubscribe = function doChannelSubscribe(subscription) {
     }); // if the user isn't sharing data, keep the subscriptions entirely in the app
 
     if (isSharingData) {
-      var _parseURI = Object(lbry_redux__WEBPACK_IMPORTED_MODULE_3__["parseURI"])(subscription.uri),
-          claimId = _parseURI.claimId; // They are sharing data, we can store their subscriptions in our internal database
+      var _parseURI2 = Object(lbry_redux__WEBPACK_IMPORTED_MODULE_3__["parseURI"])(subscription.uri),
+          claimId = _parseURI2.claimId; // They are sharing data, we can store their subscriptions in our internal database
 
 
       lbryio__WEBPACK_IMPORTED_MODULE_6__["default"].call('subscription', 'new', {
@@ -2645,8 +2647,8 @@ var doChannelUnsubscribe = function doChannelUnsubscribe(subscription) {
     });
 
     if (isSharingData) {
-      var _parseURI2 = Object(lbry_redux__WEBPACK_IMPORTED_MODULE_3__["parseURI"])(subscription.uri),
-          claimId = _parseURI2.claimId;
+      var _parseURI3 = Object(lbry_redux__WEBPACK_IMPORTED_MODULE_3__["parseURI"])(subscription.uri),
+          claimId = _parseURI3.claimId;
 
       lbryio__WEBPACK_IMPORTED_MODULE_6__["default"].call('subscription', 'delete', {
         claim_id: claimId
@@ -2699,8 +2701,8 @@ var doFetchMySubscriptions = function doFetchMySubscriptions() {
         dbSubMap[sub.claim_id] = 1;
       });
       reduxSubscriptions.forEach(function (sub) {
-        var _parseURI3 = Object(lbry_redux__WEBPACK_IMPORTED_MODULE_3__["parseURI"])(sub.uri),
-            claimId = _parseURI3.claimId;
+        var _parseURI4 = Object(lbry_redux__WEBPACK_IMPORTED_MODULE_3__["parseURI"])(sub.uri),
+            claimId = _parseURI4.claimId;
 
         reduxSubMap[claimId] = 1;
 
@@ -156,92 +156,89 @@ export const doCheckSubscription = (subscriptionUri: string, shouldNotify?: bool
     );
   }
 
+  const { claimId } = parseURI(subscriptionUri);
+
   // We may be duplicating calls here. Can this logic be baked into doFetchClaimsByChannel?
-  Lbry.claim_list_by_channel({ uri: subscriptionUri, page: 1, page_size: PAGE_SIZE }).then(
-    claimListByChannel => {
-      const claimResult = claimListByChannel[subscriptionUri] || {};
-      const { claims_in_channel: claimsInChannel } = claimResult;
+  Lbry.claim_search({ channel_id: claimId, page: 1, page_size: PAGE_SIZE }).then(result => {
+    const { items: claimsInChannel } = result;
 
     // may happen if subscribed to an abandoned channel or an empty channel
     if (!claimsInChannel || !claimsInChannel.length) {
       return;
     }
 
     // Determine if the latest subscription currently saved is actually the latest subscription
     const latestIndex = claimsInChannel.findIndex(
       claim => `${claim.name}#${claim.claim_id}` === savedSubscription.latest
     );
 
     // If latest is -1, it is a newly subscribed channel or there have been 10+ claims published since last viewed
    const latestIndexToNotify = latestIndex === -1 ? 10 : latestIndex;
 
     // If latest is 0, nothing has changed
     // Do not download/notify about new content, it would download/notify 10 claims per channel
     if (latestIndex !== 0 && savedSubscription.latest) {
       let downloadCount = 0;
 
       const newUnread = [];
       claimsInChannel.slice(0, latestIndexToNotify).forEach(claim => {
         const uri = buildURI({ contentName: claim.name, claimId: claim.claim_id }, true);
         const shouldDownload =
           shouldAutoDownload &&
-          Boolean(
-            downloadCount < SUBSCRIPTION_DOWNLOAD_LIMIT && !claim.value.stream.metadata.fee
-          );
+          Boolean(downloadCount < SUBSCRIPTION_DOWNLOAD_LIMIT && !claim.value.stream.metadata.fee);
 
         // Add the new content to the list of "un-read" subscriptions
         if (shouldNotify) {
           newUnread.push(uri);
         }
 
         if (shouldDownload) {
           downloadCount += 1;
           dispatch(doPurchaseUri(uri, { cost: 0 }, true));
         }
       });
 
-        dispatch(
-          doUpdateUnreadSubscriptions(
-            subscriptionUri,
-            newUnread,
-            downloadCount > 0 ? NOTIFICATION_TYPES.DOWNLOADING : NOTIFICATION_TYPES.NOTIFY_ONLY
-          )
-        );
-      }
-
-      // Set the latest piece of content for a channel
-      // This allows the app to know if there has been new content since it was last set
       dispatch(
-        setSubscriptionLatest(
-          {
-            channelName: claimsInChannel[0].channel_name,
-            uri: buildURI(
-              {
-                channelName: claimsInChannel[0].channel_name,
-                claimId: claimsInChannel[0].claim_id,
-              },
-              false
-            ),
-          },
-          buildURI(
-            { contentName: claimsInChannel[0].name, claimId: claimsInChannel[0].claim_id },
-            false
-          )
+        doUpdateUnreadSubscriptions(
+          subscriptionUri,
+          newUnread,
+          downloadCount > 0 ? NOTIFICATION_TYPES.DOWNLOADING : NOTIFICATION_TYPES.NOTIFY_ONLY
         )
       );
 
-      // calling FETCH_CHANNEL_CLAIMS_COMPLETED after not calling STARTED
-      // means it will delete a non-existant fetchingChannelClaims[uri]
-      dispatch({
-        type: ACTIONS.FETCH_CHANNEL_CLAIMS_COMPLETED,
-        data: {
-          uri: subscriptionUri,
-          claims: claimsInChannel || [],
-          page: 1,
-        },
-      });
     }
-  );
+    // Set the latest piece of content for a channel
+    // This allows the app to know if there has been new content since it was last set
+    dispatch(
+      setSubscriptionLatest(
+        {
+          channelName: claimsInChannel[0].channel_name,
+          uri: buildURI(
+            {
+              channelName: claimsInChannel[0].channel_name,
+              claimId: claimsInChannel[0].claim_id,
+            },
+            false
+          ),
+        },
+        buildURI(
+          { contentName: claimsInChannel[0].name, claimId: claimsInChannel[0].claim_id },
+          false
+        )
+      )
+    );
+
+    // calling FETCH_CHANNEL_CLAIMS_COMPLETED after not calling STARTED
+    // means it will delete a non-existant fetchingChannelClaims[uri]
+    dispatch({
+      type: ACTIONS.FETCH_CHANNEL_CLAIMS_COMPLETED,
+      data: {
+        uri: subscriptionUri,
+        claims: claimsInChannel || [],
+        page: 1,
+      },
+    });
+  });
 };
 
 export const doChannelSubscribe = (subscription: Subscription) => (