Collections #383
dist/bundle.es.js (vendored, 34 changes)
@@ -3824,8 +3824,6 @@ function doResolveUris(uris, returnCachedClaims = false, resolveReposts = true)
    if (collectionIds.length) {
      dispatch(doFetchItemsInCollections({ collectionIds: collectionIds, pageSize: 5 }));
    }
    // now collection claims are added, get their stuff
    // if collections: doResolveCollections(claimIds)

    return result;
  });
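The hunk above has doResolveUris note which resolved claims are collections, then fetch their items with one follow-up dispatch (first five items per collection). A minimal sketch of that pattern, assuming the resolve results expose each claim directly; only doFetchItemsInCollections and the pageSize value are taken from the diff:

// Sketch only: gather collection claim ids from resolve results, then fetch
// their items with a single dispatch. The result shape here is an assumption.
function dispatchCollectionFetch(dispatch, resolveResults) {
  const collectionIds = [];
  Object.values(resolveResults).forEach(claim => {
    if (claim && claim.value_type === 'collection') {
      collectionIds.push(claim.claim_id);
    }
  });
  if (collectionIds.length) {
    dispatch(doFetchItemsInCollections({ collectionIds: collectionIds, pageSize: 5 }));
  }
}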
@@ -4242,7 +4240,6 @@ function doFetchCollectionListMine(page = 1, pageSize = 99999) {
      collectionIds: items.map(claim => claim.claim_id),
      page_size: 5
    }));
    // update or fetch collections?
  };

  const failure = error => {
@@ -4287,7 +4284,6 @@ function doClaimSearch(options = {
      pageSize: options.page_size
    }
  });
  // was return true
  return resolveInfo;
};
@@ -4373,6 +4369,10 @@ function doCollectionPublish(options, localId) {
    params['tags'] = options.tags.map(tag => tag.name);
  }

  if (options.channel_id) {
    params['channel_id'] = options.channel_id;
  }

  return new Promise(resolve => {
    dispatch({
      type: COLLECTION_PUBLISH_STARTED
@@ -4657,8 +4657,6 @@ const doFetchItemsInCollections = (resolveItemsOptions, resolveStartedCallback)
  var _ref = _asyncToGenerator$2(function* (dispatch, getState) {
    let fetchItemsForCollectionClaim = (() => {
      var _ref2 = _asyncToGenerator$2(function* (claim, pageSize) {
        // take [ {}, {} ], return {}
        // only need items [ url... ] and total_items
        const totalItems = claim.value.claims && claim.value.claims.length;
        const claimId = claim.claim_id;
        const itemOrder = claim.value.claims;
@@ -4690,21 +4688,8 @@ const doFetchItemsInCollections = (resolveItemsOptions, resolveStartedCallback)
      };

      try {
        // sdk had a strange bug that would only return so many, so this had to be batched.
        // otherwise large lists of, ~500 channels for a homepage category failed
        const batchSize = pageSize || FETCH_BATCH_SIZE;
        const batches = [];
        /*
        // this was `collection_resolve` which returns claims for collection in order
        // however, this fails when a claim is pending. :/
        for (let i = 0; i < Math.ceil(totalItems / batchSize); i++) {
          batches[i] = Lbry.collection_resolve({
            claim_id: claimId,
            page: i + 1,
            page_size: batchSize,
          });
        }
        */

        for (let i = 0; i < Math.ceil(totalItems / batchSize); i++) {
          batches[i] = lbryProxy.claim_search({
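The hunk cuts off inside the batched claim_search loop; presumably the batch promises are then awaited together and their items concatenated before the collection is stored. A rough sketch of that aggregation step (all names here are assumptions, not taken from the bundle):

// Sketch only: await every claim_search page in parallel and flatten the items.
async function resolveBatches(batches) {
  const results = await Promise.all(batches);
  return results.reduce(
    (all, result) => (result && result.items ? all.concat(result.items) : all),
    []
  );
}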
@@ -4826,7 +4811,6 @@ const doFetchItemsInCollections = (resolveItemsOptions, resolveStartedCallback)

        if (collectionItems) {
          collectionItems.forEach(function (collectionItem) {
            // here's where we would just items.push(collectionItem.permanent_url
            newItems.push(collectionItem.permanent_url);
            valueTypes.add(collectionItem.value_type);
            if (collectionItem.value.stream_type) {
@@ -6053,7 +6037,7 @@ function handleClaimAction(state, action) {
      } else {
        byId[channel.claim_id] = channel;
      }
      // Also add the permanent_url here until lighthouse returns canonical_url for search results

      byUri[channel.permanent_url] = channel.claim_id;
      byUri[channel.canonical_url] = channel.claim_id;
      newResolvingUrls.delete(channel.canonical_url);
@@ -6067,11 +6051,7 @@ function handleClaimAction(state, action) {
        byId[collection.claim_id] = collection;
      }
      byUri[url] = collection.claim_id;

      // If url isn't a canonical_url, make sure that is added too
      byUri[collection.canonical_url] = collection.claim_id;

      // Also add the permanent_url here until lighthouse returns canonical_url for search results
      byUri[collection.permanent_url] = collection.claim_id;
      newResolvingUrls.delete(collection.canonical_url);
      newResolvingUrls.delete(collection.permanent_url);
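The two handleClaimAction hunks repeat one indexing pattern: store the claim once under its claim_id, point every known URL form (the requested url, canonical_url, permanent_url) at that id, and clear those URLs from the resolving set. Isolated into a helper it looks roughly like this (state shapes as implied by the hunks; the helper itself is not part of the PR):

// Sketch only: index a resolved claim by id and by all of its known URL forms.
function indexClaim(byId, byUri, newResolvingUrls, url, claim) {
  byId[claim.claim_id] = claim;
  byUri[url] = claim.claim_id;
  // canonical and permanent forms should resolve to the same id
  byUri[claim.canonical_url] = claim.claim_id;
  byUri[claim.permanent_url] = claim.claim_id;
  newResolvingUrls.delete(claim.canonical_url);
  newResolvingUrls.delete(claim.permanent_url);
}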
@@ -6234,7 +6214,6 @@ reducers[FETCH_COLLECTION_LIST_COMPLETED] = (state, action) => {
    myCollectionClaimsSet = new Set(state.myCollectionClaims);
    claims.forEach(claim => {
      const { canonical_url: canonicalUrl, permanent_url: permanentUrl, claim_id: claimId } = claim;
      // maybe add info about items in collection

      byUri[canonicalUrl] = claimId;
      byUri[permanentUrl] = claimId;
@@ -6523,8 +6502,6 @@ reducers[COLLECTION_PUBLISH_UPDATE_FAILED] = (state, action) => {
  });
};

// COLLECTION_PUBLISH_ABANDON_...

reducers[IMPORT_CHANNEL_STARTED] = state => Object.assign({}, state, { pendingChannelImports: true });

reducers[IMPORT_CHANNEL_COMPLETED] = state => Object.assign({}, state, { pendingChannelImports: false });
@@ -7640,7 +7617,6 @@ const collectionsReducer = handleActions({
    const newUnpublishedList = Object.assign({}, unpublishedList);
    const newPendingList = Object.assign({}, pendingList);

    const isEdit = editList[claimId];
    if (localId) {
      // new publish
      newPendingList[claimId] = Object.assign({}, newUnpublishedList[localId] || {});
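In this last bundle hunk a brand-new publish is recognised by localId: the locally built collection is copied out of the unpublished list and re-keyed under the claim_id it just received. A standalone sketch of that hand-off (the final delete of the local copy is an assumption, not shown in the hunk):

// Sketch only: move a local (unpublished) collection into the pending list
// once publishing has produced a real claim_id.
function moveToPending(unpublishedList, pendingList, localId, claimId) {
  const newUnpublishedList = Object.assign({}, unpublishedList);
  const newPendingList = Object.assign({}, pendingList);
  newPendingList[claimId] = Object.assign({}, newUnpublishedList[localId] || {});
  delete newUnpublishedList[localId]; // assumption: the local copy is dropped after hand-off
  return { unpublishedList: newUnpublishedList, pendingList: newPendingList };
}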
dist/flow-typed/Collections.js (vendored, 7 changes)
@@ -1,10 +1,3 @@
declare type CollectionUpdateParams = {
  remove?: boolean,
  claims?: Array<Claim>,
  name?: string,
  order?: { from: number, to: number },
}

declare type Collection = {
  id: string,
  items: Array<?string>,
flow-typed/Collections.js (vendored, 7 changes)
@@ -1,10 +1,3 @@
declare type CollectionUpdateParams = {
  remove?: boolean,
  claims?: Array<Claim>,
  name?: string,
  order?: { from: number, to: number },
}

declare type Collection = {
  id: string,
  items: Array<?string>,
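For reference, values satisfying the two declarations might look like the following; the concrete field values are illustrative only, and Collection carries further fields beyond the two visible in the hunk.

// Illustrative values only; fields not shown in the hunk are omitted.
const update = {        // CollectionUpdateParams
  remove: false,
  name: 'favorites',
  order: { from: 2, to: 0 },
};

const collection = {    // Collection (truncated to the fields shown above)
  id: 'col-123',
  items: ['lbry://@channel#1/some-video#a', null],
};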
@@ -151,8 +151,6 @@ export function doResolveUris(
    if (collectionIds.length) {
      dispatch(doFetchItemsInCollections({ collectionIds: collectionIds, pageSize: 5 }));
    }
    // now collection claims are added, get their stuff
    // if collections: doResolveCollections(claimIds)

    return result;
  }
@@ -618,7 +616,6 @@ export function doFetchCollectionListMine(page: number = 1, pageSize: number = 9
        page_size: 5,
      })
    );
    // update or fetch collections?
  };

  const failure = error => {
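As in the bundle, the success handler for the user's collection list feeds the returned claim ids straight into doFetchItemsInCollections with a small page size, so only the first few items of each list are pulled up front. Condensed (the handler wrapper is an assumption; the dispatch body mirrors the hunk):

// Sketch only: success path of doFetchCollectionListMine.
function onCollectionListSuccess(dispatch, items) {
  dispatch(
    doFetchItemsInCollections({
      collectionIds: items.map(claim => claim.claim_id),
      page_size: 5, // only the first few items per collection are needed up front
    })
  );
}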
@@ -680,7 +677,6 @@ export function doClaimSearch(
      pageSize: options.page_size,
    },
  });
  // was return true
  return resolveInfo;
};
@@ -762,6 +758,7 @@ export function doCollectionPublish(
  const params: {
    name: string,
    bid: string,
    channel_id?: string,
    blocking?: true,
    title?: string,
    thumbnail_url?: string,
@@ -786,6 +783,10 @@ export function doCollectionPublish(
    params['tags'] = options.tags.map(tag => tag.name);
  }

  if (options.channel_id) {
    params['channel_id'] = options.channel_id;
  }

  return new Promise(resolve => {
    dispatch({
      type: ACTIONS.COLLECTION_PUBLISH_STARTED,
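Taken together, the two doCollectionPublish hunks show the params object growing field by field: the required fields come first, tags are mapped to plain names, and channel_id is attached only when the caller picked a channel. A condensed sketch limited to the fields visible in the hunks (pulling name and bid from options is an assumption):

// Sketch only: assemble the publish params shown in the hunks.
function buildCollectionPublishParams(options) {
  const params = {
    name: options.name, // assumption: required fields come straight from options
    bid: options.bid,
  };
  if (options.tags) {
    params.tags = options.tags.map(tag => tag.name);
  }
  if (options.channel_id) {
    params.channel_id = options.channel_id;
  }
  return params;
}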
@@ -120,8 +120,6 @@ export const doFetchItemsInCollections = (
  const stateAfterClaimSearch = getState();

  async function fetchItemsForCollectionClaim(claim: CollectionClaim, pageSize?: number) {
    // take [ {}, {} ], return {}
    // only need items [ url... ] and total_items
    const totalItems = claim.value.claims && claim.value.claims.length;
    const claimId = claim.claim_id;
    const itemOrder = claim.value.claims;
@@ -154,21 +152,8 @@ export const doFetchItemsInCollections = (
    };

    try {
      // sdk had a strange bug that would only return so many, so this had to be batched.
      // otherwise large lists of, ~500 channels for a homepage category failed
      const batchSize = pageSize || FETCH_BATCH_SIZE;
      const batches: Array<Promise<any>> = [];
      /*
      // this was `collection_resolve` which returns claims for collection in order
      // however, this fails when a claim is pending. :/
      for (let i = 0; i < Math.ceil(totalItems / batchSize); i++) {
        batches[i] = Lbry.collection_resolve({
          claim_id: claimId,
          page: i + 1,
          page_size: batchSize,
        });
      }
      */

      for (let i = 0; i < Math.ceil(totalItems / batchSize); i++) {
        batches[i] = Lbry.claim_search({
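The hunk ends inside the Lbry.claim_search call, so the exact parameters are not visible. A guess at the shape of each batch, assuming claim.value.claims is an ordered array of claim-id strings and that claim_ids, page, and page_size are the parameters used; none of this is confirmed by the diff:

// Guess only: build one claim_search promise per slice of the collection's claim ids.
function buildClaimSearchBatches(itemOrder, totalItems, batchSize) {
  const batches = [];
  for (let i = 0; i < Math.ceil(totalItems / batchSize); i++) {
    batches[i] = Lbry.claim_search({
      claim_ids: itemOrder.slice(i * batchSize, (i + 1) * batchSize),
      page: 1,
      page_size: batchSize,
    });
  }
  return batches;
}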
@@ -264,7 +249,6 @@ export const doFetchItemsInCollections = (

      if (collectionItems) {
        collectionItems.forEach(collectionItem => {
          // here's where we would just items.push(collectionItem.permanent_url
          newItems.push(collectionItem.permanent_url);
          valueTypes.add(collectionItem.value_type);
          if (collectionItem.value.stream_type) {
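While walking a collection's resolved items, the loop above records each item's permanent_url in order and accumulates the set of value types (and, below the cut, stream types) so the collection can later be described by what it contains. The gathering step in isolation:

// Sketch only: ordered urls plus the set of value types for a collection's items.
function summarizeCollectionItems(collectionItems) {
  const newItems = [];
  const valueTypes = new Set();
  collectionItems.forEach(collectionItem => {
    newItems.push(collectionItem.permanent_url);
    valueTypes.add(collectionItem.value_type);
  });
  return { newItems, valueTypes };
}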
@@ -162,7 +162,7 @@ function handleClaimAction(state: State, action: any): State {
      } else {
        byId[channel.claim_id] = channel;
      }
      // Also add the permanent_url here until lighthouse returns canonical_url for search results

      byUri[channel.permanent_url] = channel.claim_id;
      byUri[channel.canonical_url] = channel.claim_id;
      newResolvingUrls.delete(channel.canonical_url);
@@ -176,11 +176,7 @@ function handleClaimAction(state: State, action: any): State {
        byId[collection.claim_id] = collection;
      }
      byUri[url] = collection.claim_id;

      // If url isn't a canonical_url, make sure that is added too
      byUri[collection.canonical_url] = collection.claim_id;

      // Also add the permanent_url here until lighthouse returns canonical_url for search results
      byUri[collection.permanent_url] = collection.claim_id;
      newResolvingUrls.delete(collection.canonical_url);
      newResolvingUrls.delete(collection.permanent_url);
@@ -350,7 +346,6 @@ reducers[ACTIONS.FETCH_COLLECTION_LIST_COMPLETED] = (state: State, action: any):
    claims.forEach(claim => {
      const { meta } = claim;
      const { canonical_url: canonicalUrl, permanent_url: permanentUrl, claim_id: claimId } = claim;
      // maybe add info about items in collection

      byUri[canonicalUrl] = claimId;
      byUri[permanentUrl] = claimId;
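This reducer walks the user's returned collection claims, registers each one in the shared byUri index, and (per the surrounding code) tracks the owned collection ids in myCollectionClaimsSet. The per-claim bookkeeping, roughly (adding the id to the set is an assumption based on the set's name):

// Sketch only: per-claim step of FETCH_COLLECTION_LIST_COMPLETED.
function recordMyCollection(byUri, myCollectionClaimsSet, claim) {
  const { canonical_url: canonicalUrl, permanent_url: permanentUrl, claim_id: claimId } = claim;
  myCollectionClaimsSet.add(claimId); // assumption: ownership tracked by id
  byUri[canonicalUrl] = claimId;
  byUri[permanentUrl] = claimId;
}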
@@ -651,8 +646,6 @@ reducers[ACTIONS.COLLECTION_PUBLISH_UPDATE_FAILED] = (state: State, action: any)
  });
};

// COLLECTION_PUBLISH_ABANDON_...

reducers[ACTIONS.IMPORT_CHANNEL_STARTED] = (state: State): State =>
  Object.assign({}, state, { pendingChannelImports: true });
@@ -95,7 +95,6 @@ const collectionsReducer = handleActions(
    const newUnpublishedList = Object.assign({}, unpublishedList);
    const newPendingList = Object.assign({}, pendingList);

    const isEdit = editList[claimId];
    if (localId) {
      // new publish
      newPendingList[claimId] = Object.assign({}, newUnpublishedList[localId] || {});