Collections #383

Merged
jessopb merged 20 commits from collections into master 2021-06-08 17:51:49 +02:00
7 changed files with 13 additions and 74 deletions
Showing only changes of commit dd95916281

dist/bundle.es.js (vendored)
View file

@@ -3824,8 +3824,6 @@ function doResolveUris(uris, returnCachedClaims = false, resolveReposts = true)
if (collectionIds.length) {
dispatch(doFetchItemsInCollections({ collectionIds: collectionIds, pageSize: 5 }));
}
- // now collection claims are added, get their stuff
- // if collections: doResolveCollections(claimIds)
return result;
});
@@ -4242,7 +4240,6 @@ function doFetchCollectionListMine(page = 1, pageSize = 99999) {
collectionIds: items.map(claim => claim.claim_id),
page_size: 5
}));
- // update or fetch collections?
};
const failure = error => {
@@ -4287,7 +4284,6 @@ function doClaimSearch(options = {
pageSize: options.page_size
}
});
- // was return true
return resolveInfo;
};
@@ -4373,6 +4369,10 @@ function doCollectionPublish(options, localId) {
params['tags'] = options.tags.map(tag => tag.name);
}
+ if (options.channel_id) {
+ params['channel_id'] = options.channel_id;
+ }
return new Promise(resolve => {
dispatch({
type: COLLECTION_PUBLISH_STARTED
@@ -4657,8 +4657,6 @@ const doFetchItemsInCollections = (resolveItemsOptions, resolveStartedCallback)
var _ref = _asyncToGenerator$2(function* (dispatch, getState) {
let fetchItemsForCollectionClaim = (() => {
var _ref2 = _asyncToGenerator$2(function* (claim, pageSize) {
- // take [ {}, {} ], return {}
- // only need items [ url... ] and total_items
const totalItems = claim.value.claims && claim.value.claims.length;
const claimId = claim.claim_id;
const itemOrder = claim.value.claims;
@@ -4690,21 +4688,8 @@ const doFetchItemsInCollections = (resolveItemsOptions, resolveStartedCallback)
};
try {
- // sdk had a strange bug that would only return so many, so this had to be batched.
- // otherwise large lists of, ~500 channels for a homepage category failed
const batchSize = pageSize || FETCH_BATCH_SIZE;
const batches = [];
- /*
- // this was `collection_resolve` which returns claims for collection in order
- // however, this fails when a claim is pending. :/
- for (let i = 0; i < Math.ceil(totalItems / batchSize); i++) {
- batches[i] = Lbry.collection_resolve({
- claim_id: claimId,
- page: i + 1,
- page_size: batchSize,
- });
- }
- */
for (let i = 0; i < Math.ceil(totalItems / batchSize); i++) {
batches[i] = lbryProxy.claim_search({
@@ -4826,7 +4811,6 @@ const doFetchItemsInCollections = (resolveItemsOptions, resolveStartedCallback)
if (collectionItems) {
collectionItems.forEach(function (collectionItem) {
- // here's where we would just items.push(collectionItem.permanent_url
newItems.push(collectionItem.permanent_url);
valueTypes.add(collectionItem.value_type);
if (collectionItem.value.stream_type) {
@@ -6053,7 +6037,7 @@ function handleClaimAction(state, action) {
} else {
byId[channel.claim_id] = channel;
}
- // Also add the permanent_url here until lighthouse returns canonical_url for search results
byUri[channel.permanent_url] = channel.claim_id;
byUri[channel.canonical_url] = channel.claim_id;
newResolvingUrls.delete(channel.canonical_url);
@@ -6067,11 +6051,7 @@ function handleClaimAction(state, action) {
byId[collection.claim_id] = collection;
}
byUri[url] = collection.claim_id;
- // If url isn't a canonical_url, make sure that is added too
byUri[collection.canonical_url] = collection.claim_id;
- // Also add the permanent_url here until lighthouse returns canonical_url for search results
byUri[collection.permanent_url] = collection.claim_id;
newResolvingUrls.delete(collection.canonical_url);
newResolvingUrls.delete(collection.permanent_url);
@@ -6234,7 +6214,6 @@ reducers[FETCH_COLLECTION_LIST_COMPLETED] = (state, action) => {
myCollectionClaimsSet = new Set(state.myCollectionClaims);
claims.forEach(claim => {
const { canonical_url: canonicalUrl, permanent_url: permanentUrl, claim_id: claimId } = claim;
- // maybe add info about items in collection
byUri[canonicalUrl] = claimId;
byUri[permanentUrl] = claimId;
@@ -6523,8 +6502,6 @@ reducers[COLLECTION_PUBLISH_UPDATE_FAILED] = (state, action) => {
});
};
- // COLLECTION_PUBLISH_ABANDON_...
reducers[IMPORT_CHANNEL_STARTED] = state => Object.assign({}, state, { pendingChannelImports: true });
reducers[IMPORT_CHANNEL_COMPLETED] = state => Object.assign({}, state, { pendingChannelImports: false });
@@ -7640,7 +7617,6 @@ const collectionsReducer = handleActions({
const newUnpublishedList = Object.assign({}, unpublishedList);
const newPendingList = Object.assign({}, pendingList);
- const isEdit = editList[claimId];
if (localId) {
// new publish
newPendingList[claimId] = Object.assign({}, newUnpublishedList[localId] || {});

View file

@@ -1,10 +1,3 @@
- declare type CollectionUpdateParams = {
- remove?: boolean,
- claims?: Array<Claim>,
- name?: string,
- order?: { from: number, to: number },
- }
declare type Collection = {
id: string,
items: Array<?string>,

infinite-persistence commented 2021-06-04 14:08:06 +02:00 (Migrated from github.com)
Review

typo?

View file

@@ -1,10 +1,3 @@
- declare type CollectionUpdateParams = {
- remove?: boolean,
- claims?: Array<Claim>,
- name?: string,
- order?: { from: number, to: number },
- }
declare type Collection = {
id: string,
items: Array<?string>,

View file

@@ -151,8 +151,6 @@ export function doResolveUris(
if (collectionIds.length) {
dispatch(doFetchItemsInCollections({ collectionIds: collectionIds, pageSize: 5 }));
}
- // now collection claims are added, get their stuff
- // if collections: doResolveCollections(claimIds)
return result;
}
@@ -618,7 +616,6 @@ export function doFetchCollectionListMine(page: number = 1, pageSize: number = 9
page_size: 5,
})
);
- // update or fetch collections?
};
const failure = error => {
@@ -680,7 +677,6 @@ export function doClaimSearch(
pageSize: options.page_size,
},
});
- // was return true
return resolveInfo;
};
@@ -762,6 +758,7 @@ export function doCollectionPublish(
const params: {
name: string,
bid: string,
+ channel_id?: string,
blocking?: true,
title?: string,
thumbnail_url?: string,
@@ -786,6 +783,10 @@ export function doCollectionPublish(
params['tags'] = options.tags.map(tag => tag.name);
}
+ if (options.channel_id) {
+ params['channel_id'] = options.channel_id;
+ }
return new Promise(resolve => {
dispatch({
type: ACTIONS.COLLECTION_PUBLISH_STARTED,
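
Aside from comment cleanup, the functional change in this commit is that doCollectionPublish now forwards an optional channel_id to the SDK call, so a published list can be attributed to a channel. A minimal usage sketch, assuming doCollectionPublish is exported from the package index like the other collection actions; the store, IDs, and option values below are hypothetical, not taken from this PR:

// Sketch only: all IDs and amounts are placeholders.
import { doCollectionPublish } from 'lbry-redux';

const localId = 'local-collection-1'; // hypothetical id of the unpublished list being promoted

store.dispatch( // `store` is assumed to be an already-configured redux store
  doCollectionPublish(
    {
      name: 'favorites',            // claim name
      bid: '0.0001',                // deposit, a string per the Flow type above
      title: 'My Favorites',
      tags: [{ name: 'music' }],    // mapped to tag names before the SDK call
      channel_id: 'hypothetical-channel-claim-id', // new: copied into params['channel_id']
    },
    localId
  )
);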

View file

@@ -93,7 +93,7 @@ export const doFetchItemsInCollections = (
pageSize?: number,
},
resolveStartedCallback?: () => void
- ) => async (dispatch: Dispatch, getState: GetState) => {
+ ) => async(dispatch: Dispatch, getState: GetState) => {
/*
1) make sure all the collection claims are loaded into claims reducer, search/resolve if necessary.
2) get the item claims for each
@@ -120,8 +120,6 @@ export const doFetchItemsInCollections = (
const stateAfterClaimSearch = getState();
async function fetchItemsForCollectionClaim(claim: CollectionClaim, pageSize?: number) {
- // take [ {}, {} ], return {}
- // only need items [ url... ] and total_items
const totalItems = claim.value.claims && claim.value.claims.length;
const claimId = claim.claim_id;
const itemOrder = claim.value.claims;
@@ -154,21 +152,8 @@ export const doFetchItemsInCollections = (
};
try {
- // sdk had a strange bug that would only return so many, so this had to be batched.
- // otherwise large lists of, ~500 channels for a homepage category failed
const batchSize = pageSize || FETCH_BATCH_SIZE;
const batches: Array<Promise<any>> = [];
- /*
- // this was `collection_resolve` which returns claims for collection in order
- // however, this fails when a claim is pending. :/
- for (let i = 0; i < Math.ceil(totalItems / batchSize); i++) {
- batches[i] = Lbry.collection_resolve({
- claim_id: claimId,
- page: i + 1,
- page_size: batchSize,
- });
- }
- */
for (let i = 0; i < Math.ceil(totalItems / batchSize); i++) {
batches[i] = Lbry.claim_search({
@@ -264,7 +249,6 @@ export const doFetchItemsInCollections = (
if (collectionItems) {
collectionItems.forEach(collectionItem => {
- // here's where we would just items.push(collectionItem.permanent_url
newItems.push(collectionItem.permanent_url);
valueTypes.add(collectionItem.value_type);
if (collectionItem.value.stream_type) {
@@ -329,7 +313,7 @@ export const doFetchItemsInCollection = (
return doFetchItemsInCollections(newOptions, cb);
};
- export const doCollectionEdit = (collectionId: string, params: CollectionEditParams) => async (
+ export const doCollectionEdit = (collectionId: string, params: CollectionEditParams) => async(
dispatch: Dispatch,
getState: GetState
) => {

jessopb commented 2021-02-17 17:16:08 +01:00 (Migrated from github.com)
Review

Array< { claimId: string, items: ?Array } >
Array< { claimId: string, items: ?Array<GenericClaim> } >
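
For context on the doFetchItemsInCollections hunks above: the comments being deleted explained why collection items are fetched with batched claim_search calls instead of collection_resolve, which returns items in collection order but fails while a claim is pending, and why large lists (around 500 channels for a homepage category) had to be split into batches. A rough sketch of that batching pattern, assuming claim_search accepts a claim_ids filter plus page/page_size as the surrounding code implies; the function name and default batch size are illustrative:

// Illustrative sketch of the batched fetch; not the PR's exact code.
async function fetchClaimsInBatches(claimIds /*: Array<string> */, batchSize /*: number */ = 10) {
  const batches = [];
  for (let i = 0; i < Math.ceil(claimIds.length / batchSize); i++) {
    // one claim_search per slice of ids, issued in parallel
    batches[i] = Lbry.claim_search({
      claim_ids: claimIds.slice(i * batchSize, (i + 1) * batchSize),
      page: 1,
      page_size: batchSize,
    });
  }
  const results = await Promise.all(batches);
  // Each page resolves to a result object with an items array; flatten back into one list.
  // The caller then reorders by the collection's stored claim id order, since claim_search
  // does not guarantee collection order the way collection_resolve did.
  return results.flatMap(result => result.items);
}

The review note above appears to suggest tightening the corresponding Flow annotation from ?Array to ?Array<GenericClaim>.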

View file

@@ -162,7 +162,7 @@ function handleClaimAction(state: State, action: any): State {
} else {
byId[channel.claim_id] = channel;
}
- // Also add the permanent_url here until lighthouse returns canonical_url for search results
byUri[channel.permanent_url] = channel.claim_id;
byUri[channel.canonical_url] = channel.claim_id;
newResolvingUrls.delete(channel.canonical_url);
@@ -176,11 +176,7 @@ function handleClaimAction(state: State, action: any): State {
byId[collection.claim_id] = collection;
}
byUri[url] = collection.claim_id;
- // If url isn't a canonical_url, make sure that is added too
byUri[collection.canonical_url] = collection.claim_id;
- // Also add the permanent_url here until lighthouse returns canonical_url for search results
byUri[collection.permanent_url] = collection.claim_id;
newResolvingUrls.delete(collection.canonical_url);
newResolvingUrls.delete(collection.permanent_url);
@@ -350,7 +346,6 @@ reducers[ACTIONS.FETCH_COLLECTION_LIST_COMPLETED] = (state: State, action: any):
claims.forEach(claim => {
const { meta } = claim;
const { canonical_url: canonicalUrl, permanent_url: permanentUrl, claim_id: claimId } = claim;
- // maybe add info about items in collection
byUri[canonicalUrl] = claimId;
byUri[permanentUrl] = claimId;
@@ -651,8 +646,6 @@ reducers[ACTIONS.COLLECTION_PUBLISH_UPDATE_FAILED] = (state: State, action: any):
});
};
- // COLLECTION_PUBLISH_ABANDON_...
reducers[ACTIONS.IMPORT_CHANNEL_STARTED] = (state: State): State =>
Object.assign({}, state, { pendingChannelImports: true });
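
The removed lines in these reducer hunks were only explanatory comments; the behavior of keying byUri by both URL forms of a claim is unchanged. A tiny illustration of why both keys are kept, since search (lighthouse) results come back with a permanent_url while other callers resolve the canonical_url; the URLs and claim id below are made up:

// Made-up data, just to show the double keying in byUri.
const byUri = {};
const claimId = 'abc123def456';
byUri['lbry://@someChannel#7/some-video#a'] = claimId;                // canonical_url (short ids)
byUri['lbry://@someChannel#7c9f/some-video#abc123def456'] = claimId;  // permanent_url (full ids)

// Either URL form now resolves to the same cached claim entry:
console.log(
  byUri['lbry://@someChannel#7/some-video#a'] ===
    byUri['lbry://@someChannel#7c9f/some-video#abc123def456']
); // true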

View file

@@ -95,7 +95,6 @@ const collectionsReducer = handleActions(
const newUnpublishedList = Object.assign({}, unpublishedList);
const newPendingList = Object.assign({}, pendingList);
- const isEdit = editList[claimId];
if (localId) {
// new publish
newPendingList[claimId] = Object.assign({}, newUnpublishedList[localId] || {});