cleanup
zeppi 2021-06-01 12:19:35 -04:00 committed by jessopb
parent 7e049487c3
commit 9461cf1bee
7 changed files with 13 additions and 74 deletions

dist/bundle.es.js (vendored, 34 changed lines)

@@ -3824,8 +3824,6 @@ function doResolveUris(uris, returnCachedClaims = false, resolveReposts = true)
if (collectionIds.length) {
dispatch(doFetchItemsInCollections({ collectionIds: collectionIds, pageSize: 5 }));
}
// now collection claims are added, get their stuff
// if collections: doResolveCollections(claimIds)
return result;
});
@@ -4242,7 +4240,6 @@ function doFetchCollectionListMine(page = 1, pageSize = 99999) {
collectionIds: items.map(claim => claim.claim_id),
page_size: 5
}));
// update or fetch collections?
};
const failure = error => {
@@ -4287,7 +4284,6 @@ function doClaimSearch(options = {
pageSize: options.page_size
}
});
// was return true
return resolveInfo;
};
@@ -4373,6 +4369,10 @@ function doCollectionPublish(options, localId) {
params['tags'] = options.tags.map(tag => tag.name);
}
if (options.channel_id) {
params['channel_id'] = options.channel_id;
}
return new Promise(resolve => {
dispatch({
type: COLLECTION_PUBLISH_STARTED
@@ -4657,8 +4657,6 @@ const doFetchItemsInCollections = (resolveItemsOptions, resolveStartedCallback)
var _ref = _asyncToGenerator$2(function* (dispatch, getState) {
let fetchItemsForCollectionClaim = (() => {
var _ref2 = _asyncToGenerator$2(function* (claim, pageSize) {
// take [ {}, {} ], return {}
// only need items [ url... ] and total_items
const totalItems = claim.value.claims && claim.value.claims.length;
const claimId = claim.claim_id;
const itemOrder = claim.value.claims;
@@ -4690,21 +4688,8 @@ const doFetchItemsInCollections = (resolveItemsOptions, resolveStartedCallback)
};
try {
// sdk had a strange bug that would only return so many, so this had to be batched.
// otherwise large lists of, ~500 channels for a homepage category failed
const batchSize = pageSize || FETCH_BATCH_SIZE;
const batches = [];
/*
// this was `collection_resolve` which returns claims for collection in order
// however, this fails when a claim is pending. :/
for (let i = 0; i < Math.ceil(totalItems / batchSize); i++) {
batches[i] = Lbry.collection_resolve({
claim_id: claimId,
page: i + 1,
page_size: batchSize,
});
}
*/
for (let i = 0; i < Math.ceil(totalItems / batchSize); i++) {
batches[i] = lbryProxy.claim_search({
@@ -4826,7 +4811,6 @@ const doFetchItemsInCollections = (resolveItemsOptions, resolveStartedCallback)
if (collectionItems) {
collectionItems.forEach(function (collectionItem) {
// here's where we would just items.push(collectionItem.permanent_url
newItems.push(collectionItem.permanent_url);
valueTypes.add(collectionItem.value_type);
if (collectionItem.value.stream_type) {
@@ -6062,7 +6046,7 @@ function handleClaimAction(state, action) {
} else {
byId[channel.claim_id] = channel;
}
// Also add the permanent_url here until lighthouse returns canonical_url for search results
byUri[channel.permanent_url] = channel.claim_id;
byUri[channel.canonical_url] = channel.claim_id;
newResolvingUrls.delete(channel.canonical_url);
@@ -6076,11 +6060,7 @@ function handleClaimAction(state, action) {
byId[collection.claim_id] = collection;
}
byUri[url] = collection.claim_id;
// If url isn't a canonical_url, make sure that is added too
byUri[collection.canonical_url] = collection.claim_id;
// Also add the permanent_url here until lighthouse returns canonical_url for search results
byUri[collection.permanent_url] = collection.claim_id;
newResolvingUrls.delete(collection.canonical_url);
newResolvingUrls.delete(collection.permanent_url);
@@ -6243,7 +6223,6 @@ reducers[FETCH_COLLECTION_LIST_COMPLETED] = (state, action) => {
myCollectionClaimsSet = new Set(state.myCollectionClaims);
claims.forEach(claim => {
const { canonical_url: canonicalUrl, permanent_url: permanentUrl, claim_id: claimId } = claim;
// maybe add info about items in collection
byUri[canonicalUrl] = claimId;
byUri[permanentUrl] = claimId;
@@ -6532,8 +6511,6 @@ reducers[COLLECTION_PUBLISH_UPDATE_FAILED] = (state, action) => {
});
};
// COLLECTION_PUBLISH_ABANDON_...
reducers[IMPORT_CHANNEL_STARTED] = state => Object.assign({}, state, { pendingChannelImports: true });
reducers[IMPORT_CHANNEL_COMPLETED] = state => Object.assign({}, state, { pendingChannelImports: false });
@@ -7651,7 +7628,6 @@ const collectionsReducer = handleActions({
const newUnpublishedList = Object.assign({}, unpublishedList);
const newPendingList = Object.assign({}, pendingList);
const isEdit = editList[claimId];
if (localId) {
// new publish
newPendingList[claimId] = Object.assign({}, newUnpublishedList[localId] || {});

dist/flow-typed/Collections.js

@@ -1,10 +1,3 @@
declare type CollectionUpdateParams = {
remove?: boolean,
claims?: Array<Claim>,
name?: string,
order?: { from: number, to: number },
}
declare type Collection = {
id: string,
items: Array<?string>,

flow-typed/Collections.js

@@ -1,10 +1,3 @@
declare type CollectionUpdateParams = {
remove?: boolean,
claims?: Array<Claim>,
name?: string,
order?: { from: number, to: number },
}
declare type Collection = {
id: string,
items: Array<?string>,
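
Note: both copies of this typedef (the vendored one above and this source one) drop `CollectionUpdateParams`; only the `Collection` type survives. A shape sketch using just the fields visible in this hunk — the remaining fields are truncated here, and the values are illustrative:

// Illustrative only: a Collection value matching the visible fields.
// items is Array<?string>, so unresolved entries may be null.
const watchLater /*: Collection */ = {
  id: 'watchlater',
  items: ['lbry://@chan#1/video#2', null],
};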

src/redux/actions/claims.js

@@ -151,8 +151,6 @@ export function doResolveUris(
if (collectionIds.length) {
dispatch(doFetchItemsInCollections({ collectionIds: collectionIds, pageSize: 5 }));
}
// now collection claims are added, get their stuff
// if collections: doResolveCollections(claimIds)
return result;
}
@@ -618,7 +616,6 @@ export function doFetchCollectionListMine(page: number = 1, pageSize: number = 9
page_size: 5,
})
);
// update or fetch collections?
};
const failure = error => {
@@ -680,7 +677,6 @@ export function doClaimSearch(
pageSize: options.page_size,
},
});
// was return true
return resolveInfo;
};
@@ -762,6 +758,7 @@ export function doCollectionPublish(
const params: {
name: string,
bid: string,
channel_id?: string,
blocking?: true,
title?: string,
thumbnail_url?: string,
@@ -786,6 +783,10 @@ export function doCollectionPublish(
params['tags'] = options.tags.map(tag => tag.name);
}
if (options.channel_id) {
params['channel_id'] = options.channel_id;
}
return new Promise(resolve => {
dispatch({
type: ACTIONS.COLLECTION_PUBLISH_STARTED,
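
The lines added here make `channel_id` behave like the other optional publish params: it is copied into `params` only when set. A minimal call sketch, using only fields that appear in the params type above; the ids are placeholders:

// Publish a local collection under a channel. channel_id is optional and,
// after this change, forwarded to the SDK call only when provided.
dispatch(
  doCollectionPublish(
    {
      name: 'my-list',
      bid: '0.001',
      title: 'My List',
      channel_id: '1234...', // placeholder channel claim id
    },
    localId // id of the unpublished local collection being published
  )
);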

src/redux/actions/collections.js

@@ -93,7 +93,7 @@ export const doFetchItemsInCollections = (
pageSize?: number,
},
resolveStartedCallback?: () => void
) => async (dispatch: Dispatch, getState: GetState) => {
) => async(dispatch: Dispatch, getState: GetState) => {
/*
1) make sure all the collection claims are loaded into claims reducer, search/resolve if necessary.
2) get the item claims for each
@@ -120,8 +120,6 @@ export const doFetchItemsInCollections = (
const stateAfterClaimSearch = getState();
async function fetchItemsForCollectionClaim(claim: CollectionClaim, pageSize?: number) {
// take [ {}, {} ], return {}
// only need items [ url... ] and total_items
const totalItems = claim.value.claims && claim.value.claims.length;
const claimId = claim.claim_id;
const itemOrder = claim.value.claims;
@@ -154,21 +152,8 @@
};
try {
// sdk had a strange bug that would only return so many, so this had to be batched.
// otherwise large lists of, ~500 channels for a homepage category failed
const batchSize = pageSize || FETCH_BATCH_SIZE;
const batches: Array<Promise<any>> = [];
/*
// this was `collection_resolve` which returns claims for collection in order
// however, this fails when a claim is pending. :/
for (let i = 0; i < Math.ceil(totalItems / batchSize); i++) {
batches[i] = Lbry.collection_resolve({
claim_id: claimId,
page: i + 1,
page_size: batchSize,
});
}
*/
for (let i = 0; i < Math.ceil(totalItems / batchSize); i++) {
batches[i] = Lbry.claim_search({
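
The block deleted above recorded why this path exists: `collection_resolve` returned items in order but failed when a claim in the collection was pending, so item fetches page through `claim_search` instead. A standalone sketch of that batching pattern — the option names follow the SDK's `claim_search`; the batch size default and the flattening at the end are illustrative:

// Assumes an Lbry SDK client in scope, as in the hunk above.
async function fetchCollectionItems(itemClaimIds, batchSize = 50) {
  // 50 stands in for FETCH_BATCH_SIZE from this file.
  const batches = [];
  // One claim_search call per page, fired in parallel; single calls were not
  // enough for large lists (~500 channels for a homepage category).
  for (let i = 0; i < Math.ceil(itemClaimIds.length / batchSize); i++) {
    batches[i] = Lbry.claim_search({
      claim_ids: itemClaimIds,
      page: i + 1,
      page_size: batchSize,
    });
  }
  const pages = await Promise.all(batches);
  // Flatten page results back into one array (re-ordering to match the
  // collection's stored order happens elsewhere in this action).
  return pages.flatMap(page => page.items);
}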
@@ -264,7 +249,6 @@
if (collectionItems) {
collectionItems.forEach(collectionItem => {
// here's where we would just items.push(collectionItem.permanent_url
newItems.push(collectionItem.permanent_url);
valueTypes.add(collectionItem.value_type);
if (collectionItem.value.stream_type) {
@@ -329,7 +313,7 @@ export const doFetchItemsInCollection = (
return doFetchItemsInCollections(newOptions, cb);
};
export const doCollectionEdit = (collectionId: string, params: CollectionEditParams) => async (
export const doCollectionEdit = (collectionId: string, params: CollectionEditParams) => async(
dispatch: Dispatch,
getState: GetState
) => {

src/redux/reducers/claims.js

@@ -162,7 +162,7 @@ function handleClaimAction(state: State, action: any): State {
} else {
byId[channel.claim_id] = channel;
}
// Also add the permanent_url here until lighthouse returns canonical_url for search results
byUri[channel.permanent_url] = channel.claim_id;
byUri[channel.canonical_url] = channel.claim_id;
newResolvingUrls.delete(channel.canonical_url);
@@ -176,11 +176,7 @@ function handleClaimAction(state: State, action: any): State {
byId[collection.claim_id] = collection;
}
byUri[url] = collection.claim_id;
// If url isn't a canonical_url, make sure that is added too
byUri[collection.canonical_url] = collection.claim_id;
// Also add the permanent_url here until lighthouse returns canonical_url for search results
byUri[collection.permanent_url] = collection.claim_id;
newResolvingUrls.delete(collection.canonical_url);
newResolvingUrls.delete(collection.permanent_url);
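
What survives in this hunk is the indexing strategy: a resolved collection is keyed under the requested URL plus both of its server-side URLs, so a later lookup by any form hits the same claim. A sketch of the resulting state slice, with shortened, illustrative URLs:

// After handleClaimAction processes one collection resolve:
//   byUri: every known URI form -> claim_id
//   byId:  claim_id -> full claim object
const byUri = {
  'lbry://@chan/my-list': 'abc123',               // the url that was requested
  'lbry://@chan#1/my-list#a': 'abc123',           // canonical_url
  'lbry://@chan#1f.../my-list#ab...': 'abc123',   // permanent_url
};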
@@ -350,7 +346,6 @@ reducers[ACTIONS.FETCH_COLLECTION_LIST_COMPLETED] = (state: State, action: any):
claims.forEach(claim => {
const { meta } = claim;
const { canonical_url: canonicalUrl, permanent_url: permanentUrl, claim_id: claimId } = claim;
// maybe add info about items in collection
byUri[canonicalUrl] = claimId;
byUri[permanentUrl] = claimId;
@@ -651,8 +646,6 @@ reducers[ACTIONS.COLLECTION_PUBLISH_UPDATE_FAILED] = (state: State, action: any)
});
};
// COLLECTION_PUBLISH_ABANDON_...
reducers[ACTIONS.IMPORT_CHANNEL_STARTED] = (state: State): State =>
Object.assign({}, state, { pendingChannelImports: true });

src/redux/reducers/collections.js

@@ -95,7 +95,6 @@ const collectionsReducer = handleActions(
const newUnpublishedList = Object.assign({}, unpublishedList);
const newPendingList = Object.assign({}, pendingList);
const isEdit = editList[claimId];
if (localId) {
// new publish
newPendingList[claimId] = Object.assign({}, newUnpublishedList[localId] || {});
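
The `localId` check kept here is the fork between a first-time publish and an edit. A hedged sketch of the branch — only the lines visible in this hunk are taken as given; the `else` side and the delete are assumptions about the surrounding code:

if (localId) {
  // New publish: re-key the locally stored (unpublished) collection under
  // the claim id returned by the SDK and park it on the pending list.
  newPendingList[claimId] = Object.assign({}, newUnpublishedList[localId] || {});
  delete newUnpublishedList[localId]; // assumption: the local copy is retired here
} else if (isEdit) {
  // Assumption: an edit of an already-published collection stages the
  // edited copy as pending under the same claim id.
  newPendingList[claimId] = Object.assign({}, editList[claimId]);
}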