// @flow
import * as ACTIONS from 'constants/action_types';
import { v4 as uuid } from 'uuid';
import Lbry from 'lbry';
import { doClaimSearch, doAbandonClaim } from 'redux/actions/claims';
import { selectClaimForClaimId, selectPermanentUrlForUri } from 'redux/selectors/claims';
import {
  selectCollectionForId,
  // selectPublishedCollectionForId, // for "save" or "copy" action
  selectPublishedCollectionForId,
  selectUnpublishedCollectionForId,
  selectEditedCollectionForId,
  selectHasItemsInQueue,
} from 'redux/selectors/collections';
import * as COLS from 'constants/collections';
import { isPermanentUrl } from 'util/claim';
import { parseClaimIdFromPermanentUrl } from 'util/url';

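// Number of items requested per claim_search call when resolving a collection's
// items (can be overridden by passing `pageSize`).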
const FETCH_BATCH_SIZE = 50;

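// Create a new local (unpublished) collection. The id starts as a uuid and becomes
// a claimId if the collection is later published. The optional `cb` receives the
// new id before the COLLECTION_NEW action is dispatched.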
export const doLocalCollectionCreate = (params: CollectionCreateParams, cb?: (id: any) => void) => (
  dispatch: Dispatch
) => {
  const { items } = params;
  const id = uuid();

  if (cb) cb(id);

  return dispatch({
    type: ACTIONS.COLLECTION_NEW,
    data: {
      entry: {
        id: id, // start with a uuid, this becomes a claimId after publish
        items: items || [],
        ...params,
      },
    },
  });
};

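// Delete a collection. If a published claim exists for it, abandon the claim and
// pass the local removal as the callback; otherwise remove the local entry
// (from the slice given by `colKey`) directly.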
export const doCollectionDelete = (id: string, colKey: ?string = undefined) => (
  dispatch: Dispatch,
  getState: GetState
) => {
  const state = getState();
  const claim = selectClaimForClaimId(state, id);
  const collectionDelete = () =>
    dispatch({
      type: ACTIONS.COLLECTION_DELETE,
      data: {
        id: id,
        collectionKey: colKey,
      },
    });

  if (claim) {
    return dispatch(doAbandonClaim(claim, collectionDelete));
  }

  return collectionDelete();
};

// Given a collection, save its collectionId to be resolved and displayed in Library
// export const doCollectionSave = (
//   id: string,
// ) => (dispatch: Dispatch) => {
//   return dispatch({
//     type: ACTIONS.COLLECTION_SAVE,
//     data: {
//       id: id,
//     },
//   });
// };

// Given a collection and name, copy it to a local private collection with a name
// export const doCollectionCopy = (
//   id: string,
// ) => (dispatch: Dispatch) => {
//   return dispatch({
//     type: ACTIONS.COLLECTION_COPY,
//     data: {
//       id: id,
//     },
//   });
// };

function isPrivateCollectionId(collectionId: string) {
  // Private (unpublished) collections use UUIDs as their id.
  return collectionId.includes('-');
}

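// Batch-resolve the items for the given collections. Handles both published
// collection claims and private (uuid-based) collections.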
export const doFetchItemsInCollections = (
  resolveItemsOptions: {
    collectionIds: Array<string>,
    pageSize?: number,
  },
  resolveStartedCallback?: () => void
) => async (dispatch: Dispatch, getState: GetState) => {
  /*
  1) make sure all the collection claims are loaded into claims reducer, search/resolve if necessary.
  2) get the item claims for each
  3) format and make sure they're in the same order as in the claim
  4) Build the collection objects and update collections reducer
  5) Update redux claims reducer
  */
  let state = getState();
  const { collectionIds, pageSize } = resolveItemsOptions;

  dispatch({
    type: ACTIONS.COLLECTION_ITEMS_RESOLVE_STARTED,
    data: { ids: collectionIds },
  });

  if (resolveStartedCallback) resolveStartedCallback();

  const privateCollectionIds = [];
  const collectionIdsToSearch = [];

  collectionIds.forEach((id) => {
    if (isPrivateCollectionId(id)) {
      privateCollectionIds.push(id);
    } else if (!state.claims.byId[id]) {
      collectionIdsToSearch.push(id);
    }
  });

  if (collectionIdsToSearch.length) {
    // TODO: this might fail if there are >50 collections due to the claim_search
    // limitation. The `useAutoPagination` parameter might slow things down
    // because it is not parallel, so maybe a `Promise.all` is needed here.
    // But leaving as-is for now.
    await dispatch(doClaimSearch({ claim_ids: collectionIdsToSearch, page: 1, page_size: 9999 }));
  }

  const stateAfterClaimSearch = getState();

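  // Resolve the claims for a single collection by batching claim_search calls
  // over the ordered item ids, then re-assembling the results in list order.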
  async function fetchItemsForCollectionClaim(
    collectionId: string,
    totalItems: number,
    itemIdsInOrder: Array<string>,
    pageSize?: number
  ) {
    const sortResults = (items: Array<Claim>, claimList) => {
      const newItems: Array<Claim> = [];
      claimList.forEach((id) => {
        const index = items.findIndex((i) => i.claim_id === id);
        if (index >= 0) {
          newItems.push(items[index]);
        }
      });
      /*
        This will return newItems[] of length less than total_items below
        if one or more of the claims has been abandoned. That's ok for now.
      */
      return newItems;
    };

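    // Flatten the batched claim_search results back into a single list sorted to
    // match claimList. Note: total_items simply ends up as the value reported by
    // the last batch.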
    const mergeBatches = (
      arrayOfResults: Array<{ items: Array<Claim>, total_items: number }>,
      claimList: Array<string>
    ) => {
      const mergedResults: { items: Array<Claim>, total_items: number } = {
        items: [],
        total_items: 0,
      };
      arrayOfResults.forEach((result) => {
        mergedResults.items = mergedResults.items.concat(result.items);
        mergedResults.total_items = result.total_items;
      });

      mergedResults.items = sortResults(mergedResults.items, claimList);
      return mergedResults;
    };

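    // Kick off one claim_search per batch of ids; the batches run in parallel and
    // are awaited together below.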
    try {
      const batchSize = pageSize || FETCH_BATCH_SIZE;
      const batches: Array<Promise<any>> = [];

      for (let i = 0; i < Math.ceil(totalItems / batchSize); i++) {
        batches[i] = Lbry.claim_search({
          claim_ids: itemIdsInOrder.slice(i * batchSize, (i + 1) * batchSize),
          page: 1,
          page_size: batchSize,
          no_totals: true,
        });
      }
      const itemsInBatches = await Promise.all(batches);
      const result = mergeBatches(itemsInBatches, itemIdsInOrder);

      // $FlowFixMe
      const itemsById: { claimId: string, items?: ?Array<GenericClaim> } = { claimId: collectionId };
      if (result.items) {
        itemsById.items = result.items;
      } else {
        itemsById.items = null;
      }
      return itemsById;
    } catch (e) {
      return {
        claimId: collectionId,
        items: null,
      };
    }
  }

  const invalidCollectionIds = [];
  const promisedCollectionItemFetches = [];

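  // Queue one item-fetch per collection. Private (uuid) collections derive their
  // item claim ids from the stored permanent urls; published collections read them
  // from the claim's value.claims list.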
  collectionIds.forEach((collectionId) => {
    if (isPrivateCollectionId(collectionId)) {
      const collection = selectCollectionForId(state, collectionId);
      if (collection?.items.length > 0) {
        promisedCollectionItemFetches.push(
          fetchItemsForCollectionClaim(
            collectionId,
            collection.items.length,
            collection.items.map((url) => parseClaimIdFromPermanentUrl(url, 'junk')),
            pageSize
          )
        );
      }
    } else {
      const claim = selectClaimForClaimId(stateAfterClaimSearch, collectionId);
      if (!claim) {
        invalidCollectionIds.push(collectionId);
      } else {
        promisedCollectionItemFetches.push(
          fetchItemsForCollectionClaim(
            collectionId,
            claim.value.claims && claim.value.claims.length,
            claim.value.claims,
            pageSize
          )
        );
      }
    }
  });

  // $FlowFixMe
  const collectionItemsById: Array<{
    claimId: string,
    items: ?Array<GenericClaim>,
  }> = await Promise.all(promisedCollectionItemFetches);

  const newCollectionObjectsById = {};
  const resolvedItemsByUrl = {};
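  // For published collections, rebuild the collection objects from the resolved
  // claims. Private (uuid) collections need no rebuilding here; their resolved
  // items are picked up by the RESOLVE_URIS_COMPLETED dispatch below.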
  collectionItemsById.forEach((entry) => {
    // $FlowFixMe
    const collectionItems: Array<any> = entry.items;
    const collectionId = entry.claimId;

    if (isPrivateCollectionId(collectionId) && collectionItems) {
      // Nothing to do for now. We are only interested in getting the resolved
      // data for each item in the private collection.
    } else if (collectionItems) {
      const claim = selectClaimForClaimId(stateAfterClaimSearch, collectionId);

      const editedCollection = selectEditedCollectionForId(stateAfterClaimSearch, collectionId);
      const { name, timestamp, value } = claim || {};
      const { title, description, thumbnail } = value;
      const valueTypes = new Set();
      const streamTypes = new Set();

      let newItems = [];
      let isPlaylist;

      if (collectionItems) {
        collectionItems.forEach((collectionItem) => {
          newItems.push(collectionItem.permanent_url);
          valueTypes.add(collectionItem.value_type);
          if (collectionItem.value.stream_type) {
            streamTypes.add(collectionItem.value.stream_type);
          }
          resolvedItemsByUrl[collectionItem.canonical_url] = collectionItem;
        });
        isPlaylist =
          valueTypes.size === 1 &&
          valueTypes.has('stream') &&
          ((streamTypes.size === 1 && (streamTypes.has('audio') || streamTypes.has('video'))) ||
            (streamTypes.size === 2 && streamTypes.has('audio') && streamTypes.has('video')));
      }

      newCollectionObjectsById[collectionId] = {
        items: newItems,
        id: collectionId,
        name: title || name,
        itemCount: claim.value.claims.length,
        type: isPlaylist ? 'playlist' : 'collection',
        updatedAt: timestamp,
        description,
        thumbnail,
      };

      if (editedCollection && timestamp > editedCollection['updatedAt']) {
        dispatch({
          type: ACTIONS.COLLECTION_DELETE,
          data: {
            id: collectionId,
            collectionKey: 'edited',
          },
        });
      }
    } else {
      invalidCollectionIds.push(collectionId);
    }
  });

  const resolveInfo: ClaimActionResolveInfo = {};

  const resolveReposts = true;

  collectionItemsById.forEach((collection) => {
    // GenericClaim type probably needs to be updated to avoid this "Any"
    collection.items &&
      collection.items.forEach((result: any) => {
        result = { [result.canonical_url]: result };
        processResult(result, resolveInfo, resolveReposts);
      });
  });

  dispatch({
    type: ACTIONS.RESOLVE_URIS_COMPLETED,
    data: { resolveInfo },
  });

  dispatch({
    type: ACTIONS.COLLECTION_ITEMS_RESOLVE_COMPLETED,
    data: {
      resolvedPrivateCollectionIds: privateCollectionIds,
      resolvedCollections: newCollectionObjectsById,
      failedCollectionIds: invalidCollectionIds,
    },
  });
};

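// Normalize a resolved claim entry (keyed by canonical url) into the
// { stream, channel, claimsInChannel, collection } shape that gets dispatched
// with RESOLVE_URIS_COMPLETED above.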
function processResult(result, resolveInfo = {}, checkReposts = false) {
  const fallbackResolveInfo = {
    stream: null,
    claimsInChannel: null,
    channel: null,
  };

  Object.entries(result).forEach(([uri, uriResolveInfo]) => {
    // Flow has terrible Object.entries support
    // https://github.com/facebook/flow/issues/2221
    if (uriResolveInfo) {
      if (uriResolveInfo.error) {
        // $FlowFixMe
        resolveInfo[uri] = { ...fallbackResolveInfo };
      } else {
        let result = {};
        if (uriResolveInfo.value_type === 'channel') {
          result.channel = uriResolveInfo;
          // $FlowFixMe
          result.claimsInChannel = uriResolveInfo.meta.claims_in_channel;
        } else if (uriResolveInfo.value_type === 'collection') {
          result.collection = uriResolveInfo;
          // $FlowFixMe
        } else {
          result.stream = uriResolveInfo;
          if (uriResolveInfo.signing_channel) {
            result.channel = uriResolveInfo.signing_channel;
            result.claimsInChannel =
              (uriResolveInfo.signing_channel.meta && uriResolveInfo.signing_channel.meta.claims_in_channel) || 0;
          }
        }
        // $FlowFixMe
        resolveInfo[uri] = result;
      }
    }
  });
}

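// Convenience wrapper around doFetchItemsInCollections for a single collection,
// e.g. dispatch(doFetchItemsInCollection({ collectionId, pageSize: 50 })).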
export const doFetchItemsInCollection = (options: { collectionId: string, pageSize?: number }, cb?: () => void) => {
  const { collectionId, pageSize } = options;
  const newOptions: { collectionIds: Array<string>, pageSize?: number } = {
    collectionIds: [collectionId],
  };
  if (pageSize) newOptions.pageSize = pageSize;
  return doFetchItemsInCollections(newOptions, cb);
};

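// Edit a collection: add or remove uris, reorder items, or update the name,
// description, and thumbnail. `remove: true` with `uris` removes those uris;
// `remove: true` without `uris` clears the list. Edits to the queue dispatch
// QUEUE_EDIT instead of COLLECTION_EDIT.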
export const doCollectionEdit = (collectionId: string, params: CollectionEditParams) => (
  dispatch: Dispatch,
  getState: GetState
) => {
  const state = getState();
  const collection: Collection = selectCollectionForId(state, collectionId);

  if (!collection) {
    return dispatch({
      type: ACTIONS.COLLECTION_ERROR,
      data: { message: 'collection does not exist' },
    });
  }

  const editedCollection: Collection = selectEditedCollectionForId(state, collectionId);
  const unpublishedCollection: Collection = selectUnpublishedCollectionForId(state, collectionId);
  const publishedCollection: Collection = selectPublishedCollectionForId(state, collectionId); // needs to be published only

  const { uris: anyUris, remove, order, type } = params;

  // -- sanitization --
  // only permanent urls can be added to collections
  let uris;

  if (anyUris) {
    uris = [];

    anyUris.forEach(async (uri) => {
      // related to selectBrokenUrlsForCollectionId
      const isDeletingBrokenUris = typeof uri !== 'string';

      // $FlowFixMe
      if (isPermanentUrl(uri) || isDeletingBrokenUris) return uris.push(uri);

      const url = selectPermanentUrlForUri(state, uri);
      // $FlowFixMe
      return uris.push(url);
    });
  }

  // -------------------

  const collectionType = type || collection.type;
  const currentUrls = collection.items ? collection.items.concat() : [];
  let newItems = currentUrls;

  // Passed uris to add/remove:
  if (uris) {
    if (remove) {
      // Filters (removes) the passed uris from the current list items
      // $FlowFixMe
      newItems = currentUrls.filter((url) => url && !uris?.includes(url));
    } else {
      // Pushes (adds to the end) the passed uris to the current list items
      // (only if item not already in currentUrls, avoid duplicates)
      uris.forEach((url) => !currentUrls.includes(url) && newItems.push(url));
    }
  } else if (remove) {
    // no uris and remove === true: clear the list
    newItems = [];
  }

  // Passed an ordering to change: (doesn't need the uris here since
  // the items are already on the list)
  if (order) {
    const [movedItem] = currentUrls.splice(order.from, 1);
    currentUrls.splice(order.to, 0, movedItem);
  }

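  // Decide which collection slice the edit belongs to, in order of precedence:
  // queue, edited (published collections being modified), built-in, unpublished.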
  const isQueue = collectionId === COLS.QUEUE_ID;
  const collectionKey =
    (isQueue && COLS.QUEUE_ID) ||
    ((editedCollection || publishedCollection) && COLS.COL_KEY_EDITED) ||
    (COLS.BUILTIN_PLAYLISTS.includes(collectionId) && COLS.COL_KEY_BUILTIN) ||
    (unpublishedCollection && COLS.COL_KEY_UNPUBLISHED);

  return dispatch({
    type: isQueue ? ACTIONS.QUEUE_EDIT : ACTIONS.COLLECTION_EDIT,
    data: {
      collectionKey,
      collection: {
        items: newItems,
        id: collectionId,
        type: collectionType,
        name: params.name || collection.name,
        description: params.description || collection.description,
        thumbnail: params.thumbnail || collection.thumbnail,
      },
    },
  });
};

export const doClearEditsForCollectionid = (collectionId: string) => (dispatch: Dispatch) =>
  dispatch({ type: ACTIONS.COLLECTION_EDIT, data: { collectionKey: COLS.COL_KEY_EDITED, collectionId } });

export const doClearQueueList = () => (dispatch: Dispatch, getState: GetState) => {
  const state = getState();
  const hasItemsInQueue = selectHasItemsInQueue(state);

  if (hasItemsInQueue) {
    return dispatch(doCollectionEdit(COLS.QUEUE_ID, { remove: true, type: 'playlist' }));
  }
};

export const doClearCollectionErrors = () => (dispatch: Dispatch) =>
  dispatch({ type: ACTIONS.CLEAR_COLLECTION_ERRORS });