Compare commits

..

20 commits

Author SHA1 Message Date
zeppi
5cd9e7601c bugfix 2021-06-08 10:14:36 -04:00
zeppi
97cd2dbd6a refactor select collection index / next 2021-06-07 16:21:31 -04:00
zeppi
6d160b3d14 cleanup 2021-06-04 10:08:43 -04:00
zeppi
e4fc01fc72 fix crash on abandoned collection claim 2021-06-04 09:22:54 -04:00
zeppi
d71b9a351c fix sync bringing back unpublished 2021-06-03 14:03:33 -04:00
zeppi
dc19ecb77e bugfix 2021-06-02 15:20:55 -04:00
zeppi
dd95916281 cleanup
cleanup

cleanup
2021-06-01 12:42:15 -04:00
zeppi
9e802f47c4 fix sync 2021-06-01 09:09:21 -04:00
zeppi
ff16a0c8a4 finalize collections sync keys 2021-06-01 08:12:02 -04:00
zeppi
6228a2dce1 fix tags bug, flow 2021-05-30 18:26:53 -04:00
zeppi
09699be25c return new collection on publish 2021-05-28 08:37:32 -04:00
zeppi
25acc1289c handle collection claim delete 2021-05-26 11:36:17 -04:00
zeppi
d2d9b037f0 refactor fetch to fix pending 2021-05-25 08:59:56 -04:00
zeppi
bcd8fd1005 thumb param 2021-05-19 15:47:23 -04:00
zeppi
d4fba29fc5 collections length 2021-05-19 11:36:11 -04:00
zeppi
79519a3ea1 prefer title for collection name on resolve 2021-05-13 14:06:45 -04:00
zeppi
15eebee694 pending, edit fixes, support collectionCount 2021-05-12 15:11:04 -04:00
zeppi
a1dce5581c make edits work 2021-05-06 18:41:02 -04:00
zeppi
211266cdd8 fix pending, support new collection add ui 2021-04-28 10:29:30 -04:00
zeppi
f9f53af06e wip
wip

clean

clean

review

wip

wallet sync

wip

collection publishing

build

refactor, publishing, pending, editing

wip

wip

fetch collections on resolve

select collections or playlists

build

return edit success

fix collection claimId selector

small rename

flow type fixes

collection edit params type param and flowtypes
2021-04-28 10:28:23 -04:00
23 changed files with 400 additions and 766 deletions

499 dist/bundle.es.js vendored

File diff suppressed because it is too large.

3 flow-typed/Lbry.js vendored
View file

@ -75,7 +75,7 @@ declare type BalanceResponse = {
declare type ResolveResponse = {
// Keys are the url(s) passed to resolve
[string]: { error?: {}, stream?: StreamClaim, channel?: ChannelClaim, collection?: CollectionClaim, claimsInChannel?: number },
[string]: { error?: {}, stream?: StreamClaim, channel?: ChannelClaim, claimsInChannel?: number },
};
declare type GetResponse = FileListItem & { error?: string };
@ -351,7 +351,6 @@ declare type LbryTypes = {
address_unused: (params: {}) => Promise<string>, // New address
address_list: (params: {}) => Promise<string>,
transaction_list: (params: {}) => Promise<TxListResponse>,
txo_list: (params: {}) => Promise<any>,
// Sync
sync_hash: (params: {}) => Promise<string>,

View file

@ -1,5 +0,0 @@
// @flow
declare module '@ungap/from-entries' {
declare module.exports: any;
}

View file

@ -29,7 +29,6 @@
"test": "jest"
},
"dependencies": {
"@ungap/from-entries": "^0.2.1",
"proxy-polyfill": "0.1.6",
"reselect": "^3.0.0",
"uuid": "^8.3.1"

View file

@ -8,8 +8,3 @@ export const WATCH_LATER_ID = 'watchlater';
export const FAVORITES_ID = 'favorites';
export const FAVORITE_CHANNELS_ID = 'favoriteChannels';
export const BUILTIN_LISTS = [WATCH_LATER_ID, FAVORITES_ID, FAVORITE_CHANNELS_ID];
export const COL_KEY_EDITED = 'edited';
export const COL_KEY_UNPUBLISHED = 'unpublished';
export const COL_KEY_PENDING = 'pending';
export const COL_KEY_SAVED = 'saved';

View file

@ -23,7 +23,7 @@ export const INSTANT_PURCHASE_MAX = 'instant_purchase_max';
export const THEME = 'theme';
export const THEMES = 'themes';
export const AUTOMATIC_DARK_MODE_ENABLED = 'automatic_dark_mode_enabled';
export const AUTOPLAY_MEDIA = 'autoplay';
export const AUTOPLAY = 'autoplay';
export const AUTOPLAY_NEXT = 'autoplay_next';
export const OS_NOTIFICATIONS_ENABLED = 'os_notifications_enabled';
export const AUTO_DOWNLOAD = 'auto_download';
@ -39,8 +39,6 @@ export const ENABLE_PUBLISH_PREVIEW = 'enable-publish-preview';
export const TILE_LAYOUT = 'tile_layout';
export const VIDEO_THEATER_MODE = 'video_theater_mode';
export const VIDEO_PLAYBACK_RATE = 'video_playback_rate';
export const CUSTOM_COMMENTS_SERVER_ENABLED = 'custom_comments_server_enabled';
export const CUSTOM_COMMENTS_SERVER_URL = 'custom_comments_server_url';
// mobile settings
export const BACKGROUND_PLAY_ENABLED = 'backgroundPlayEnabled';

View file

@ -21,12 +21,10 @@ export const CLIENT_SYNC_KEYS = [
SETTINGS.INSTANT_PURCHASE_ENABLED,
SETTINGS.INSTANT_PURCHASE_MAX,
SETTINGS.THEME,
SETTINGS.AUTOPLAY_MEDIA,
SETTINGS.AUTOPLAY_NEXT,
SETTINGS.AUTOPLAY,
SETTINGS.HIDE_BALANCE,
SETTINGS.HIDE_SPLASH_ANIMATION,
SETTINGS.FLOATING_PLAYER,
SETTINGS.DARK_MODE_TIMES,
SETTINGS.AUTOMATIC_DARK_MODE_ENABLED,
SETTINGS.LANGUAGE,
];

View file

@ -52,8 +52,6 @@ export {
isURIClaimable,
isNameValid,
convertToShareLink,
splitBySeparator,
isURIEqual,
} from 'lbryURI';
// middlware
@ -176,14 +174,12 @@ export {
makeSelectMyPublishedCollectionForId,
makeSelectUnpublishedCollectionForId,
makeSelectCollectionForId,
makeSelectClaimUrlInCollection,
makeSelectUrlsForCollectionId,
makeSelectClaimIdsForCollectionId,
makeSelectNameForCollectionId,
makeSelectCountForCollectionId,
makeSelectIsResolvingCollectionForId,
makeSelectIndexForUrlInCollection,
makeSelectPreviousUrlForCollectionAndUrl,
makeSelectNextUrlForCollectionAndUrl,
makeSelectCollectionForIdHasClaimUrl,
} from 'redux/selectors/collections';
@ -213,6 +209,7 @@ export {
makeSelectTotalItemsForChannel,
makeSelectTotalPagesForChannel,
makeSelectNsfwCountFromUris,
makeSelectNsfwCountForChannel,
makeSelectOmittedCountForChannel,
makeSelectClaimIsNsfw,
makeSelectChannelForClaimUri,
@ -220,6 +217,7 @@ export {
makeSelectMyChannelPermUrlForName,
makeSelectClaimIsPending,
makeSelectReflectingClaimForUri,
makeSelectClaimsInChannelForCurrentPageState,
makeSelectShortUrlForUri,
makeSelectCanonicalUrlForUri,
makeSelectPermanentUrlForUri,
@ -284,7 +282,6 @@ export {
selectUpdateCollectionError,
selectCreatingCollection,
selectCreateCollectionError,
makeSelectClaimIdIsPending,
} from 'redux/selectors/claims';
export {

View file

@ -117,7 +117,6 @@ const Lbry: LbryTypes = {
utxo_release: (params = {}) => daemonCallWithResult('utxo_release', params),
support_abandon: (params = {}) => daemonCallWithResult('support_abandon', params),
purchase_list: (params = {}) => daemonCallWithResult('purchase_list', params),
txo_list: (params = {}) => daemonCallWithResult('txo_list', params),
sync_hash: (params = {}) => daemonCallWithResult('sync_hash', params),
sync_apply: (params = {}) => daemonCallWithResult('sync_apply', params),

View file

@ -4,7 +4,7 @@ const channelNameMinLength = 1;
const claimIdMaxLength = 40;
// see https://spec.lbry.com/#urls
export const regexInvalidURI = /[ =&#:$@%?;/\\"<>%{}|^~[\]`\u{0000}-\u{0008}\u{000b}-\u{000c}\u{000e}-\u{001F}\u{D800}-\u{DFFF}\u{FFFE}-\u{FFFF}]/u;
export const regexInvalidURI = /[ =&#:$@%?;/\\"<>%\{\}|^~[\]`\u{0000}-\u{0008}\u{000b}-\u{000c}\u{000e}-\u{001F}\u{D800}-\u{DFFF}\u{FFFE}-\u{FFFF}]/u;
export const regexAddress = /^(b|r)(?=[^0OIl]{32,33})[0-9A-Za-z]{32,33}$/;
const regexPartProtocol = '^((?:lbry://)?)';
const regexPartStreamOrChannelName = '([^:$#/]*)';
@ -12,11 +12,6 @@ const regexPartModifierSeparator = '([:$#]?)([^/]*)';
const queryStringBreaker = '^([\\S]+)([?][\\S]*)';
const separateQuerystring = new RegExp(queryStringBreaker);
const MOD_SEQUENCE_SEPARATOR = '*';
const MOD_CLAIM_ID_SEPARATOR_OLD = '#';
const MOD_CLAIM_ID_SEPARATOR = ':';
const MOD_BID_POSITION_SEPARATOR = '$';
/**
* Parses a LBRY name into its component parts. Throws errors with user-friendly
* messages for invalid names.
@ -149,11 +144,11 @@ function parseURIModifier(modSeperator: ?string, modValue: ?string) {
throw new Error(__(`No modifier provided after separator %modSeperator%.`, { modSeperator }));
}
if (modSeperator === MOD_CLAIM_ID_SEPARATOR || MOD_CLAIM_ID_SEPARATOR_OLD) {
if (modSeperator === '#') {
claimId = modValue;
} else if (modSeperator === MOD_SEQUENCE_SEPARATOR) {
} else if (modSeperator === ':') {
claimSequence = modValue;
} else if (modSeperator === MOD_BID_POSITION_SEPARATOR) {
} else if (modSeperator === '$') {
bidPosition = modValue;
}
}
@ -325,22 +320,3 @@ export function convertToShareLink(URL: string) {
'https://open.lbry.com/'
);
}
export function splitBySeparator(uri: string) {
const protocolLength = 7;
return uri.startsWith('lbry://') ? uri.slice(protocolLength).split(/[#:*]/) : uri.split(/#:\*\$/);
}
export function isURIEqual(uriA: string, uriB: string) {
const parseA = parseURI(normalizeURI(uriA));
const parseB = parseURI(normalizeURI(uriB));
if (parseA.isChannel) {
if (parseB.isChannel && parseA.channelClaimId === parseB.channelClaimId) {
return true;
}
} else if (parseA.streamClaimId === parseB.streamClaimId) {
return true;
} else {
return false;
}
}

View file

@ -10,7 +10,6 @@ import {
selectClaimsByUri,
selectMyChannelClaims,
selectPendingIds,
selectPendingClaimsById,
} from 'redux/selectors/claims';
import { doFetchTxoPage } from 'redux/actions/wallet';
@ -21,7 +20,6 @@ import { createNormalizedClaimSearchKey } from 'util/claim';
import { PAGE_SIZE } from 'constants/claim';
import {
selectPendingCollections,
makeSelectClaimIdsForCollectionId,
} from 'redux/selectors/collections';
import {
doFetchItemsInCollection,
@ -29,9 +27,6 @@ import {
doCollectionDelete,
} from 'redux/actions/collections';
let onChannelConfirmCallback;
let checkPendingInterval;
export function doResolveUris(
uris: Array<string>,
returnCachedClaims: boolean = false,
@ -407,7 +402,7 @@ export function doClearChannelErrors() {
};
}
export function doCreateChannel(name: string, amount: number, optionalParams: any, onConfirm: any) {
export function doCreateChannel(name: string, amount: number, optionalParams: any, cb: any) {
return (dispatch: Dispatch) => {
dispatch({
type: ACTIONS.CREATE_CHANNEL_STARTED,
@ -474,7 +469,7 @@ export function doCreateChannel(name: string, amount: number, optionalParams: an
claims: [channelClaim],
},
});
dispatch(doCheckPendingClaims(onConfirm));
dispatch(doCheckPendingClaims(cb));
return channelClaim;
})
.catch(error => {
@ -831,22 +826,18 @@ export function doCollectionPublish(
};
}
export function doCollectionPublishUpdate(
options: {
bid?: string,
blocking?: true,
title?: string,
thumbnail_url?: string,
description?: string,
claim_id: string,
tags?: Array<Tag>,
languages?: Array<string>,
claims?: Array<string>,
channel_id?: string,
},
isBackgroundUpdate?: boolean
) {
return (dispatch: Dispatch, getState: GetState): Promise<any> => {
export function doCollectionPublishUpdate(options: {
bid?: string,
blocking?: true,
title?: string,
thumbnail_url?: string,
description?: string,
claim_id: string,
tags?: Array<Tag>,
languages?: Array<string>,
claims?: Array<string>,
}) {
return (dispatch: Dispatch): Promise<any> => {
// TODO: implement one click update
const updateParams: {
@ -854,49 +845,32 @@ export function doCollectionPublishUpdate(
blocking?: true,
title?: string,
thumbnail_url?: string,
channel_id?: string,
description?: string,
claim_id: string,
tags?: Array<string>,
languages?: Array<string>,
claims?: Array<string>,
clear_claims: boolean,
replace?: boolean,
} = isBackgroundUpdate
? {
blocking: true,
claim_id: options.claim_id,
clear_claims: true,
}
: {
bid: creditsToString(options.bid),
title: options.title,
thumbnail_url: options.thumbnail_url,
description: options.description,
tags: [],
languages: options.languages || [],
locations: [],
blocking: true,
claim_id: options.claim_id,
clear_claims: true,
replace: true,
};
if (isBackgroundUpdate && updateParams.claim_id) {
const state = getState();
updateParams['claims'] = makeSelectClaimIdsForCollectionId(updateParams.claim_id)(state);
} else if (options.claims) {
updateParams['claims'] = options.claims;
}
} = {
bid: creditsToString(options.bid),
title: options.title,
thumbnail_url: options.thumbnail_url,
description: options.description,
tags: [],
languages: options.languages || [],
locations: [],
blocking: true,
claim_id: options.claim_id,
clear_claims: true,
};
if (options.tags) {
updateParams['tags'] = options.tags.map(tag => tag.name);
}
if (options.channel_id) {
updateParams['channel_id'] = options.channel_id;
if (options.claims) {
updateParams['claims'] = options.claims;
}
return new Promise(resolve => {
dispatch({
type: ACTIONS.COLLECTION_PUBLISH_UPDATE_STARTED,
@ -921,6 +895,7 @@ export function doCollectionPublishUpdate(
},
});
dispatch(doCheckPendingClaims());
dispatch(doFetchCollectionListMine(1, 10));
return resolve(collectionClaim);
}
@ -1000,71 +975,52 @@ export function doPurchaseList(page: number = 1, pageSize: number = PAGE_SIZE) {
};
}
export const doCheckPendingClaims = (onChannelConfirmed: Function) => (
export const doCheckPendingClaims = (onConfirmed: Function) => (
dispatch: Dispatch,
getState: GetState
) => {
if (onChannelConfirmed) {
onChannelConfirmCallback = onChannelConfirmed;
}
clearInterval(checkPendingInterval);
const checkTxoList = () => {
let claimCheckInterval;
const checkClaimList = () => {
const state = getState();
const pendingById = Object.assign({}, selectPendingClaimsById(state));
const pendingTxos = (Object.values(pendingById): any).map(p => p.txid);
// use collections
const pendingIdSet = new Set(selectPendingIds(state));
const pendingCollections = selectPendingCollections(state);
if (pendingTxos.length) {
Lbry.txo_list({ txid: pendingTxos })
.then(result => {
const txos = result.items;
const idsToConfirm = [];
txos.forEach(txo => {
if (txo.claim_id && txo.confirmations > 0) {
idsToConfirm.push(txo.claim_id);
delete pendingById[txo.claim_id];
Lbry.claim_list({ page: 1, page_size: 10 })
.then(result => {
const claims = result.items;
const claimsToConfirm = [];
claims.forEach(claim => {
const { claim_id: claimId } = claim;
if (claim.confirmations > 0 && pendingIdSet.has(claimId)) {
pendingIdSet.delete(claimId);
if (Object.keys(pendingCollections).includes(claim.claim_id)) {
dispatch(doFetchItemsInCollection({ collectionId: claim.claim_id }));
dispatch(doCollectionDelete(claim.claim_id, 'pending'));
}
claimsToConfirm.push(claim);
if (onConfirmed) {
onConfirmed(claim);
}
});
return { idsToConfirm, pendingById };
})
.then(results => {
const { idsToConfirm, pendingById } = results;
if (idsToConfirm.length) {
return Lbry.claim_list({ claim_id: idsToConfirm, resolve: true }).then(results => {
const claims = results.items;
const collectionIds = claims
.filter(c => c.value_type === 'collection')
.map(c => c.claim_id);
dispatch({
type: ACTIONS.UPDATE_CONFIRMED_CLAIMS,
data: {
claims: claims,
pending: pendingById,
},
});
if (collectionIds.length) {
dispatch(
doFetchItemsInCollections({
collectionIds,
})
);
}
const channelClaims = claims.filter(claim => claim.value_type === 'channel');
if (channelClaims.length && onChannelConfirmCallback) {
channelClaims.forEach(claim => onChannelConfirmCallback(claim));
}
if (Object.keys(pendingById).length === 0) {
clearInterval(checkPendingInterval);
}
});
}
});
} else {
clearInterval(checkPendingInterval);
}
if (claimsToConfirm.length) {
dispatch({
type: ACTIONS.UPDATE_CONFIRMED_CLAIMS,
data: {
claims: claimsToConfirm,
},
});
}
return pendingIdSet.size;
})
.then(len => {
if (!len) {
clearInterval(claimCheckInterval);
}
});
};
// do something with onConfirmed (typically get blocklist for channel)
checkPendingInterval = setInterval(() => {
checkTxoList();
claimCheckInterval = setInterval(() => {
checkClaimList();
}, 30000);
};
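
Condensed for reference, the pending-claim polling introduced on the added side above (claim_list every 30 seconds instead of txo_list) looks roughly like this. This is a sketch, not the literal source; the import paths in the comment are assumed from the hunks earlier in this file.

// Sketch of the new pending-claim polling loop (condensed; not the literal source).
// Assumes imports along the lines of:
//   import Lbry from 'lbry';
//   import * as ACTIONS from 'constants/action_types';
//   import { selectPendingIds } from 'redux/selectors/claims';
//   import { selectPendingCollections } from 'redux/selectors/collections';
//   import { doFetchItemsInCollection, doCollectionDelete } from 'redux/actions/collections';
export const doCheckPendingClaims = (onConfirmed) => (dispatch, getState) => {
  let claimCheckInterval;

  const checkClaimList = () => {
    const state = getState();
    const pendingIdSet = new Set(selectPendingIds(state)); // claim ids still awaiting confirmation
    const pendingCollections = selectPendingCollections(state);

    Lbry.claim_list({ page: 1, page_size: 10 })
      .then((result) => {
        const claimsToConfirm = [];
        result.items.forEach((claim) => {
          const { claim_id: claimId } = claim;
          if (claim.confirmations > 0 && pendingIdSet.has(claimId)) {
            pendingIdSet.delete(claimId);
            // a pending collection that just confirmed: refetch its items, drop the 'pending' copy
            if (Object.keys(pendingCollections).includes(claimId)) {
              dispatch(doFetchItemsInCollection({ collectionId: claimId }));
              dispatch(doCollectionDelete(claimId, 'pending'));
            }
            claimsToConfirm.push(claim);
            if (onConfirmed) {
              onConfirmed(claim);
            }
          }
        });
        if (claimsToConfirm.length) {
          dispatch({ type: ACTIONS.UPDATE_CONFIRMED_CLAIMS, data: { claims: claimsToConfirm } });
        }
        return pendingIdSet.size;
      })
      .then((remaining) => {
        if (!remaining) clearInterval(claimCheckInterval); // stop polling once nothing is pending
      });
  };

  claimCheckInterval = setInterval(checkClaimList, 30000);
};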

View file

@ -18,7 +18,7 @@ const getTimestamp = () => {
return Math.floor(Date.now() / 1000);
};
const FETCH_BATCH_SIZE = 50;
const FETCH_BATCH_SIZE = 10;
export const doLocalCollectionCreate = (
name: string,
@ -93,7 +93,7 @@ export const doFetchItemsInCollections = (
pageSize?: number,
},
resolveStartedCallback?: () => void
) => async(dispatch: Dispatch, getState: GetState) => {
) => async (dispatch: Dispatch, getState: GetState) => {
/*
1) make sure all the collection claims are loaded into claims reducer, search/resolve if necessary.
2) get the item claims for each
@ -165,7 +165,6 @@ export const doFetchItemsInCollections = (
claim_ids: claim.value.claims,
page: i + 1,
page_size: batchSize,
no_totals: true,
});
}
const itemsInBatches = await Promise.all(batches);
@ -319,7 +318,7 @@ export const doFetchItemsInCollection = (
return doFetchItemsInCollections(newOptions, cb);
};
export const doCollectionEdit = (collectionId: string, params: CollectionEditParams) => async(
export const doCollectionEdit = (collectionId: string, params: CollectionEditParams) => async (
dispatch: Dispatch,
getState: GetState
) => {
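
For reference, the batching in doFetchItemsInCollections can be sketched as below. The RPC used for each page is cut off in the hunk; Lbry.claim_search is assumed here, and the helper name fetchCollectionItems is illustrative only.

// Sketch of the batch-fetch pattern for a collection's items (assumptions noted above).
const FETCH_BATCH_SIZE = 10; // one side of the diff uses 50

async function fetchCollectionItems(claim) {
  const itemIds = claim.value.claims; // item claim ids stored on the collection claim
  const batchSize = FETCH_BATCH_SIZE;
  const totalBatches = Math.ceil(itemIds.length / batchSize);

  const batches = [];
  for (let i = 0; i < totalBatches; i++) {
    batches[i] = Lbry.claim_search({
      claim_ids: itemIds,
      page: i + 1,
      page_size: batchSize,
    });
  }

  // resolve every page in parallel, then flatten into one ordered item list
  const itemsInBatches = await Promise.all(batches);
  return [].concat(...itemsInBatches.map((res) => res.items || []));
}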

View file

@ -21,7 +21,6 @@ export const doResetThumbnailStatus = () => (dispatch: Dispatch) => {
type: ACTIONS.UPDATE_PUBLISH_FORM,
data: {
thumbnailPath: '',
thumbnailError: undefined,
},
});
@ -69,8 +68,7 @@ export const doUploadThumbnail = (
thumbnailBlob?: File,
fsAdapter?: any,
fs?: any,
path?: any,
cb?: (string) => void
path?: any
) => (dispatch: Dispatch) => {
const downMessage = __('Thumbnail upload service may be down, try again later.');
let thumbnail, fileExt, fileName, fileType;
@ -98,13 +96,6 @@ export const doUploadThumbnail = (
);
};
dispatch({
type: ACTIONS.UPDATE_PUBLISH_FORM,
data: {
thumbnailError: undefined,
},
});
const doUpload = data => {
return fetch(SPEECH_PUBLISH, {
method: 'POST',
@ -113,17 +104,15 @@ export const doUploadThumbnail = (
.then(res => res.text())
.then(text => (text.length ? JSON.parse(text) : {}))
.then(json => {
if (!json.success) return uploadError(json.message || downMessage);
if (cb) {
cb(json.data.serveUrl);
}
return dispatch({
type: ACTIONS.UPDATE_PUBLISH_FORM,
data: {
uploadThumbnailStatus: THUMBNAIL_STATUSES.COMPLETE,
thumbnail: json.data.serveUrl,
},
});
return json.success
? dispatch({
type: ACTIONS.UPDATE_PUBLISH_FORM,
data: {
uploadThumbnailStatus: THUMBNAIL_STATUSES.COMPLETE,
thumbnail: json.data.serveUrl,
},
})
: uploadError(json.message || downMessage);
})
.catch(err => {
let message = err.message;
@ -211,8 +200,7 @@ export const doPrepareEdit = (claim: StreamClaim, uri: string, fileInfo: FileLis
description,
fee,
languages,
releaseTime: release_time,
releaseTimeEdited: undefined,
release_time: release_time ? Number(release_time) * 1000 : undefined,
thumbnail: thumbnail ? thumbnail.url : null,
title,
uri,
@ -266,7 +254,7 @@ export const doPublish = (success: Function, fail: Function, preview: Function)
filePath,
description,
language,
releaseTimeEdited,
releaseTime,
license,
licenseUrl,
useLBRYUploader,
@ -359,8 +347,8 @@ export const doPublish = (success: Function, fail: Function, preview: Function)
}
// Set release time to curret date. On edits, keep original release/transaction time as release_time
if (releaseTimeEdited) {
publishPayload.release_time = releaseTimeEdited;
if (releaseTime) {
publishPayload.release_time = Number(Math.round(new Date(releaseTime) / 1000));
} else if (myClaimForUriEditing && myClaimForUriEditing.value.release_time) {
publishPayload.release_time = Number(myClaimForUri.value.release_time);
} else if (myClaimForUriEditing && myClaimForUriEditing.timestamp) {
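
Putting the two sides of this hunk together, the release_time fallback chain on the releaseTime side of doPublish reads roughly as follows. A sketch only: the final branch is cut off above, so using the claim's timestamp there is an assumption, and releaseTime, myClaimForUriEditing, and publishPayload come from the surrounding doPublish code.

if (releaseTime) {
  // value picked in the form: convert the date to epoch seconds
  publishPayload.release_time = Number(Math.round(new Date(releaseTime) / 1000));
} else if (myClaimForUriEditing && myClaimForUriEditing.value.release_time) {
  // editing an existing claim: keep its original release_time
  publishPayload.release_time = Number(myClaimForUriEditing.value.release_time);
} else if (myClaimForUriEditing && myClaimForUriEditing.timestamp) {
  // otherwise fall back to the original transaction timestamp (assumed; cut off in the hunk)
  publishPayload.release_time = Number(myClaimForUriEditing.timestamp);
}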

View file

@ -14,7 +14,6 @@ type SharedData = {
app_welcome_version?: number,
sharing_3P?: boolean,
unpublishedCollections: CollectionGroup,
editedCollections: CollectionGroup,
builtinCollections: CollectionGroup,
savedCollections: Array<string>,
},
@ -32,7 +31,6 @@ function extractUserState(rawObj: SharedData) {
app_welcome_version,
sharing_3P,
unpublishedCollections,
editedCollections,
builtinCollections,
savedCollections,
} = rawObj.value;
@ -47,7 +45,6 @@ function extractUserState(rawObj: SharedData) {
...(app_welcome_version ? { app_welcome_version } : {}),
...(sharing_3P ? { sharing_3P } : {}),
...(unpublishedCollections ? { unpublishedCollections } : {}),
...(editedCollections ? { editedCollections } : {}),
...(builtinCollections ? { builtinCollections } : {}),
...(savedCollections ? { savedCollections } : {}),
};
@ -68,7 +65,6 @@ export function doPopulateSharedUserState(sharedSettings: any) {
app_welcome_version,
sharing_3P,
unpublishedCollections,
editedCollections,
builtinCollections,
savedCollections,
} = extractUserState(sharedSettings);
@ -84,7 +80,6 @@ export function doPopulateSharedUserState(sharedSettings: any) {
welcomeVersion: app_welcome_version,
allowAnalytics: sharing_3P,
unpublishedCollections,
editedCollections,
builtinCollections,
savedCollections,
},

View file

@ -17,8 +17,8 @@ type State = {
channelClaimCounts: { [string]: number },
claimsByUri: { [string]: string },
byId: { [string]: Claim },
pendingById: { [string]: Claim }, // keep pending claims
resolvingUris: Array<string>,
pendingIds: Array<string>,
reflectingById: { [string]: ReflectingUpdate },
myClaims: ?Array<string>,
myChannelClaims: ?Array<string>,
@ -83,7 +83,7 @@ const defaultState = {
fetchingMyChannels: false,
fetchingMyCollections: false,
abandoningById: {},
pendingById: {},
pendingIds: [],
reflectingById: {},
claimSearchError: false,
claimSearchByQuery: {},
@ -117,7 +117,7 @@ function handleClaimAction(state: State, action: any): State {
const byUri = Object.assign({}, state.claimsByUri);
const byId = Object.assign({}, state.byId);
const channelClaimCounts = Object.assign({}, state.channelClaimCounts);
const pendingById = state.pendingById;
const pendingIds = state.pendingIds;
let newResolvingUrls = new Set(state.resolvingUris);
let myClaimIds = new Set(state.myClaims);
@ -127,7 +127,7 @@ function handleClaimAction(state: State, action: any): State {
const channel = channelFromResolve || (stream && stream.signing_channel);
if (stream) {
if (pendingById[stream.claim_id]) {
if (pendingIds.includes(stream.claim_id)) {
byId[stream.claim_id] = mergeClaim(stream, byId[stream.claim_id]);
} else {
byId[stream.claim_id] = stream;
@ -157,7 +157,7 @@ function handleClaimAction(state: State, action: any): State {
channelClaimCounts[channel.canonical_url] = claimsInChannel;
}
if (pendingById[channel.claim_id]) {
if (pendingIds.includes(channel.claim_id)) {
byId[channel.claim_id] = mergeClaim(channel, byId[channel.claim_id]);
} else {
byId[channel.claim_id] = channel;
@ -170,7 +170,7 @@ function handleClaimAction(state: State, action: any): State {
}
if (collection) {
if (pendingById[collection.claim_id]) {
if (pendingIds.includes(collection.claim_id)) {
byId[collection.claim_id] = mergeClaim(collection, byId[collection.claim_id]);
} else {
byId[collection.claim_id] = collection;
@ -187,7 +187,7 @@ function handleClaimAction(state: State, action: any): State {
}
newResolvingUrls.delete(url);
if (!stream && !channel && !collection && !pendingById[byUri[url]]) {
if (!stream && !channel && !collection && !pendingIds.includes(byUri[url])) {
byUri[url] = null;
}
});
@ -230,33 +230,34 @@ reducers[ACTIONS.FETCH_CLAIM_LIST_MINE_STARTED] = (state: State): State =>
});
reducers[ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED] = (state: State, action: any): State => {
const { result }: { result: ClaimListResponse } = action.data;
const { result, resolve }: { result: ClaimListResponse, resolve: boolean } = action.data;
const claims = result.items;
const page = result.page;
const totalItems = result.total_items;
const byId = Object.assign({}, state.byId);
const byUri = Object.assign({}, state.claimsByUri);
const pendingById = Object.assign({}, state.pendingById);
const pendingIds = state.pendingIds || [];
let myClaimIds = new Set(state.myClaims);
let urlsForCurrentPage = [];
const pendingIdSet = new Set(pendingIds);
claims.forEach((claim: Claim) => {
const { permanent_url: permanentUri, claim_id: claimId, canonical_url: canonicalUri } = claim;
const { permanent_url: permanentUri, claim_id: claimId } = claim;
if (claim.type && claim.type.match(/claim|update/)) {
urlsForCurrentPage.push(permanentUri);
if (claim.confirmations < 1) {
pendingById[claimId] = claim;
if (byId[claimId]) {
byId[claimId] = mergeClaim(claim, byId[claimId]);
} else {
byId[claimId] = claim;
}
pendingIdSet.add(claimId);
} else if (!resolve && pendingIdSet.has(claimId) && claim.confirmations > 0) {
pendingIdSet.delete(claimId);
}
if (pendingIds.includes(claimId)) {
byId[claimId] = mergeClaim(claim, byId[claimId]);
} else {
byId[claimId] = claim;
}
byUri[permanentUri] = claimId;
byUri[canonicalUri] = claimId;
myClaimIds.add(claimId);
}
});
@ -265,7 +266,7 @@ reducers[ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED] = (state: State, action: any):
isFetchingClaimListMine: false,
myClaims: Array.from(myClaimIds),
byId,
pendingById,
pendingIds: Array.from(pendingIdSet),
claimsByUri: byUri,
myClaimsPageResults: urlsForCurrentPage,
myClaimsPageNumber: page,
@ -278,8 +279,9 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_STARTED] = (state: State): State =>
reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): State => {
const { claims }: { claims: Array<ChannelClaim> } = action.data;
const myClaims = state.myClaims || [];
let myClaimIds = new Set(state.myClaims);
const pendingById = Object.assign({}, state.pendingById);
const pendingIds = state.pendingIds || [];
let myChannelClaims;
const byId = Object.assign({}, state.byId);
const byUri = Object.assign({}, state.claimsByUri);
@ -293,12 +295,7 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): St
claims.forEach(claim => {
const { meta } = claim;
const { claims_in_channel: claimsInChannel } = claim.meta;
const {
canonical_url: canonicalUrl,
permanent_url: permanentUrl,
claim_id: claimId,
confirmations,
} = claim;
const { canonical_url: canonicalUrl, permanent_url: permanentUrl, claim_id: claimId } = claim;
byUri[canonicalUrl] = claimId;
byUri[permanentUrl] = claimId;
@ -307,14 +304,7 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): St
// $FlowFixMe
myChannelClaims.add(claimId);
if (confirmations < 1) {
pendingById[claimId] = claim;
if (byId[claimId]) {
byId[claimId] = mergeClaim(claim, byId[claimId]);
} else {
byId[claimId] = claim;
}
} else {
if (!pendingIds.some(c => c === claimId)) {
byId[claimId] = claim;
}
myClaimIds.add(claimId);
@ -323,7 +313,6 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): St
return Object.assign({}, state, {
byId,
pendingById,
claimsByUri: byUri,
channelClaimCounts,
fetchingMyChannels: false,
@ -347,7 +336,7 @@ reducers[ACTIONS.FETCH_COLLECTION_LIST_COMPLETED] = (state: State, action: any):
const { claims }: { claims: Array<CollectionClaim> } = action.data;
const myClaims = state.myClaims || [];
let myClaimIds = new Set(myClaims);
const pendingById = Object.assign({}, state.pendingById);
const pendingIds = state.pendingIds || [];
let myCollectionClaimsSet = new Set([]);
const byId = Object.assign({}, state.byId);
const byUri = Object.assign({}, state.claimsByUri);
@ -356,12 +345,7 @@ reducers[ACTIONS.FETCH_COLLECTION_LIST_COMPLETED] = (state: State, action: any):
myCollectionClaimsSet = new Set(state.myCollectionClaims);
claims.forEach(claim => {
const { meta } = claim;
const {
canonical_url: canonicalUrl,
permanent_url: permanentUrl,
claim_id: claimId,
confirmations,
} = claim;
const { canonical_url: canonicalUrl, permanent_url: permanentUrl, claim_id: claimId } = claim;
byUri[canonicalUrl] = claimId;
byUri[permanentUrl] = claimId;
@ -369,14 +353,7 @@ reducers[ACTIONS.FETCH_COLLECTION_LIST_COMPLETED] = (state: State, action: any):
// $FlowFixMe
myCollectionClaimsSet.add(claimId);
// we don't want to overwrite a pending result with a resolve
if (confirmations < 1) {
pendingById[claimId] = claim;
if (byId[claimId]) {
byId[claimId] = mergeClaim(claim, byId[claimId]);
} else {
byId[claimId] = claim;
}
} else {
if (!pendingIds.some(c => c === claimId)) {
byId[claimId] = claim;
}
myClaimIds.add(claimId);
@ -386,7 +363,6 @@ reducers[ACTIONS.FETCH_COLLECTION_LIST_COMPLETED] = (state: State, action: any):
return {
...state,
byId,
pendingById,
claimsByUri: byUri,
fetchingMyCollections: false,
myCollectionClaims: Array.from(myCollectionClaimsSet),
@ -479,8 +455,9 @@ reducers[ACTIONS.ABANDON_CLAIM_STARTED] = (state: State, action: any): State =>
reducers[ACTIONS.UPDATE_PENDING_CLAIMS] = (state: State, action: any): State => {
const { claims: pendingClaims }: { claims: Array<Claim> } = action.data;
const byId = Object.assign({}, state.byId);
const pendingById = Object.assign({}, state.pendingById);
const byUri = Object.assign({}, state.claimsByUri);
const pendingIds = state.pendingIds;
const pendingIdSet = new Set(pendingIds);
let myClaimIds = new Set(state.myClaims);
const myChannelClaims = new Set(state.myChannelClaims);
@ -488,7 +465,7 @@ reducers[ACTIONS.UPDATE_PENDING_CLAIMS] = (state: State, action: any): State =>
pendingClaims.forEach((claim: Claim) => {
let newClaim;
const { permanent_url: uri, claim_id: claimId, type, value_type: valueType } = claim;
pendingById[claimId] = claim; // make sure we don't need to merge?
pendingIdSet.add(claimId);
const oldClaim = byId[claimId];
if (oldClaim && oldClaim.canonical_url) {
newClaim = mergeClaim(oldClaim, claim);
@ -508,22 +485,21 @@ reducers[ACTIONS.UPDATE_PENDING_CLAIMS] = (state: State, action: any): State =>
return Object.assign({}, state, {
myClaims: Array.from(myClaimIds),
byId,
pendingById,
myChannelClaims: Array.from(myChannelClaims),
claimsByUri: byUri,
pendingIds: Array.from(pendingIdSet),
});
};
reducers[ACTIONS.UPDATE_CONFIRMED_CLAIMS] = (state: State, action: any): State => {
const {
claims: confirmedClaims,
pending: pendingClaims,
}: { claims: Array<Claim>, pending: { [string]: Claim } } = action.data;
const { claims: confirmedClaims }: { claims: Array<Claim> } = action.data;
const byId = Object.assign({}, state.byId);
const byUri = Object.assign({}, state.claimsByUri);
//
const pendingIds = state.pendingIds;
const pendingIdSet = new Set(pendingIds);
confirmedClaims.forEach((claim: GenericClaim) => {
const { claim_id: claimId, type } = claim;
const { permanent_url: permanentUri, claim_id: claimId, type } = claim;
let newClaim = claim;
const oldClaim = byId[claimId];
if (oldClaim && oldClaim.canonical_url) {
@ -531,10 +507,11 @@ reducers[ACTIONS.UPDATE_CONFIRMED_CLAIMS] = (state: State, action: any): State =
}
if (type && type.match(/claim|update|channel/)) {
byId[claimId] = newClaim;
pendingIdSet.delete(claimId);
}
});
return Object.assign({}, state, {
pendingById: pendingClaims,
pendingIds: Array.from(pendingIdSet),
byId,
claimsByUri: byUri,
});
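
Condensed for reference, the pendingIds bookkeeping this reducer moves to works roughly like this; the sketch omits the byId/byUri updates and the resolve flag shown above.

// Pending claim ids live in state as an array and are edited through a Set.
reducers[ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED] = (state, action) => {
  const pendingIdSet = new Set(state.pendingIds || []);
  action.data.result.items.forEach((claim) => {
    if (claim.confirmations < 1) {
      pendingIdSet.add(claim.claim_id); // unconfirmed claims become pending
    } else {
      pendingIdSet.delete(claim.claim_id); // confirmed claims stop being pending
    }
  });
  return Object.assign({}, state, { pendingIds: Array.from(pendingIdSet) });
};

reducers[ACTIONS.UPDATE_CONFIRMED_CLAIMS] = (state, action) => {
  const pendingIdSet = new Set(state.pendingIds || []);
  action.data.claims.forEach((claim) => pendingIdSet.delete(claim.claim_id));
  return Object.assign({}, state, { pendingIds: Array.from(pendingIdSet) });
};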

View file

@ -90,15 +90,8 @@ const collectionsReducer = handleActions(
[ACTIONS.COLLECTION_PENDING]: (state, action) => {
const { localId, claimId } = action.data;
const {
resolved: resolvedList,
edited: editList,
unpublished: unpublishedList,
pending: pendingList,
} = state;
const { edited: editList, unpublished: unpublishedList, pending: pendingList } = state;
const newEditList = Object.assign({}, editList);
const newResolvedList = Object.assign({}, resolvedList);
const newUnpublishedList = Object.assign({}, unpublishedList);
const newPendingList = Object.assign({}, pendingList);
@ -108,10 +101,7 @@ const collectionsReducer = handleActions(
delete newUnpublishedList[localId];
} else {
// edit update
newPendingList[claimId] = Object.assign(
{},
newEditList[claimId] || newResolvedList[claimId]
);
newPendingList[claimId] = Object.assign({}, newEditList[claimId]);
delete newEditList[claimId];
}
@ -168,15 +158,9 @@ const collectionsReducer = handleActions(
});
},
[ACTIONS.USER_STATE_POPULATE]: (state, action) => {
const {
builtinCollections,
savedCollections,
unpublishedCollections,
editedCollections,
} = action.data;
const { builtinCollections, savedCollections, unpublishedCollections } = action.data;
return {
...state,
edited: editedCollections || state.edited,
unpublished: unpublishedCollections || state.unpublished,
builtin: builtinCollections || state.builtin,
saved: savedCollections || state.saved,

View file

@ -22,11 +22,9 @@ type PublishState = {
thumbnail_url: string,
thumbnailPath: string,
uploadThumbnailStatus: string,
thumbnailError: ?boolean,
description: string,
language: string,
releaseTime: ?number,
releaseTimeEdited: ?number,
releaseTime: ?string,
channel: string,
channelId: ?string,
name: string,
@ -57,11 +55,9 @@ const defaultState: PublishState = {
thumbnail_url: '',
thumbnailPath: '',
uploadThumbnailStatus: THUMBNAIL_STATUSES.API_DOWN,
thumbnailError: undefined,
description: '',
language: '',
releaseTime: undefined,
releaseTimeEdited: undefined,
nsfw: false,
channel: CHANNEL_ANONYMOUS,
channelId: '',

View file

@ -1,5 +1,5 @@
// @flow
import { normalizeURI, parseURI } from 'lbryURI';
import { normalizeURI, buildURI, parseURI } from 'lbryURI';
import { selectSupportsByOutpoint } from 'redux/selectors/wallet';
import { createSelector } from 'reselect';
import { isClaimNsfw, filterClaims } from 'util/claim';
@ -7,24 +7,11 @@ import * as CLAIM from 'constants/claim';
const selectState = state => state.claims || {};
export const selectById = createSelector(
export const selectClaimsById = createSelector(
selectState,
state => state.byId || {}
);
export const selectPendingClaimsById = createSelector(
selectState,
state => state.pendingById || {}
);
export const selectClaimsById = createSelector(
selectById,
selectPendingClaimsById,
(byId, pendingById) => {
return Object.assign(byId, pendingById); // do I need merged to keep metadata?
}
);
export const selectClaimIdsByUri = createSelector(
selectState,
state => state.claimsByUri || {}
@ -85,36 +72,29 @@ export const selectAllClaimsByChannel = createSelector(
export const selectPendingIds = createSelector(
selectState,
state => Object.keys(state.pendingById) || []
state => state.pendingIds || []
);
export const selectPendingClaims = createSelector(
selectPendingClaimsById,
pendingById => Object.values(pendingById)
selectPendingIds,
selectClaimsById,
(pendingIds, byId) => pendingIds.map(id => byId[id])
);
export const makeSelectClaimIsPending = (uri: string) =>
createSelector(
selectClaimIdsByUri,
selectPendingClaimsById,
(idsByUri, pendingById) => {
selectPendingIds,
(idsByUri, pendingIds) => {
const claimId = idsByUri[normalizeURI(uri)];
if (claimId) {
return Boolean(pendingById[claimId]);
return pendingIds.some(i => i === claimId);
}
return false;
}
);
export const makeSelectClaimIdIsPending = (claimId: string) =>
createSelector(
selectPendingClaimsById,
pendingById => {
return Boolean(pendingById[claimId]);
}
);
export const makeSelectClaimIdForUri = (uri: string) =>
createSelector(
selectClaimIdsByUri,
@ -165,7 +145,7 @@ export const makeSelectClaimForUri = (uri: string, returnRepost: boolean = true)
return {
...repostedClaim,
repost_url: normalizeURI(uri),
repost_url: uri,
repost_channel_url: channelUrl,
repost_bid_amount: claim && claim.meta && claim.meta.effective_amount,
};
@ -299,8 +279,8 @@ export const makeSelectMyPurchasesForPage = (query: ?string, page: number = 1) =
const end = Number(page) * Number(CLAIM.PAGE_SIZE);
return matchingFileInfos && matchingFileInfos.length
? matchingFileInfos
.slice(start, end)
.map(fileInfo => fileInfo.canonical_url || fileInfo.permanent_url)
.slice(start, end)
.map(fileInfo => fileInfo.canonical_url || fileInfo.permanent_url)
: [];
}
);
@ -338,7 +318,6 @@ export const makeSelectClaimsInChannelForPage = (uri: string, page?: number) =>
}
);
// THIS IS LEFT OVER FROM ONE TAB CHANNEL_CONTENT
export const makeSelectTotalClaimsInChannelSearch = (uri: string) =>
createSelector(
selectClaimsById,
@ -349,7 +328,6 @@ export const makeSelectTotalClaimsInChannelSearch = (uri: string) =>
}
);
// THIS IS LEFT OVER FROM ONE_TAB CHANNEL CONTENT
export const makeSelectTotalPagesInChannelSearch = (uri: string) =>
createSelector(
selectClaimsById,
@ -360,6 +338,21 @@ export const makeSelectTotalPagesInChannelSearch = (uri: string) =>
}
);
export const makeSelectClaimsInChannelForCurrentPageState = (uri: string) =>
createSelector(
selectClaimsById,
selectAllClaimsByChannel,
selectCurrentChannelPage,
(byId, allClaims, page) => {
const byChannel = allClaims[uri] || {};
const claimIds = byChannel[page || 1];
if (!claimIds) return claimIds;
return claimIds.map(claimId => byId[claimId]);
}
);
export const makeSelectMetadataForUri = (uri: string) =>
createSelector(
makeSelectClaimForUri(uri),
@ -393,8 +386,8 @@ export const makeSelectDateForUri = (uri: string) =>
(claim.value.release_time
? claim.value.release_time * 1000
: claim.meta && claim.meta.creation_timestamp
? claim.meta.creation_timestamp * 1000
: null);
? claim.meta.creation_timestamp * 1000
: null);
if (!timestamp) {
return undefined;
}
@ -500,9 +493,7 @@ export const selectMyClaims = createSelector(
export const selectMyClaimsWithoutChannels = createSelector(
selectMyClaims,
myClaims =>
myClaims
.filter(claim => claim && !claim.name.match(/^@/))
.sort((a, b) => a.timestamp - b.timestamp)
myClaims.filter(claim => !claim.name.match(/^@/)).sort((a, b) => a.timestamp - b.timestamp)
);
export const selectMyClaimUrisWithoutChannels = createSelector(
@ -610,18 +601,31 @@ export const selectChannelClaimCounts = createSelector(
export const makeSelectPendingClaimForUri = (uri: string) =>
createSelector(
selectPendingClaimsById,
pendingById => {
selectPendingIds,
selectClaimsById,
(pending, claims) => {
let validUri;
let uriIsChannel;
let uriStreamName;
let uriChannelName;
try {
({ streamName: uriStreamName, channelName: uriChannelName } = parseURI(uri));
({
isChannel: uriIsChannel,
streamName: uriStreamName,
channelName: uriChannelName,
} = parseURI(uri));
validUri = true;
} catch (e) {
return null;
}
const pendingClaims = (Object.values(pendingById): any);
const matchingClaim = pendingClaims.find((claim: GenericClaim) => {
return claim.normalized_name === uriChannelName || claim.normalized_name === uriStreamName;
const pendingClaims = pending.map(id => claims[id]);
const matchingClaim = pendingClaims.find(claim => {
const { streamName, channelName, isChannel } = parseURI(claim.permanent_url);
if (isChannel) {
return channelName === uriChannelName;
} else {
return streamName === uriStreamName;
}
});
return matchingClaim || null;
}
@ -630,13 +634,13 @@ export const makeSelectPendingClaimForUri = (uri: string) =>
export const makeSelectTotalItemsForChannel = (uri: string) =>
createSelector(
selectChannelClaimCounts,
byUri => byUri && byUri[normalizeURI(uri)]
byUri => byUri && byUri[uri]
);
export const makeSelectTotalPagesForChannel = (uri: string, pageSize: number = 10) =>
createSelector(
selectChannelClaimCounts,
byUri => byUri && byUri[uri] && Math.ceil(byUri[normalizeURI(uri)] / pageSize)
byUri => byUri && byUri[uri] && Math.ceil(byUri[uri] / pageSize)
);
export const makeSelectNsfwCountFromUris = (uris: Array<string>) =>
@ -652,6 +656,27 @@ export const makeSelectNsfwCountFromUris = (uris: Array<string>) =>
}, 0)
);
export const makeSelectNsfwCountForChannel = (uri: string) =>
createSelector(
selectClaimsById,
selectAllClaimsByChannel,
selectCurrentChannelPage,
(byId, allClaims, page) => {
const byChannel = allClaims[uri] || {};
const claimIds = byChannel[page || 1];
if (!claimIds) return 0;
return claimIds.reduce((acc, claimId) => {
const claim = byId[claimId];
if (isClaimNsfw(claim)) {
return acc + 1;
}
return acc;
}, 0);
}
);
export const makeSelectOmittedCountForChannel = (uri: string) =>
createSelector(
makeSelectTotalItemsForChannel(uri),
@ -731,6 +756,14 @@ export const makeSelectTagsForUri = (uri: string) =>
}
);
export const makeSelectChannelTagsForUri = (uri: string) =>
createSelector(
makeSelectMetadataForUri(uri),
(metadata: ?GenericMetadata) => {
return (metadata && metadata.tags) || [];
}
);
export const selectFetchingClaimSearchByQuery = createSelector(
selectState,
state => state.fetchingClaimSearchByQuery || {}
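
Read without the interleaved old/new lines, the pending-claim selectors after the pendingById → pendingIds switch shown above come out roughly as below (Flow annotations dropped; createSelector is from reselect as in the rest of this file).

export const selectPendingIds = createSelector(
  selectState,
  (state) => state.pendingIds || []
);

export const selectPendingClaims = createSelector(
  selectPendingIds,
  selectClaimsById,
  (pendingIds, byId) => pendingIds.map((id) => byId[id]) // pending claims now live in byId
);

export const makeSelectClaimIsPending = (uri) =>
  createSelector(
    selectClaimIdsByUri,
    selectPendingIds,
    (idsByUri, pendingIds) => {
      const claimId = idsByUri[normalizeURI(uri)];
      return claimId ? pendingIds.some((id) => id === claimId) : false;
    }
  );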

View file

@ -1,11 +1,6 @@
// @flow
import fromEntries from '@ungap/from-entries';
import { createSelector } from 'reselect';
import {
selectMyCollectionIds,
makeSelectClaimForUri,
selectClaimsByUri,
} from 'redux/selectors/claims';
import { selectMyCollectionIds } from 'redux/selectors/claims';
import { parseURI } from 'lbryURI';
const selectState = (state: { collections: CollectionState }) => state.collections;
@ -80,7 +75,7 @@ export const selectMyPublishedCollections = createSelector(
selectMyCollectionIds,
(resolved, pending, edited, myIds) => {
// all resolved in myIds, plus those in pending and edited
const myPublishedCollections = fromEntries(
const myPublishedCollections = Object.fromEntries(
Object.entries(pending).concat(
Object.entries(resolved).filter(
([key, val]) =>
@ -101,7 +96,7 @@ export const selectMyPublishedCollections = createSelector(
export const selectMyPublishedMixedCollections = createSelector(
selectMyPublishedCollections,
published => {
const myCollections = fromEntries(
const myCollections = Object.fromEntries(
// $FlowFixMe
Object.entries(published).filter(([key, collection]) => {
// $FlowFixMe
@ -115,7 +110,7 @@ export const selectMyPublishedMixedCollections = createSelector(
export const selectMyPublishedPlaylistCollections = createSelector(
selectMyPublishedCollections,
published => {
const myCollections = fromEntries(
const myCollections = Object.fromEntries(
// $FlowFixMe
Object.entries(published).filter(([key, collection]) => {
// $FlowFixMe
@ -136,7 +131,7 @@ export const makeSelectMyPublishedCollectionForId = (id: string) =>
// selectResolvedCollections,
// selectSavedCollectionIds,
// (resolved, myIds) => {
// const mySavedCollections = fromEntries(
// const mySavedCollections = Object.fromEntries(
// Object.entries(resolved).filter(([key, val]) => myIds.includes(key))
// );
// return mySavedCollections;
@ -159,33 +154,11 @@ export const makeSelectCollectionForId = (id: string) =>
selectMyEditedCollections,
selectPendingCollections,
(bLists, rLists, uLists, eLists, pLists) => {
const collection = bLists[id] || uLists[id] || eLists[id] || pLists[id] || rLists[id];
const collection = bLists[id] || uLists[id] || eLists[id] || rLists[id] || pLists[id];
return collection;
}
);
export const makeSelectClaimUrlInCollection = (url: string) =>
createSelector(
selectBuiltinCollections,
selectMyPublishedCollections,
selectMyUnpublishedCollections,
selectMyEditedCollections,
selectPendingCollections,
(bLists, myRLists, uLists, eLists, pLists) => {
const collections = [bLists, uLists, eLists, myRLists, pLists];
const itemsInCollections = [];
collections.map(list => {
Object.entries(list).forEach(([key, value]) => {
// $FlowFixMe
value.items.map(item => {
itemsInCollections.push(item);
});
});
});
return itemsInCollections.includes(url);
}
);
export const makeSelectCollectionForIdHasClaimUrl = (id: string, url: string) =>
createSelector(
makeSelectCollectionForId(id),
@ -213,72 +186,28 @@ export const makeSelectClaimIdsForCollectionId = (id: string) =>
export const makeSelectIndexForUrlInCollection = (url: string, id: string) =>
createSelector(
state => state.content.shuffleList,
makeSelectUrlsForCollectionId(id),
makeSelectClaimForUri(url),
(shuffleState, urls, claim) => {
const shuffleUrls = shuffleState && shuffleState.collectionId === id && shuffleState.newUrls;
const listUrls = shuffleUrls || urls;
const index = listUrls && listUrls.findIndex(u => u === url);
urls => {
const index = urls && urls.findIndex(u => u === url);
if (index > -1) {
return index;
} else if (claim) {
const index = listUrls && listUrls.findIndex(u => u === claim.permanent_url);
if (index > -1) return index;
return claim;
}
return null;
}
);
export const makeSelectPreviousUrlForCollectionAndUrl = (id: string, url: string) =>
createSelector(
state => state.content.shuffleList,
state => state.content.loopList,
makeSelectIndexForUrlInCollection(url, id),
makeSelectUrlsForCollectionId(id),
(shuffleState, loopState, index, urls) => {
const loopList = loopState && loopState.collectionId === id && loopState.loop;
const shuffleUrls = shuffleState && shuffleState.collectionId === id && shuffleState.newUrls;
if (index > -1) {
const listUrls = shuffleUrls || urls;
let nextUrl;
if (index === 0 && loopList) {
nextUrl = listUrls[listUrls.length - 1];
} else {
nextUrl = listUrls[index - 1];
}
return nextUrl || null;
} else {
return null;
}
}
);
export const makeSelectNextUrlForCollectionAndUrl = (id: string, url: string) =>
createSelector(
state => state.content.shuffleList,
state => state.content.loopList,
makeSelectIndexForUrlInCollection(url, id),
makeSelectUrlsForCollectionId(id),
(shuffleState, loopState, index, urls) => {
const loopList = loopState && loopState.collectionId === id && loopState.loop;
const shuffleUrls = shuffleState && shuffleState.collectionId === id && shuffleState.newUrls;
if (index > -1) {
const listUrls = shuffleUrls || urls;
// We'll get the next playble url
let remainingUrls = listUrls.slice(index + 1);
if (!remainingUrls.length && loopList) {
remainingUrls = listUrls.slice(0);
(index, urls) => {
if (urls && index >= -1) {
const url = urls[index + 1];
if (url) {
return url;
}
const nextUrl = remainingUrls && remainingUrls[0];
return nextUrl || null;
} else {
return null;
}
return null;
}
);
@ -298,13 +227,7 @@ export const makeSelectCountForCollectionId = (id: string) =>
if (collection.itemCount !== undefined) {
return collection.itemCount;
}
let itemCount = 0;
collection.items.map(item => {
if (item) {
itemCount += 1;
}
});
return itemCount;
return collection.items.length;
}
return null;
}
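
For reference, the slimmed-down next-item selector on one side of this diff (the version without the shuffle/loop handling) comes out roughly as below, with Flow annotations dropped.

export const makeSelectNextUrlForCollectionAndUrl = (id, url) =>
  createSelector(
    makeSelectIndexForUrlInCollection(url, id),
    makeSelectUrlsForCollectionId(id),
    (index, urls) => {
      if (urls && index >= -1) {
        const nextUrl = urls[index + 1]; // the entry right after the current url, if any
        if (nextUrl) {
          return nextUrl;
        }
      }
      return null;
    }
  );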

View file

@ -43,8 +43,7 @@ export const selectPublishFormValues = createSelector(
state => state.settings,
selectIsStillEditing,
(publishState, settingsState, isStillEditing) => {
const { languages, ...formValues } = publishState;
const language = languages && languages.length && languages[0];
const { pendingPublish, language, ...formValues } = publishState;
const { clientSettings } = settingsState;
const { language: languageSet } = clientSettings;

View file

@ -1411,11 +1411,6 @@
dependencies:
"@types/yargs-parser" "*"
"@ungap/from-entries@^0.2.1":
version "0.2.1"
resolved "https://registry.yarnpkg.com/@ungap/from-entries/-/from-entries-0.2.1.tgz#7e86196b8b2e99d73106a8f25c2a068326346354"
integrity sha512-CAqefTFAfnUPwYqsWHXpOxHaq1Zo5UQ3m9Zm2p09LggGe57rqHoBn3c++xcoomzXKynAUuiBMDUCQvKMnXjUpA==
abab@^1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/abab/-/abab-1.0.4.tgz#5faad9c2c07f60dd76770f71cf025b62a63cfd4e"