Merge pull request #325 from lbryio/pendingChannels

support pending channels
jessopb 2020-06-19 12:15:38 -04:00 committed by GitHub
commit d00744a8b5
9 changed files with 467 additions and 488 deletions

dist/bundle.es.js (vendored, 597 changed lines)

File diff suppressed because it is too large.


@@ -22,7 +22,7 @@ declare type GenericClaim = {
timestamp?: number, // date of last transaction
height: number, // block height the tx was confirmed
is_channel_signature_valid?: boolean,
is_my_output: true,
is_my_output: boolean,
name: string,
normalized_name: string, // `name` normalized via unicode NFD spec,
nout: number, // index number for an output of a tx

flow-typed/Claim.js (vendored, 2 changed lines)

@@ -22,7 +22,7 @@ declare type GenericClaim = {
timestamp?: number, // date of last transaction
height: number, // block height the tx was confirmed
is_channel_signature_valid?: boolean,
is_my_output: true,
is_my_output: boolean,
name: string,
normalized_name: string, // `name` normalized via unicode NFD spec,
nout: number, // index number for an output of a tx


@@ -76,6 +76,7 @@ export {
doClearRepostError,
doCheckPublishNameAvailability,
doPurchaseList,
doCheckPendingClaims,
} from 'redux/actions/claims';
export { doClearPurchasedUriSuccess, doPurchaseUri, doFileGet } from 'redux/actions/file';
@@ -94,7 +95,6 @@
doUploadThumbnail,
doPrepareEdit,
doPublish,
doCheckPendingPublishes,
doCheckReflectingFiles,
} from 'redux/actions/publish';
@@ -198,7 +198,6 @@ export {
makeSelectFirstRecommendedFileForUri,
makeSelectChannelForClaimUri,
makeSelectClaimIsPending,
makeSelectPendingByUri,
makeSelectReflectingClaimForUri,
makeSelectClaimsInChannelForCurrentPageState,
makeSelectShortUrlForUri,
@@ -207,7 +206,6 @@ export {
makeSelectSupportsForUri,
makeSelectMyPurchasesForPage,
makeSelectClaimWasPurchased,
selectPendingById,
selectReflectingById,
selectClaimsById,
selectClaimsByUri,
@@ -217,9 +215,9 @@ export {
selectMyActiveClaims,
selectAllFetchingChannelClaims,
selectIsFetchingClaimListMine,
selectPendingClaims,
selectMyClaims,
selectMyClaimsWithoutChannels,
selectMyChannelUrls,
selectMyClaimUrisWithoutChannels,
selectAllMyClaimsByOutpoint,
selectMyClaimsOutpoints,
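
For consumers of the bundle, the net effect of these export changes is that pending-claim polling moves from the publish actions to the claims actions, and the pendingById-based selectors go away. A minimal migration sketch, assuming the package is imported as 'lbry-redux' with redux-thunk middleware (`store` is the consuming app's own redux store):

// doCheckPendingPublishes, selectPendingById, makeSelectPendingByUri and selectPendingClaims
// are no longer exported; doCheckPendingClaims replaces the polling entry point.
import { doCheckPendingClaims } from 'lbry-redux';

// start polling claim_list; the callback fires once per newly confirmed claim
store.dispatch(doCheckPendingClaims(claim => console.log('confirmed', claim.claim_id)));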


@@ -9,6 +9,7 @@ import {
selectResolvingUris,
selectClaimsByUri,
selectMyChannelClaims,
selectPendingIds,
} from 'redux/selectors/claims';
import { doFetchTxoPage } from 'redux/actions/wallet';
import { selectSupportsByOutpoint } from 'redux/selectors/wallet';
@@ -338,7 +339,7 @@ export function doFetchClaimsByChannel(uri: string, page: number = 1) {
};
}
export function doCreateChannel(name: string, amount: number, optionalParams: any) {
export function doCreateChannel(name: string, amount: number, optionalParams: any, cb: any) {
return (dispatch: Dispatch) => {
dispatch({
type: ACTIONS.CREATE_CHANNEL_STARTED,
@@ -395,6 +396,13 @@ export function doCreateChannel(name: string, amount: number, optionalParams: any) {
type: ACTIONS.CREATE_CHANNEL_COMPLETED,
data: { channelClaim },
});
dispatch({
type: ACTIONS.UPDATE_PENDING_CLAIMS,
data: {
claims: [channelClaim],
},
});
dispatch(doCheckPendingClaims(cb));
return channelClaim;
})
.catch(error => {
@@ -408,7 +416,7 @@ export function doCreateChannel(name: string, amount: number, optionalParams: any) {
};
}
export function doUpdateChannel(params: any) {
export function doUpdateChannel(params: any, cb: any) {
return (dispatch: Dispatch, getState: GetState) => {
dispatch({
type: ACTIONS.UPDATE_CHANNEL_STARTED,
@@ -454,7 +462,16 @@ export function doUpdateChannel(params: any) {
type: ACTIONS.UPDATE_CHANNEL_COMPLETED,
data: { channelClaim },
});
dispatch({
type: ACTIONS.UPDATE_PENDING_CLAIMS,
data: {
claims: [channelClaim],
},
});
dispatch(doCheckPendingClaims(cb));
return Boolean(result.outputs[0]);
})
.then()
.catch(error => {
dispatch({
type: ACTIONS.UPDATE_CHANNEL_FAILED,
@@ -669,3 +686,48 @@ export function doPurchaseList(page: number = 1, pageSize: number = PAGE_SIZE) {
}).then(success, failure);
};
}
export const doCheckPendingClaims = (onConfirmed: Function) => (
dispatch: Dispatch,
getState: GetState
) => {
let claimCheckInterval;
const checkClaimList = () => {
const state = getState();
const pendingIdSet = new Set(selectPendingIds(state));
Lbry.claim_list({ page: 1, page_size: 10 })
.then(result => {
const claims = result.items;
const claimsToConfirm = [];
claims.forEach(claim => {
const { claim_id: claimId } = claim;
if (claim.confirmations > 0 && pendingIdSet.has(claimId)) {
pendingIdSet.delete(claimId);
claimsToConfirm.push(claim);
if (onConfirmed) {
onConfirmed(claim);
}
}
});
if (claimsToConfirm.length) {
dispatch({
type: ACTIONS.UPDATE_CONFIRMED_CLAIMS,
data: {
claims: claimsToConfirm,
},
});
}
return pendingIdSet.size;
})
.then(len => {
if (!len) {
clearInterval(claimCheckInterval);
}
});
};
claimCheckInterval = setInterval(() => {
checkClaimList();
}, 30000);
};
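
doCreateChannel and doUpdateChannel now take a callback that is forwarded to doCheckPendingClaims as onConfirmed, so callers can react when the new or updated channel gains its first confirmation. A hedged usage sketch (the channel name, bid and logging are illustrative; `dispatch` comes from the consuming app's store):

dispatch(
  doCreateChannel('@example', 0.001, { title: 'Example channel' }, confirmedClaim => {
    // invoked by the claim_list poll above once confirmations > 0
    console.log('channel confirmed at height', confirmedClaim.height);
  })
);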


@ -10,7 +10,6 @@ import { doError } from 'redux/actions/notifications';
import { isClaimNsfw } from 'util/claim';
import {
selectMyChannelClaims,
selectPendingById,
selectMyClaimsWithoutChannels,
selectReflectingById,
} from 'redux/selectors/claims';
@@ -427,46 +426,3 @@ export const doCheckReflectingFiles = () => (dispatch: Dispatch, getState: GetState) => {
}, 5000);
}
};
export const doCheckPendingPublishes = (onConfirmed: Function) => (
dispatch: Dispatch,
getState: GetState
) => {
let publishCheckInterval;
const checkFileList = () => {
const state = getState();
const pendingById = selectPendingById(state);
Lbry.claim_list({ page: 1, page_size: 10 })
.then(result => {
const claims = result.items;
const claimsToConfirm = [];
claims.forEach(claim => {
if (claim.confirmations > 0 && pendingById[claim.claim_id]) {
delete pendingById[claim.claim_id];
claimsToConfirm.push(claim);
if (onConfirmed) {
onConfirmed(claim);
}
}
});
if (claimsToConfirm.length) {
dispatch({
type: ACTIONS.UPDATE_CONFIRMED_CLAIMS,
data: {
claims: claimsToConfirm,
},
});
}
return Object.keys(pendingById).length;
})
.then(len => {
if (!len) {
clearInterval(publishCheckInterval);
}
});
};
publishCheckInterval = setInterval(() => {
checkFileList();
}, 30000);
};


@@ -9,7 +9,7 @@
// - Sean
import * as ACTIONS from 'constants/action_types';
import { buildURI, parseURI } from 'lbryURI';
import mergeClaim from 'util/merge-claim';
type State = {
createChannelError: ?string,
@@ -17,7 +17,7 @@ type State = {
claimsByUri: { [string]: string },
byId: { [string]: Claim },
resolvingUris: Array<string>,
pendingById: { [string]: Claim },
pendingIds: Array<string>,
reflectingById: { [string]: ReflectingUpdate },
myClaims: ?Array<string>,
myChannelClaims: ?Array<string>,
@@ -75,7 +75,7 @@ const defaultState = {
fetchingMyPurchasesError: undefined,
fetchingMyChannels: false,
abandoningById: {},
pendingById: {},
pendingIds: [],
reflectingById: {},
claimSearchError: false,
claimSearchByQuery: {},
@@ -112,18 +112,19 @@ function handleClaimAction(state: State, action: any): State {
const byUri = Object.assign({}, state.claimsByUri);
const byId = Object.assign({}, state.byId);
const channelClaimCounts = Object.assign({}, state.channelClaimCounts);
const pendingIds = state.pendingIds;
let newResolvingUrls = new Set(state.resolvingUris);
Object.entries(resolveInfo).forEach(([url: string, resolveResponse: ResolveResponse]) => {
// $FlowFixMe
const { claimsInChannel, stream, channel } = resolveResponse;
if (claimsInChannel) {
channelClaimCounts[url] = claimsInChannel;
channelClaimCounts[channel.canonical_url] = claimsInChannel;
}
if (stream) {
byId[stream.claim_id] = stream;
if (pendingIds.includes(stream.claim_id)) {
byId[stream.claim_id] = mergeClaim(stream, byId[stream.claim_id]);
} else {
byId[stream.claim_id] = stream;
}
byUri[url] = stream.claim_id;
// If url isn't a canonical_url, make sure that is added too
@@ -135,12 +136,18 @@ function handleClaimAction(state: State, action: any): State {
newResolvingUrls.delete(stream.permanent_url);
}
if (channel) {
if (!stream) {
byUri[url] = channel.claim_id;
if (channel && channel.claim_id) {
if (claimsInChannel) {
channelClaimCounts[url] = claimsInChannel;
channelClaimCounts[channel.canonical_url] = claimsInChannel;
}
byUri[url] = channel.claim_id;
byId[channel.claim_id] = channel;
if (pendingIds.includes(channel.claim_id)) {
byId[channel.claim_id] = mergeClaim(channel, byId[channel.claim_id]);
} else {
byId[channel.claim_id] = channel;
}
// Also add the permanent_url here until lighthouse returns canonical_url for search results
byUri[channel.permanent_url] = channel.claim_id;
byUri[channel.canonical_url] = channel.claim_id;
@@ -198,47 +205,37 @@ reducers[ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED] = (state: State, action: any): State => {
const byId = Object.assign({}, state.byId);
const byUri = Object.assign({}, state.claimsByUri);
const pendingById: { [string]: Claim } = Object.assign({}, state.pendingById);
const pendingIds = state.pendingIds || [];
let myClaimIds = new Set(state.myClaims);
let urlsForCurrentPage = [];
const pendingIdSet = new Set(pendingIds);
claims.forEach((claim: Claim) => {
const uri = buildURI({ streamName: claim.name, streamClaimId: claim.claim_id });
const { claim_id: claimId } = claim;
const { permanent_url: permanentUri, claim_id: claimId } = claim;
if (claim.type && claim.type.match(/claim|update/)) {
urlsForCurrentPage.push(uri);
urlsForCurrentPage.push(permanentUri);
if (claim.confirmations < 1) {
pendingById[claimId] = claim;
delete byId[claimId];
delete byUri[claimId];
pendingIdSet.add(claimId);
} else if (!resolve && pendingIdSet.has(claimId) && claim.confirmations > 0) {
pendingIdSet.delete(claimId);
}
if (pendingIds.includes(claimId)) {
byId[claimId] = mergeClaim(claim, byId[claimId]);
} else {
byId[claimId] = claim;
byUri[uri] = claimId;
}
byUri[permanentUri] = claimId;
myClaimIds.add(claimId);
if (!resolve && pendingById[claimId] && claim.confirmations > 0) {
delete pendingById[claimId];
}
}
});
// Remove old pending publishes if resolve is false (resolve=true means confirmations on updates are not 0)
if (!resolve) {
Object.values(pendingById)
// $FlowFixMe
.filter(pendingClaim => byId[pendingClaim.claim_id])
.forEach(pendingClaim => {
// $FlowFixMe
delete pendingById[pendingClaim.claim_id];
});
}
return Object.assign({}, state, {
isFetchingClaimListMine: false,
myClaims: Array.from(myClaimIds),
byId,
pendingIds: Array.from(pendingIdSet),
claimsByUri: byUri,
pendingById,
myClaimsPageResults: urlsForCurrentPage,
myClaimsPageNumber: page,
myClaimsPageTotalResults: totalItems,
@@ -252,7 +249,7 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): State => {
const { claims }: { claims: Array<ChannelClaim> } = action.data;
const myClaims = state.myClaims || [];
let myClaimIds = new Set(state.myClaims);
const pendingById = Object.assign(state.pendingById);
const pendingIds = state.pendingIds || [];
let myChannelClaims;
const byId = Object.assign({}, state.byId);
const byUri = Object.assign({}, state.claimsByUri);
@@ -275,18 +272,10 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): State => {
// $FlowFixMe
myChannelClaims.add(claimId);
if (!byId[claimId]) {
if (!pendingIds.some(c => c === claimId)) {
byId[claimId] = claim;
}
myClaimIds.add(claimId);
if (pendingById[claimId] && claim.confirmations > 0) {
delete pendingById[claimId];
}
if (pendingById[claimId] && claim.confirmations > 0) {
delete pendingById[claimId];
}
});
}
@@ -296,7 +285,7 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): State => {
channelClaimCounts,
fetchingMyChannels: false,
myChannelClaims: myChannelClaims ? Array.from(myChannelClaims) : null,
myClaims: Array.from(myClaimIds),
myClaims: myClaimIds ? Array.from(myClaimIds) : null,
});
};
@@ -385,19 +374,31 @@ reducers[ACTIONS.ABANDON_CLAIM_STARTED] = (state: State, action: any): State => {
};
reducers[ACTIONS.UPDATE_PENDING_CLAIMS] = (state: State, action: any): State => {
const { claims }: { claims: Array<Claim> } = action.data;
const { claims: pendingClaims }: { claims: Array<Claim> } = action.data;
const byId = Object.assign({}, state.byId);
const byUri = Object.assign({}, state.claimsByUri);
const pendingById: { [string]: Claim } = Object.assign({}, state.pendingById);
const pendingIds = state.pendingIds;
const pendingIdSet = new Set(pendingIds);
let myClaimIds = new Set(state.myClaims);
const myChannelClaims = new Set(state.myChannelClaims);
// $FlowFixMe
claims.forEach((claim: Claim) => {
const uri = buildURI({ streamName: claim.name, streamClaimId: claim.claim_id });
const { claim_id: claimId } = claim;
if (claim.type && claim.type.match(/claim|update/)) {
pendingById[claimId] = claim;
delete byId[claimId];
pendingClaims.forEach((claim: Claim) => {
let newClaim;
const { permanent_url: uri, claim_id: claimId, type, value_type: valueType } = claim;
pendingIdSet.add(claimId);
const oldClaim = byId[claimId];
if (oldClaim && oldClaim.canonical_url) {
newClaim = mergeClaim(oldClaim, claim);
} else {
newClaim = claim;
}
if (valueType === 'channel') {
myChannelClaims.add(claimId);
}
if (type && type.match(/claim|update/)) {
byId[claimId] = newClaim;
byUri[uri] = claimId;
}
myClaimIds.add(claimId);
@@ -405,32 +406,35 @@ reducers[ACTIONS.UPDATE_PENDING_CLAIMS] = (state: State, action: any): State => {
return Object.assign({}, state, {
myClaims: Array.from(myClaimIds),
byId,
myChannelClaims: Array.from(myChannelClaims),
claimsByUri: byUri,
pendingById,
pendingIds: Array.from(pendingIdSet),
});
};
reducers[ACTIONS.UPDATE_CONFIRMED_CLAIMS] = (state: State, action: any): State => {
const { claims }: { claims: Array<Claim> } = action.data;
const { claims: confirmedClaims }: { claims: Array<Claim> } = action.data;
const byId = Object.assign({}, state.byId);
const byUri = Object.assign({}, state.claimsByUri);
const pendingById: { [string]: Claim } = Object.assign({}, state.pendingById);
let myClaimIds = new Set(state.myClaims);
const pendingIds = state.pendingIds;
const pendingIdSet = new Set(pendingIds);
claims.forEach((claim: GenericClaim) => {
const uri = buildURI({ streamName: claim.name, streamClaimId: claim.claim_id });
const { claim_id: claimId } = claim;
if (claim.type && claim.type.match(/claim|update/)) {
delete pendingById[claimId];
byId[claimId] = claim;
confirmedClaims.forEach((claim: GenericClaim) => {
const { permanent_url: permanentUri, claim_id: claimId, type } = claim;
let newClaim = claim;
const oldClaim = byId[claimId];
if (oldClaim && oldClaim.canonical_url) {
newClaim = mergeClaim(oldClaim, claim);
}
if (type && type.match(/claim|update|channel/)) {
byId[claimId] = newClaim;
pendingIdSet.delete(claimId);
}
myClaimIds.add(claimId);
});
return Object.assign({}, state, {
myClaims: Array.from(myClaimIds),
pendingIds: Array.from(pendingIdSet),
byId,
claimsByUri: byUri,
pendingById,
});
};
@@ -466,19 +470,7 @@ reducers[ACTIONS.CREATE_CHANNEL_STARTED] = (state: State): State => ({
});
reducers[ACTIONS.CREATE_CHANNEL_COMPLETED] = (state: State, action: any): State => {
const channelClaim: ChannelClaim = action.data.channelClaim;
const byId = Object.assign({}, state.byId);
const pendingById = Object.assign({}, state.pendingById);
const myChannelClaims = new Set(state.myChannelClaims);
byId[channelClaim.claim_id] = channelClaim;
pendingById[channelClaim.claim_id] = channelClaim;
myChannelClaims.add(channelClaim.claim_id);
return Object.assign({}, state, {
byId,
pendingById,
myChannelClaims: Array.from(myChannelClaims),
creatingChannel: false,
});
};
@@ -498,13 +490,7 @@ reducers[ACTIONS.UPDATE_CHANNEL_STARTED] = (state: State, action: any): State => {
};
reducers[ACTIONS.UPDATE_CHANNEL_COMPLETED] = (state: State, action: any): State => {
const channelClaim: ChannelClaim = action.data.channelClaim;
const byId = Object.assign({}, state.byId);
byId[channelClaim.claim_id] = channelClaim;
return Object.assign({}, state, {
byId,
updateChannelError: '',
updatingChannel: false,
});
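
In effect, a pending claim now stays in byId, merged over any previously resolved copy via mergeClaim so its resolved meta survives, and only its claim id is tracked in the pendingIds array; the old code parked the whole claim in a separate pendingById map and deleted it from byId. An illustrative before/after of the relevant state slice (ids and field values are made up):

// before UPDATE_PENDING_CLAIMS: the channel was already resolved
const before = {
  byId: { abc123: { claim_id: 'abc123', name: '@cats', meta: { claims_in_channel: 2 } } },
  pendingIds: [],
};

// after UPDATE_PENDING_CLAIMS carrying an unconfirmed update to the same claim:
// the pending fields overwrite the resolved ones, but meta is kept
const after = {
  byId: {
    abc123: { claim_id: 'abc123', name: '@cats', confirmations: 0, meta: { claims_in_channel: 2 } },
  },
  pendingIds: ['abc123'],
};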


@@ -6,7 +6,7 @@ import {
} from 'redux/selectors/search';
import { selectSupportsByOutpoint } from 'redux/selectors/wallet';
import { createSelector } from 'reselect';
import { isClaimNsfw, createNormalizedClaimSearchKey, filterClaims } from 'util/claim';
import { isClaimNsfw, filterClaims } from 'util/claim';
import { getSearchQueryString } from 'util/query-params';
import { PAGE_SIZE } from 'constants/claim';
@@ -48,10 +48,9 @@ export const selectRepostError = createSelector(
);
export const selectClaimsByUri = createSelector(
selectState,
selectClaimIdsByUri,
selectClaimsById,
(state, byId) => {
const byUri = state.claimsByUri || {};
(byUri, byId) => {
const claims = {};
Object.keys(byUri).forEach(uri => {
@@ -76,42 +75,25 @@ export const selectAllClaimsByChannel = createSelector(
state => state.paginatedClaimsByChannel || {}
);
export const selectPendingById = createSelector(
export const selectPendingIds = createSelector(
selectState,
state => state.pendingById || {}
);
export const selectPendingClaims = createSelector(
selectState,
state => Object.values(state.pendingById || [])
state => state.pendingIds || []
);
export const makeSelectClaimIsPending = (uri: string) =>
createSelector(
selectPendingById,
pendingById => {
let claimId;
try {
const { isChannel, channelClaimId, streamClaimId } = parseURI(uri);
claimId = isChannel ? channelClaimId : streamClaimId;
} catch (e) {}
selectClaimIdsByUri,
selectPendingIds,
(idsByUri, pendingIds) => {
const claimId = idsByUri[normalizeURI(uri)];
if (claimId) {
return Boolean(pendingById[claimId]);
return pendingIds.some(i => i === claimId);
}
return false;
}
);
export const makeSelectPendingByUri = (uri: string) =>
createSelector(
selectPendingById,
pendingById => {
const { isChannel, channelClaimId, streamClaimId } = parseURI(uri);
const claimId = isChannel ? channelClaimId : streamClaimId;
return pendingById[claimId];
}
);
export const selectReflectingById = createSelector(
selectState,
state => state.reflectingById
@@ -119,30 +101,21 @@ export const selectReflectingById = createSelector(
export const makeSelectClaimForUri = (uri: string, returnRepost: boolean = true) =>
createSelector(
selectClaimsByUri,
selectPendingById,
(byUri, pendingById) => {
// Check if a claim is pending first
// It won't be in claimsByUri because resolving it will return nothing
let valid;
selectClaimIdsByUri,
selectClaimsById,
(byUri, byId) => {
let validUri;
let channelClaimId;
let streamClaimId;
let isChannel;
try {
({ isChannel, channelClaimId, streamClaimId } = parseURI(uri));
valid = true;
validUri = true;
} catch (e) {}
if (valid && byUri) {
const claimId = isChannel ? channelClaimId : streamClaimId;
const pendingClaim = pendingById[claimId];
if (pendingClaim) {
return pendingClaim;
}
const claim = byUri[normalizeURI(uri)];
if (validUri && byUri) {
const claimId = uri && byUri[normalizeURI(uri)];
const claim = byId[claimId];
if (claim === undefined || claim === null) {
// Make sure to return the claim as is so apps can check if it's been resolved before (null) or still needs to be resolved (undefined)
return claim;
@@ -456,8 +429,7 @@ export const selectMyClaims = createSelector(
selectMyActiveClaims,
selectClaimsById,
selectAbandoningIds,
selectPendingClaims,
(myClaimIds, byId, abandoningIds, pendingClaims) => {
(myClaimIds, byId, abandoningIds) => {
const claims = [];
myClaimIds.forEach(id => {
@@ -466,7 +438,7 @@
if (claim && abandoningIds.indexOf(id) === -1) claims.push(claim);
});
return [...claims, ...pendingClaims];
return [...claims];
}
);
@@ -538,6 +510,11 @@ export const selectMyChannelClaims = createSelector(
}
);
export const selectMyChannelUrls = createSelector(
selectMyChannelClaims,
claims => claims ? claims.map(claim => claim.canonical_url || claim.permanent_url) : undefined
);
export const selectResolvingUris = createSelector(
selectState,
state => state.resolvingUris || []
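
makeSelectClaimIsPending now looks the claim id up through the uri-to-id map and checks it against pendingIds, instead of parsing the URI and probing pendingById, so it also covers pending channels that remain in byId. A minimal usage sketch, assuming react-redux and the 'lbry-redux' package name (the component is hypothetical):

import { connect } from 'react-redux';
import { makeSelectClaimForUri, makeSelectClaimIsPending } from 'lbry-redux';

// stand-in presentational component for the consuming app
const ClaimPreview = ({ claim, isPending }) =>
  isPending ? 'Waiting for confirmation...' : (claim && claim.name) || 'Not resolved';

const select = (state, props) => ({
  claim: makeSelectClaimForUri(props.uri)(state),
  isPending: makeSelectClaimIsPending(props.uri)(state),
});

export default connect(select)(ClaimPreview);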

src/util/merge-claim.js (new file, 7 lines)

@@ -0,0 +1,7 @@
/*
new claim = { ...maybeResolvedClaim, ...pendingClaim, meta: maybeResolvedClaim['meta'] }
*/
export default function mergeClaims(maybeResolved, pending) {
return { ...maybeResolved, ...pending, meta: maybeResolved.meta };
}