Merge pull request #326 from lbryio/revert-325-pendingChannels

Revert "support pending channels"
This commit is contained in:
jessopb 2020-06-19 13:11:44 -04:00 committed by GitHub
commit 273090d42f
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
9 changed files with 488 additions and 467 deletions

597
dist/bundle.es.js vendored

File diff suppressed because it is too large. [Load diff]

View file

@ -22,7 +22,7 @@ declare type GenericClaim = {
timestamp?: number, // date of last transaction
height: number, // block height the tx was confirmed
is_channel_signature_valid?: boolean,
is_my_output: boolean,
is_my_output: true,
name: string,
normalized_name: string, // `name` normalized via unicode NFD spec,
nout: number, // index number for an output of a tx

2
flow-typed/Claim.js vendored
View file

@ -22,7 +22,7 @@ declare type GenericClaim = {
timestamp?: number, // date of last transaction
height: number, // block height the tx was confirmed
is_channel_signature_valid?: boolean,
is_my_output: boolean,
is_my_output: true,
name: string,
normalized_name: string, // `name` normalized via unicode NFD spec,
nout: number, // index number for an output of a tx

View file

@ -76,7 +76,6 @@ export {
doClearRepostError,
doCheckPublishNameAvailability,
doPurchaseList,
doCheckPendingClaims,
} from 'redux/actions/claims';
export { doClearPurchasedUriSuccess, doPurchaseUri, doFileGet } from 'redux/actions/file';
@ -95,6 +94,7 @@ export {
doUploadThumbnail,
doPrepareEdit,
doPublish,
doCheckPendingPublishes,
doCheckReflectingFiles,
} from 'redux/actions/publish';
@ -198,6 +198,7 @@ export {
makeSelectFirstRecommendedFileForUri,
makeSelectChannelForClaimUri,
makeSelectClaimIsPending,
makeSelectPendingByUri,
makeSelectReflectingClaimForUri,
makeSelectClaimsInChannelForCurrentPageState,
makeSelectShortUrlForUri,
@ -206,6 +207,7 @@ export {
makeSelectSupportsForUri,
makeSelectMyPurchasesForPage,
makeSelectClaimWasPurchased,
selectPendingById,
selectReflectingById,
selectClaimsById,
selectClaimsByUri,
@ -215,9 +217,9 @@ export {
selectMyActiveClaims,
selectAllFetchingChannelClaims,
selectIsFetchingClaimListMine,
selectPendingClaims,
selectMyClaims,
selectMyClaimsWithoutChannels,
selectMyChannelUrls,
selectMyClaimUrisWithoutChannels,
selectAllMyClaimsByOutpoint,
selectMyClaimsOutpoints,

View file

@ -9,7 +9,6 @@ import {
selectResolvingUris,
selectClaimsByUri,
selectMyChannelClaims,
selectPendingIds,
} from 'redux/selectors/claims';
import { doFetchTxoPage } from 'redux/actions/wallet';
import { selectSupportsByOutpoint } from 'redux/selectors/wallet';
@ -339,7 +338,7 @@ export function doFetchClaimsByChannel(uri: string, page: number = 1) {
};
}
export function doCreateChannel(name: string, amount: number, optionalParams: any, cb: any) {
export function doCreateChannel(name: string, amount: number, optionalParams: any) {
return (dispatch: Dispatch) => {
dispatch({
type: ACTIONS.CREATE_CHANNEL_STARTED,
@ -396,13 +395,6 @@ export function doCreateChannel(name: string, amount: number, optionalParams: an
type: ACTIONS.CREATE_CHANNEL_COMPLETED,
data: { channelClaim },
});
dispatch({
type: ACTIONS.UPDATE_PENDING_CLAIMS,
data: {
claims: [channelClaim],
},
});
dispatch(doCheckPendingClaims(cb));
return channelClaim;
})
.catch(error => {
@ -416,7 +408,7 @@ export function doCreateChannel(name: string, amount: number, optionalParams: an
};
}
export function doUpdateChannel(params: any, cb: any) {
export function doUpdateChannel(params: any) {
return (dispatch: Dispatch, getState: GetState) => {
dispatch({
type: ACTIONS.UPDATE_CHANNEL_STARTED,
@ -462,16 +454,7 @@ export function doUpdateChannel(params: any, cb: any) {
type: ACTIONS.UPDATE_CHANNEL_COMPLETED,
data: { channelClaim },
});
dispatch({
type: ACTIONS.UPDATE_PENDING_CLAIMS,
data: {
claims: [channelClaim],
},
});
dispatch(doCheckPendingClaims(cb));
return Boolean(result.outputs[0]);
})
.then()
.catch(error => {
dispatch({
type: ACTIONS.UPDATE_CHANNEL_FAILED,
@ -686,48 +669,3 @@ export function doPurchaseList(page: number = 1, pageSize: number = PAGE_SIZE) {
}).then(success, failure);
};
}
// Thunk: polls `claim_list` every 30 seconds and dispatches
// UPDATE_CONFIRMED_CLAIMS for any previously-pending claims that have gained
// at least one confirmation. Polling stops once no pending claim ids remain.
// `onConfirmed` (optional callback) is invoked once per newly confirmed claim.
// NOTE(review): only the first page (page_size: 10) of claim_list is checked,
// so pending claims beyond the first 10 results would not be detected — confirm
// this is acceptable for the expected number of simultaneous pending claims.
export const doCheckPendingClaims = (onConfirmed: Function) => (
  dispatch: Dispatch,
  getState: GetState
) => {
  let claimCheckInterval;

  const checkClaimList = () => {
    const state = getState();
    // Copy the pending ids into a local Set so deletions below do not touch
    // redux state; the reducer is updated via the dispatched action instead.
    const pendingIdSet = new Set(selectPendingIds(state));
    Lbry.claim_list({ page: 1, page_size: 10 })
      .then(result => {
        const claims = result.items;
        const claimsToConfirm = [];
        claims.forEach(claim => {
          const { claim_id: claimId } = claim;
          // A claim is newly confirmed when it has confirmations but was
          // still tracked as pending.
          if (claim.confirmations > 0 && pendingIdSet.has(claimId)) {
            pendingIdSet.delete(claimId);
            claimsToConfirm.push(claim);
            if (onConfirmed) {
              onConfirmed(claim);
            }
          }
        });
        if (claimsToConfirm.length) {
          dispatch({
            type: ACTIONS.UPDATE_CONFIRMED_CLAIMS,
            data: {
              claims: claimsToConfirm,
            },
          });
        }
        // Remaining pending count drives whether polling continues.
        return pendingIdSet.size;
      })
      .then(len => {
        if (!len) {
          // Nothing left pending — stop the interval.
          clearInterval(claimCheckInterval);
        }
      });
  };

  // First check happens 30s after dispatch, not immediately.
  claimCheckInterval = setInterval(() => {
    checkClaimList();
  }, 30000);
};

View file

@ -10,6 +10,7 @@ import { doError } from 'redux/actions/notifications';
import { isClaimNsfw } from 'util/claim';
import {
selectMyChannelClaims,
selectPendingById,
selectMyClaimsWithoutChannels,
selectReflectingById,
} from 'redux/selectors/claims';
@ -426,3 +427,46 @@ export const doCheckReflectingFiles = () => (dispatch: Dispatch, getState: GetSt
}, 5000);
}
};
export const doCheckPendingPublishes = (onConfirmed: Function) => (
dispatch: Dispatch,
getState: GetState
) => {
let publishCheckInterval;
const checkFileList = () => {
const state = getState();
const pendingById = selectPendingById(state);
Lbry.claim_list({ page: 1, page_size: 10 })
.then(result => {
const claims = result.items;
const claimsToConfirm = [];
claims.forEach(claim => {
if (claim.confirmations > 0 && pendingById[claim.claim_id]) {
delete pendingById[claim.claim_id];
claimsToConfirm.push(claim);
if (onConfirmed) {
onConfirmed(claim);
}
}
});
if (claimsToConfirm.length) {
dispatch({
type: ACTIONS.UPDATE_CONFIRMED_CLAIMS,
data: {
claims: claimsToConfirm,
},
});
}
return Object.keys(pendingById).length;
})
.then(len => {
if (!len) {
clearInterval(publishCheckInterval);
}
});
};
publishCheckInterval = setInterval(() => {
checkFileList();
}, 30000);
};

View file

@ -9,7 +9,7 @@
// - Sean
import * as ACTIONS from 'constants/action_types';
import mergeClaim from 'util/merge-claim';
import { buildURI, parseURI } from 'lbryURI';
type State = {
createChannelError: ?string,
@ -17,7 +17,7 @@ type State = {
claimsByUri: { [string]: string },
byId: { [string]: Claim },
resolvingUris: Array<string>,
pendingIds: Array<string>,
pendingById: { [string]: Claim },
reflectingById: { [string]: ReflectingUpdate },
myClaims: ?Array<string>,
myChannelClaims: ?Array<string>,
@ -75,7 +75,7 @@ const defaultState = {
fetchingMyPurchasesError: undefined,
fetchingMyChannels: false,
abandoningById: {},
pendingIds: [],
pendingById: {},
reflectingById: {},
claimSearchError: false,
claimSearchByQuery: {},
@ -112,19 +112,18 @@ function handleClaimAction(state: State, action: any): State {
const byUri = Object.assign({}, state.claimsByUri);
const byId = Object.assign({}, state.byId);
const channelClaimCounts = Object.assign({}, state.channelClaimCounts);
const pendingIds = state.pendingIds;
let newResolvingUrls = new Set(state.resolvingUris);
Object.entries(resolveInfo).forEach(([url: string, resolveResponse: ResolveResponse]) => {
// $FlowFixMe
const { claimsInChannel, stream, channel } = resolveResponse;
if (claimsInChannel) {
channelClaimCounts[url] = claimsInChannel;
channelClaimCounts[channel.canonical_url] = claimsInChannel;
}
if (stream) {
if (pendingIds.includes(stream.claim_id)) {
byId[stream.claim_id] = mergeClaim(stream, byId[stream.claim_id]);
} else {
byId[stream.claim_id] = stream;
}
byId[stream.claim_id] = stream;
byUri[url] = stream.claim_id;
// If url isn't a canonical_url, make sure that is added too
@ -136,18 +135,12 @@ function handleClaimAction(state: State, action: any): State {
newResolvingUrls.delete(stream.permanent_url);
}
if (channel && channel.claim_id) {
if (claimsInChannel) {
channelClaimCounts[url] = claimsInChannel;
channelClaimCounts[channel.canonical_url] = claimsInChannel;
if (channel) {
if (!stream) {
byUri[url] = channel.claim_id;
}
byUri[url] = channel.claim_id;
if (pendingIds.includes(channel.claim_id)) {
byId[channel.claim_id] = mergeClaim(channel, byId[channel.claim_id]);
} else {
byId[channel.claim_id] = channel;
}
byId[channel.claim_id] = channel;
// Also add the permanent_url here until lighthouse returns canonical_url for search results
byUri[channel.permanent_url] = channel.claim_id;
byUri[channel.canonical_url] = channel.claim_id;
@ -205,37 +198,47 @@ reducers[ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED] = (state: State, action: any):
const byId = Object.assign({}, state.byId);
const byUri = Object.assign({}, state.claimsByUri);
const pendingIds = state.pendingIds || [];
const pendingById: { [string]: Claim } = Object.assign({}, state.pendingById);
let myClaimIds = new Set(state.myClaims);
let urlsForCurrentPage = [];
const pendingIdSet = new Set(pendingIds);
claims.forEach((claim: Claim) => {
const { permanent_url: permanentUri, claim_id: claimId } = claim;
const uri = buildURI({ streamName: claim.name, streamClaimId: claim.claim_id });
const { claim_id: claimId } = claim;
if (claim.type && claim.type.match(/claim|update/)) {
urlsForCurrentPage.push(permanentUri);
urlsForCurrentPage.push(uri);
if (claim.confirmations < 1) {
pendingIdSet.add(claimId);
} else if (!resolve && pendingIdSet.has(claimId) && claim.confirmations > 0) {
pendingIdSet.delete(claimId);
}
if (pendingIds.includes(claimId)) {
byId[claimId] = mergeClaim(claim, byId[claimId]);
pendingById[claimId] = claim;
delete byId[claimId];
delete byUri[claimId];
} else {
byId[claimId] = claim;
byUri[uri] = claimId;
}
byUri[permanentUri] = claimId;
myClaimIds.add(claimId);
if (!resolve && pendingById[claimId] && claim.confirmations > 0) {
delete pendingById[claimId];
}
}
});
// Remove old pending publishes if resolve if false (resolve=true means confirmations on updates are not 0)
if (!resolve) {
Object.values(pendingById)
// $FlowFixMe
.filter(pendingClaim => byId[pendingClaim.claim_id])
.forEach(pendingClaim => {
// $FlowFixMe
delete pendingById[pendingClaim.claim_id];
});
}
return Object.assign({}, state, {
isFetchingClaimListMine: false,
myClaims: Array.from(myClaimIds),
byId,
pendingIds: Array.from(pendingIdSet),
claimsByUri: byUri,
pendingById,
myClaimsPageResults: urlsForCurrentPage,
myClaimsPageNumber: page,
myClaimsPageTotalResults: totalItems,
@ -249,7 +252,7 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): St
const { claims }: { claims: Array<ChannelClaim> } = action.data;
const myClaims = state.myClaims || [];
let myClaimIds = new Set(state.myClaims);
const pendingIds = state.pendingIds || [];
const pendingById = Object.assign(state.pendingById);
let myChannelClaims;
const byId = Object.assign({}, state.byId);
const byUri = Object.assign({}, state.claimsByUri);
@ -272,10 +275,18 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): St
// $FlowFixMe
myChannelClaims.add(claimId);
if (!pendingIds.some(c => c === claimId)) {
if (!byId[claimId]) {
byId[claimId] = claim;
}
myClaimIds.add(claimId);
if (pendingById[claimId] && claim.confirmations > 0) {
delete pendingById[claimId];
}
if (pendingById[claimId] && claim.confirmations > 0) {
delete pendingById[claimId];
}
});
}
@ -285,7 +296,7 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): St
channelClaimCounts,
fetchingMyChannels: false,
myChannelClaims: myChannelClaims ? Array.from(myChannelClaims) : null,
myClaims: myClaimIds ? Array.from(myClaimIds) : null,
myClaims: Array.from(myClaimIds),
});
};
@ -374,31 +385,19 @@ reducers[ACTIONS.ABANDON_CLAIM_STARTED] = (state: State, action: any): State =>
};
reducers[ACTIONS.UPDATE_PENDING_CLAIMS] = (state: State, action: any): State => {
const { claims: pendingClaims }: { claims: Array<Claim> } = action.data;
const { claims }: { claims: Array<Claim> } = action.data;
const byId = Object.assign({}, state.byId);
const byUri = Object.assign({}, state.claimsByUri);
const pendingIds = state.pendingIds;
const pendingIdSet = new Set(pendingIds);
const pendingById: { [string]: Claim } = Object.assign({}, state.pendingById);
let myClaimIds = new Set(state.myClaims);
const myChannelClaims = new Set(state.myChannelClaims);
// $FlowFixMe
pendingClaims.forEach((claim: Claim) => {
let newClaim;
const { permanent_url: uri, claim_id: claimId, type, value_type: valueType } = claim;
pendingIdSet.add(claimId);
const oldClaim = byId[claimId];
if (oldClaim && oldClaim.canonical_url) {
newClaim = mergeClaim(oldClaim, claim);
} else {
newClaim = claim;
}
if (valueType === 'channel') {
myChannelClaims.add(claimId);
}
if (type && type.match(/claim|update/)) {
byId[claimId] = newClaim;
claims.forEach((claim: Claim) => {
const uri = buildURI({ streamName: claim.name, streamClaimId: claim.claim_id });
const { claim_id: claimId } = claim;
if (claim.type && claim.type.match(/claim|update/)) {
pendingById[claimId] = claim;
delete byId[claimId];
byUri[uri] = claimId;
}
myClaimIds.add(claimId);
@ -406,35 +405,32 @@ reducers[ACTIONS.UPDATE_PENDING_CLAIMS] = (state: State, action: any): State =>
return Object.assign({}, state, {
myClaims: Array.from(myClaimIds),
byId,
myChannelClaims: Array.from(myChannelClaims),
claimsByUri: byUri,
pendingIds: Array.from(pendingIdSet),
pendingById,
});
};
reducers[ACTIONS.UPDATE_CONFIRMED_CLAIMS] = (state: State, action: any): State => {
const { claims: confirmedClaims }: { claims: Array<Claim> } = action.data;
const { claims }: { claims: Array<Claim> } = action.data;
const byId = Object.assign({}, state.byId);
const byUri = Object.assign({}, state.claimsByUri);
const pendingIds = state.pendingIds;
const pendingIdSet = new Set(pendingIds);
const pendingById: { [string]: Claim } = Object.assign({}, state.pendingById);
let myClaimIds = new Set(state.myClaims);
confirmedClaims.forEach((claim: GenericClaim) => {
const { permanent_url: permanentUri, claim_id: claimId, type } = claim;
let newClaim = claim;
const oldClaim = byId[claimId];
if (oldClaim && oldClaim.canonical_url) {
newClaim = mergeClaim(oldClaim, claim);
}
if (type && type.match(/claim|update|channel/)) {
byId[claimId] = newClaim;
pendingIdSet.delete(claimId);
claims.forEach((claim: GenericClaim) => {
const uri = buildURI({ streamName: claim.name, streamClaimId: claim.claim_id });
const { claim_id: claimId } = claim;
if (claim.type && claim.type.match(/claim|update/)) {
delete pendingById[claimId];
byId[claimId] = claim;
}
myClaimIds.add(claimId);
});
return Object.assign({}, state, {
pendingIds: Array.from(pendingIdSet),
myClaims: Array.from(myClaimIds),
byId,
claimsByUri: byUri,
pendingById,
});
};
@ -470,7 +466,19 @@ reducers[ACTIONS.CREATE_CHANNEL_STARTED] = (state: State): State => ({
});
// Registers a freshly created channel claim in the store: cached in byId,
// tracked in pendingById (new channels start unconfirmed), and its id added
// to myChannelClaims. Also clears the creatingChannel flag.
reducers[ACTIONS.CREATE_CHANNEL_COMPLETED] = (state: State, action: any): State => {
  const channelClaim: ChannelClaim = action.data.channelClaim;
  // Shallow-copy the maps being updated so the previous state object is not
  // mutated (redux reducers must return new state).
  const byId = Object.assign({}, state.byId);
  const pendingById = Object.assign({}, state.pendingById);
  const myChannelClaims = new Set(state.myChannelClaims);
  byId[channelClaim.claim_id] = channelClaim;
  pendingById[channelClaim.claim_id] = channelClaim;
  myChannelClaims.add(channelClaim.claim_id);
  return Object.assign({}, state, {
    byId,
    pendingById,
    myChannelClaims: Array.from(myChannelClaims),
    creatingChannel: false,
  });
};
@ -490,7 +498,13 @@ reducers[ACTIONS.UPDATE_CHANNEL_STARTED] = (state: State, action: any): State =>
};
reducers[ACTIONS.UPDATE_CHANNEL_COMPLETED] = (state: State, action: any): State => {
const channelClaim: ChannelClaim = action.data.channelClaim;
const byId = Object.assign({}, state.byId);
byId[channelClaim.claim_id] = channelClaim;
return Object.assign({}, state, {
byId,
updateChannelError: '',
updatingChannel: false,
});

View file

@ -6,7 +6,7 @@ import {
} from 'redux/selectors/search';
import { selectSupportsByOutpoint } from 'redux/selectors/wallet';
import { createSelector } from 'reselect';
import { isClaimNsfw, filterClaims } from 'util/claim';
import { isClaimNsfw, createNormalizedClaimSearchKey, filterClaims } from 'util/claim';
import { getSearchQueryString } from 'util/query-params';
import { PAGE_SIZE } from 'constants/claim';
@ -48,9 +48,10 @@ export const selectRepostError = createSelector(
);
export const selectClaimsByUri = createSelector(
selectClaimIdsByUri,
selectState,
selectClaimsById,
(byUri, byId) => {
(state, byId) => {
const byUri = state.claimsByUri || {};
const claims = {};
Object.keys(byUri).forEach(uri => {
@ -75,25 +76,42 @@ export const selectAllClaimsByChannel = createSelector(
state => state.paginatedClaimsByChannel || {}
);
export const selectPendingIds = createSelector(
export const selectPendingById = createSelector(
selectState,
state => state.pendingIds || []
state => state.pendingById || {}
);
export const selectPendingClaims = createSelector(
selectState,
state => Object.values(state.pendingById || [])
);
export const makeSelectClaimIsPending = (uri: string) =>
createSelector(
selectClaimIdsByUri,
selectPendingIds,
(idsByUri, pendingIds) => {
const claimId = idsByUri[normalizeURI(uri)];
selectPendingById,
pendingById => {
let claimId;
try {
const { isChannel, channelClaimId, streamClaimId } = parseURI(uri);
claimId = isChannel ? channelClaimId : streamClaimId;
} catch (e) {}
if (claimId) {
return pendingIds.some(i => i === claimId);
return Boolean(pendingById[claimId]);
}
return false;
}
);
export const makeSelectPendingByUri = (uri: string) =>
createSelector(
selectPendingById,
pendingById => {
const { isChannel, channelClaimId, streamClaimId } = parseURI(uri);
const claimId = isChannel ? channelClaimId : streamClaimId;
return pendingById[claimId];
}
);
export const selectReflectingById = createSelector(
selectState,
state => state.reflectingById
@ -101,21 +119,30 @@ export const selectReflectingById = createSelector(
export const makeSelectClaimForUri = (uri: string, returnRepost: boolean = true) =>
createSelector(
selectClaimIdsByUri,
selectClaimsById,
(byUri, byId) => {
let validUri;
selectClaimsByUri,
selectPendingById,
(byUri, pendingById) => {
// Check if a claim is pending first
// It won't be in claimsByUri because resolving it will return nothing
let valid;
let channelClaimId;
let streamClaimId;
let isChannel;
try {
({ isChannel, channelClaimId, streamClaimId } = parseURI(uri));
validUri = true;
valid = true;
} catch (e) {}
if (validUri && byUri) {
const claimId = uri && byUri[normalizeURI(uri)];
const claim = byId[claimId];
if (valid && byUri) {
const claimId = isChannel ? channelClaimId : streamClaimId;
const pendingClaim = pendingById[claimId];
if (pendingClaim) {
return pendingClaim;
}
const claim = byUri[normalizeURI(uri)];
if (claim === undefined || claim === null) {
// Make sure to return the claim as is so apps can check if it's been resolved before (null) or still needs to be resolved (undefined)
return claim;
@ -429,7 +456,8 @@ export const selectMyClaims = createSelector(
selectMyActiveClaims,
selectClaimsById,
selectAbandoningIds,
(myClaimIds, byId, abandoningIds) => {
selectPendingClaims,
(myClaimIds, byId, abandoningIds, pendingClaims) => {
const claims = [];
myClaimIds.forEach(id => {
@ -438,7 +466,7 @@ export const selectMyClaims = createSelector(
if (claim && abandoningIds.indexOf(id) === -1) claims.push(claim);
});
return [...claims];
return [...claims, ...pendingClaims];
}
);
@ -510,11 +538,6 @@ export const selectMyChannelClaims = createSelector(
}
);
export const selectMyChannelUrls = createSelector(
selectMyChannelClaims,
claims => claims ? claims.map(claim => claim.canonical_url || claim.permanent_url) : undefined
);
export const selectResolvingUris = createSelector(
selectState,
state => state.resolvingUris || []

View file

@ -1,7 +0,0 @@
/*
 Merge a (possibly resolved) cached claim with its pending counterpart.
 The pending claim's fields win, except `meta`, which is always taken from
 the resolved claim:
 new claim = { ...maybeResolvedClaim, ...pendingClaim, meta: maybeResolvedClaim['meta'] }
*/
export default function mergeClaims(maybeResolved, pending) {
  const merged = Object.assign({}, maybeResolved, pending);
  merged.meta = maybeResolved.meta;
  return merged;
}