Cleaner way of filtering published claims while abandoning

6ea86b96 2017-07-11 15:30:28 +07:00
parent d0e3dd8f99
commit f1c45775ab
2 changed files with 11 additions and 15 deletions
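
In short, when the user's claim list is refetched, the reducer now rebuilds the set of the user's own claims while skipping any claim id that is currently being abandoned, so a fetch that was already running can no longer re-add a claim mid-abandon. A minimal sketch of that filtering step, wrapped in a hypothetical helper (filterAbandoning is not part of the codebase; state.abandoningById is assumed to be keyed by claim id while an abandon is in progress; all other names are taken from the diff below):

    // Sketch only: given reducer state and the claims array from a claim list
    // response, rebuild the set of own claim ids, dropping ids mid-abandon.
    function filterAbandoning(state, claims) {
      const abandoningById = Object.assign({}, state.abandoningById);
      return new Set(
        claims
          .map(claim => claim.claim_id)
          .filter(claimId => Object.keys(abandoningById).indexOf(claimId) === -1)
      );
    }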

Changed file 1 of 2:

@@ -102,20 +102,12 @@ export function doDeleteFile(outpoint, deleteFromComputer, abandonClaim) {
           },
         });
 
-        // We need to run this after a few seconds or the claim gets added back
-        // to the store again by an already running fetch claims query.
-        const success = setTimeout(
-          () => {
-            dispatch({
-              type: types.ABANDON_CLAIM_SUCCEEDED,
-              data: {
-                claimId: fileInfo.claim_id,
-              },
-            });
-          },
-          10000,
-          { once: true }
-        );
+        const success = dispatch({
+          type: types.ABANDON_CLAIM_SUCCEEDED,
+          data: {
+            claimId: fileInfo.claim_id,
+          },
+        });
         lbry.claim_abandon({ claim_id: fileInfo.claim_id }).then(success);
       }
     }
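
The comment removed above explains why the old code waited: a claim list fetch that was already running could add the claim back to the store, so the success action was delayed by ten seconds. With the reducer-side filter in the second file, that delay is no longer needed and the success action can be dispatched right away. A simplified sketch of the new flow (context inside doDeleteFile is assumed from the hunk above, and the .then(success) chaining from the diff is left out here):

    // Record the abandon as succeeded in the store immediately; the claim list
    // reducer now skips ids present in abandoningById (second file), which is
    // what removes the need for the old 10 second delay.
    dispatch({
      type: types.ABANDON_CLAIM_SUCCEEDED,
      data: {
        claimId: fileInfo.claim_id,
      },
    });
    // Ask the lbry daemon to abandon the claim itself.
    lbry.claim_abandon({ claim_id: fileInfo.claim_id });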

Changed file 2 of 2:

@@ -43,8 +43,12 @@ reducers[types.FETCH_CLAIM_LIST_MINE_COMPLETED] = function(state, action) {
   const byUri = Object.assign({}, state.claimsByUri);
   const byId = Object.assign({}, state.byId);
   const pendingById = Object.assign({}, state.pendingById);
-  const myClaims = new Set(claims.map(claim => claim.claim_id));
+  const abandoningById = Object.assign({}, state.abandoningById);
+  const myClaims = new Set(
+    claims
+      .map(claim => claim.claim_id)
+      .filter(claimId => Object.keys(abandoningById).indexOf(claimId) === -1)
+  );
 
   claims.forEach(claim => {
     byId[claim.claim_id] = claim;