commit little and often fail
Parent: f9b9221471
Commit: 470f61da9d
10 changed files with 114 additions and 61 deletions
@@ -396,14 +396,16 @@ export function doPublish(params) {
     } else {
       uri = lbryuri.build({ name: name }, false);
     }
+    const fakeId = "pending";
     const pendingPublish = {
       name,
       channel_name,
-      claim_id: "pending_claim_" + uri,
+      claim_id: fakeId,
       txid: "pending_" + uri,
       nout: 0,
-      outpoint: "pending_" + uri + ":0",
+      outpoint: fakeId + ":0",
       time: Date.now(),
+      pending: true,
     };
 
     dispatch({

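Note on the hunk above: later in this diff the claims reducer stores pending publishes as pendingById[pendingPublish.claim_id], so moving from the uri-derived "pending_claim_" ids to the constant fakeId of "pending" means the map can hold only one placeholder at a time (a second publish overwrites the first). A minimal sketch of a per-uri variant; this is purely hypothetical and not part of the commit:

// Hypothetical helper, not in the commit: keep the placeholder claim unique per
// uri so several publishes can sit in pendingById at once, while still flagging it.
const buildPendingPublish = (name, channel_name, uri) => {
  const fakeId = "pending_" + uri; // unique per uri, unlike the constant "pending"
  return {
    name,
    channel_name,
    claim_id: fakeId,
    txid: "pending_" + uri,
    nout: 0,
    outpoint: fakeId + ":0",
    time: Date.now(),
    pending: true,
  };
};
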
@@ -102,14 +102,20 @@ export function doDeleteFile(outpoint, deleteFromComputer, abandonClaim) {
         },
       });
 
-      const success = () => {
-        dispatch({
-          type: types.ABANDON_CLAIM_COMPLETED,
-          data: {
-            claimId: fileInfo.claim_id,
-          },
-        });
-      };
+      // We need to run this after a few seconds or the claim gets added back
+      // to the store again by an already running fetch claims query.
+      const success = setTimeout(
+        () => {
+          dispatch({
+            type: types.ABANDON_CLAIM_COMPLETED,
+            data: {
+              claimId: fileInfo.claim_id,
+            },
+          });
+        },
+        10000,
+        { once: true }
+      );
      lbry.claim_abandon({ claim_id: fileInfo.claim_id }).then(success);
    }
  }

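A note on the replacement above: setTimeout returns a timer id, so success is no longer a function, and Promise.prototype.then ignores non-function arguments; the ABANDON_CLAIM_COMPLETED action therefore fires after the 10-second delay regardless of whether lbry.claim_abandon has resolved, and { once: true } is an addEventListener option, not a setTimeout one (extra setTimeout arguments are just passed to the callback). A sketch that keeps the stated intent of the comment while only dispatching after the daemon call resolves; this is an alternative reading, not the commit's code:

// Sketch only: wait for the abandon RPC, then hold the completion action back
// a few seconds so an in-flight claim_list_mine fetch cannot re-add the claim.
lbry.claim_abandon({ claim_id: fileInfo.claim_id }).then(() => {
  setTimeout(() => {
    dispatch({
      type: types.ABANDON_CLAIM_COMPLETED,
      data: { claimId: fileInfo.claim_id },
    });
  }, 10000);
});
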
@@ -96,7 +96,6 @@ class FileList extends React.PureComponent {
           <FormField type="select" onChange={this.handleSortChanged.bind(this)}>
             <option value="date">{__("Date")}</option>
             <option value="title">{__("Title")}</option>
-            <option value="filename">{__("File name")}</option>
           </FormField>
         </span>
         {content}

@@ -64,9 +64,16 @@ class FileTile extends React.PureComponent {
     const isClaimable = lbryuri.isClaimable(uri);
     const title = isClaimed && metadata && metadata.title
       ? metadata.title
-      : uri;
+      : lbryuri.parse(uri).contentName;
     const obscureNsfw = this.props.obscureNsfw && metadata && metadata.nsfw;
-    let onClick = () => navigate("/show", { uri });
+    let onClick;
+    if (isClaimed) {
+      onClick = () => navigate("/show", { uri });
+    } else {
+      onClick = () => {
+        return false;
+      };
+    }
 
     let description = "";
     if (isClaimed) {

@@ -873,7 +873,11 @@ class PublishForm extends React.PureComponent {
               onClick={event => {
                 this.handleSubmit(event);
               }}
-              disabled={this.state.submitting}
+              disabled={
+                this.state.submitting ||
+                (this.state.uri &&
+                  this.props.resolvingUris.indexOf(this.state.uri) !== -1)
+              }
             />
             <Link
               button="cancel"

@@ -3,7 +3,7 @@ import rewards from "rewards";
 import { connect } from "react-redux";
 import { doFetchClaimListMine } from "actions/content";
 import {
-  selectMyClaims,
+  selectMyClaimsWithoutChannels,
   selectIsFetchingClaimListMine,
 } from "selectors/claims";
 import { doClaimRewardType } from "actions/rewards";
@@ -12,7 +12,7 @@ import { doCancelAllResolvingUris } from "actions/content";
 import FileListPublished from "./view";
 
 const select = state => ({
-  claims: selectMyClaims(state),
+  claims: selectMyClaimsWithoutChannels(state),
   isFetching: selectIsFetchingClaimListMine(state),
 });
 

@@ -40,19 +40,39 @@ reducers[types.FETCH_CLAIM_LIST_MINE_STARTED] = function(state, action) {
 
 reducers[types.FETCH_CLAIM_LIST_MINE_COMPLETED] = function(state, action) {
   const { claims } = action.data;
-  const myClaims = new Set(state.myClaims);
   const byUri = Object.assign({}, state.claimsByUri);
   const byId = Object.assign({}, state.byId);
+  const pendingById = Object.assign({}, state.pendingById);
+
+  const myClaims = new Set(claims.map(claim => claim.claim_id));
 
   claims.forEach(claim => {
-    myClaims.add(claim.claim_id);
     byId[claim.claim_id] = claim;
+
+    const pending = Object.values(pendingById).find(pendingClaim => {
+      return (
+        pendingClaim.name == claim.name &&
+        pendingClaim.channel_name == claim.channel_name
+      );
+    });
+
+    if (pending) {
+      delete pendingById[pending.claim_id];
+    }
   });
 
+  // Remove old timed out pending publishes
+  const old = Object.values(pendingById)
+    .filter(pendingClaim => Date.now() - pendingClaim.time >= 20 * 60 * 1000)
+    .forEach(pendingClaim => {
+      delete pendingById[pendingClaim.claim_id];
+    });
+
   return Object.assign({}, state, {
     isFetchingClaimListMine: false,
     myClaims: myClaims,
     byId,
+    pendingById,
   });
 };
 

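The reconciliation in FETCH_CLAIM_LIST_MINE_COMPLETED above matches placeholders to freshly fetched claims by name and channel_name (the placeholder's claim_id is fake, so it cannot be matched by id), and discards placeholders older than 20 minutes; note that the const old chain ends in .forEach, which returns undefined, so the binding itself is never used. A compact sketch of the same bookkeeping as a pure helper, under the same assumptions about the pendingPublish shape:

// Sketch: given fetched claims and the current pendingById map, return only the
// placeholders that are neither confirmed yet nor older than 20 minutes.
const PENDING_PUBLISH_TIMEOUT = 20 * 60 * 1000;

function prunePendingPublishes(pendingById, claims) {
  const confirmed = new Set(
    claims.map(claim => (claim.channel_name || "") + "/" + claim.name)
  );
  const next = {};

  Object.values(pendingById).forEach(pendingClaim => {
    const key = (pendingClaim.channel_name || "") + "/" + pendingClaim.name;
    const timedOut = Date.now() - pendingClaim.time >= PENDING_PUBLISH_TIMEOUT;
    if (!confirmed.has(key) && !timedOut) next[pendingClaim.claim_id] = pendingClaim;
  });

  return next;
}
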
@@ -91,6 +111,17 @@ reducers[types.FETCH_CHANNEL_CLAIMS_COMPLETED] = function(state, action) {
   });
 };
 
+reducers[types.ABANDON_CLAIM_STARTED] = function(state, action) {
+  const { claimId } = action.data;
+  const abandoningById = Object.assign({}, state.abandoningById);
+
+  abandoningById[claimId] = true;
+
+  return Object.assign({}, state, {
+    abandoningById,
+  });
+};
+
 reducers[types.ABANDON_CLAIM_COMPLETED] = function(state, action) {
   const { claimId } = action.data;
   const myClaims = new Set(state.myClaims);
@@ -128,17 +159,42 @@ reducers[types.CREATE_CHANNEL_COMPLETED] = function(state, action) {
   });
 };
 
+reducers[types.PUBLISH_STARTED] = function(state, action) {
+  const { pendingPublish } = action.data;
+  const pendingById = Object.assign({}, state.pendingById);
+
+  pendingById[pendingPublish.claim_id] = pendingPublish;
+
+  return Object.assign({}, state, {
+    pendingById,
+  });
+};
+
 reducers[types.PUBLISH_COMPLETED] = function(state, action) {
-  const { claim } = action.data;
+  const { claim, pendingPublish } = action.data;
   const byId = Object.assign({}, state.byId);
   const myClaims = new Set(state.myClaims);
+  const pendingById = Object.assign({}, state.pendingById);
 
   byId[claim.claim_id] = claim;
   myClaims.add(claim.claim_id);
+  delete pendingById[pendingPublish.claim_id];
 
   return Object.assign({}, state, {
     byId,
     myClaims,
+    pendingById,
   });
 };
 
+reducers[types.PUBLISH_FAILED] = function(state, action) {
+  const { pendingPublish } = action.data;
+  const pendingById = Object.assign({}, state.pendingById);
+
+  delete pendingById[pendingPublish.claim_id];
+
+  return Object.assign({}, state, {
+    pendingById,
+  });
+};
+

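The three PUBLISH_* reducers above expect the action that completes or fails a publish to carry the same pendingPublish object that was sent with PUBLISH_STARTED, so the placeholder can be deleted by its claim_id. The dispatching side is not shown in this diff; the following is a hedged sketch of what it presumably looks like, with lbry.publish standing in for whatever daemon call the actions file actually uses:

// Assumed action flow (not shown in this commit): the same pendingPublish object
// travels through all three publish actions so the reducer can clean it up.
dispatch({ type: types.PUBLISH_STARTED, data: { pendingPublish } });

lbry.publish(params).then(
  claim =>
    dispatch({ type: types.PUBLISH_COMPLETED, data: { claim, pendingPublish } }),
  error =>
    dispatch({ type: types.PUBLISH_FAILED, data: { error, pendingPublish } })
);
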
@@ -138,39 +138,6 @@ reducers[types.LOADING_VIDEO_FAILED] = function(state, action) {
   });
 };
 
-reducers[types.PUBLISH_STARTED] = function(state, action) {
-  const { pendingPublish } = action.data;
-  const pendingByOutpoint = Object.assign({}, state.pendingByOutpoint);
-
-  pendingByOutpoint[pendingPublish.outpoint] = pendingPublish;
-
-  return Object.assign({}, state, {
-    pendingByOutpoint,
-  });
-};
-
-reducers[types.PUBLISH_COMPLETED] = function(state, action) {
-  const { pendingPublish } = action.data;
-  const pendingByOutpoint = Object.assign({}, state.pendingByOutpoint);
-
-  delete pendingByOutpoint[pendingPublish.outpoint];
-
-  return Object.assign({}, state, {
-    pendingByOutpoint,
-  });
-};
-
-reducers[types.PUBLISH_FAILED] = function(state, action) {
-  const { pendingPublish } = action.data;
-  const pendingByOutpoint = Object.assign({}, state.pendingByOutpoint);
-
-  delete pendingByOutpoint[pendingPublish.outpoint];
-
-  return Object.assign({}, state, {
-    pendingByOutpoint,
-  });
-};
-
 export default function reducer(state = defaultState, action) {
   const handler = reducers[action.type];
   if (handler) return handler(state, action);

@@ -110,22 +110,37 @@ export const selectMyClaimsRaw = createSelector(
   state => new Set(state.myClaims)
 );
 
+export const selectAbandoningIds = createSelector(_selectState, state =>
+  Object.keys(state.abandoningById || {})
+);
+
+export const selectPendingClaims = createSelector(_selectState, state =>
+  Object.values(state.pendingById || {})
+);
+
 export const selectMyClaims = createSelector(
   selectMyClaimsRaw,
   selectClaimsById,
-  (myClaimIds, byId) => {
+  selectAbandoningIds,
+  selectPendingClaims,
+  (myClaimIds, byId, abandoningIds, pendingClaims) => {
     const claims = [];
 
     myClaimIds.forEach(id => {
       const claim = byId[id];
 
-      if (claim) claims.push(claim);
+      if (claim && abandoningIds.indexOf(id) == -1) claims.push(claim);
     });
 
-    return claims;
+    return [...claims, ...pendingClaims];
   }
 );
 
+export const selectMyClaimsWithoutChannels = createSelector(
+  selectMyClaims,
+  myClaims => myClaims.filter(claim => !claim.name.match(/^@/))
+);
+
 export const selectMyClaimsOutpoints = createSelector(
   selectMyClaims,
   myClaims => {

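With the selector changes above, selectMyClaims now hides claims that are mid-abandon and appends the pending placeholders, and selectMyClaimsWithoutChannels strips @-prefixed channel claims for the published-files page. A small worked example, assuming _selectState picks the claims slice off the root state:

// Illustrative state only: one confirmed claim, one channel being abandoned,
// and one pending publish placeholder.
const state = {
  claims: {
    myClaims: ["abc", "def"],
    byId: {
      abc: { claim_id: "abc", name: "my-video" },
      def: { claim_id: "def", name: "@my-channel" },
    },
    abandoningById: { def: true },
    pendingById: {
      pending: { claim_id: "pending", name: "new-video", pending: true },
    },
  },
};

selectMyClaims(state);
// -> [{ claim_id: "abc", ... }, { claim_id: "pending", ..., pending: true }]
selectMyClaimsWithoutChannels(state);
// -> the same two entries; "@my-channel" is already excluded via abandoningById,
//    and the /^@/ filter would drop it anyway
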
@@ -91,17 +91,14 @@ const saveClaimsFilter = createFilter("claims", [
   "claimsByUri",
   "myClaims",
   "myChannelClaims",
-]);
-const saveFileInfosFilter = createFilter("fileInfo", [
-  "fileInfos",
-  "pendingByOutpoint",
+  "pendingById",
 ]);
 
 const persistOptions = {
-  whitelist: ["claims", "fileInfo"],
+  whitelist: ["claims"],
   // Order is important. Needs to be compressed last or other transforms can't
   // read the data
-  transforms: [saveClaimsFilter, saveFileInfosFilter, compressor],
+  transforms: [saveClaimsFilter, compressor],
   debounce: 10000,
   storage: localForage,
 };

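Finally, the persistence change above stops persisting the fileInfo slice (and its old pendingByOutpoint map) and instead keeps pendingById inside the persisted claims slice, so unconfirmed publishes survive a restart. Roughly, the keys written for "claims" are the ones listed in saveClaimsFilter; there may be additional entries above the visible part of the hunk:

// Illustrative shape of the persisted "claims" slice under the filter above.
const persistedClaims = {
  claimsByUri: { /* ... */ },
  myClaims: [ /* ... */ ],
  myChannelClaims: [ /* ... */ ],
  pendingById: { /* placeholder claims for unconfirmed publishes */ },
};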