Mobile updates (#179)
* return lowercase tag names for unfollowed tags
* fix: thumbnail url
* fix claim.meta.creation_timestamp check
* track whether the last page was reached for claim_search queries
* fix CLAIM_SEARCH_FAILED reducer
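The largest change here is the last-page tracking for claim_search: CLAIM_SEARCH_COMPLETED now carries the requested page_size, and the reducer marks a query as exhausted when the API returns fewer uris than that. Below is a minimal sketch of that behavior as a standalone function so it can be tried without a Redux store; the function name applyClaimSearchCompleted and the example query string are illustrative only, not part of this commit.

// Sketch only: approximates the CLAIM_SEARCH_COMPLETED handling added in this
// commit, written as a plain function over a { claimSearchByQuery,
// claimSearchByQueryLastPageReached } shaped object instead of real Redux state.
function applyClaimSearchCompleted(state, { query, uris, append, pageSize }) {
  const claimSearchByQuery = { ...state.claimSearchByQuery };
  const claimSearchByQueryLastPageReached = { ...state.claimSearchByQueryLastPageReached };

  // Pages after the first append to the existing list; page 1 starts fresh.
  claimSearchByQuery[query] = append
    ? (claimSearchByQuery[query] || []).concat(uris)
    : uris;

  // Fewer uris than the requested page size means there is nothing left to fetch.
  claimSearchByQueryLastPageReached[query] = uris.length < pageSize;

  return { ...state, claimSearchByQuery, claimSearchByQueryLastPageReached };
}

// Example: a page_size of 20 that comes back with 3 uris marks the query as done.
const next = applyClaimSearchCompleted(
  { claimSearchByQuery: {}, claimSearchByQueryLastPageReached: {} },
  { query: 'page_size=20&any_tags=art', uris: ['lbry://a', 'lbry://b', 'lbry://c'], append: false, pageSize: 20 }
);
// next.claimSearchByQueryLastPageReached['page_size=20&any_tags=art'] === true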
parent 3098aa2d5e
commit e10986e8e5
7 changed files with 70 additions and 35 deletions
dist/bundle.es.js (vendored)
@@ -7,8 +7,6 @@ function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'defau
 require('proxy-polyfill');
 var reselect = require('reselect');
 var uuid = _interopDefault(require('uuid/v4'));
-var fs = _interopDefault(require('fs'));
-var path = _interopDefault(require('path'));
 
 const MINIMUM_PUBLISH_BID = 0.00000001;
 
@@ -1392,7 +1390,7 @@ const makeSelectMetadataItemForUri = (uri, key) => reselect.createSelector(makeS
 const makeSelectTitleForUri = uri => reselect.createSelector(makeSelectMetadataForUri(uri), metadata => metadata && metadata.title);
 
 const makeSelectDateForUri = uri => reselect.createSelector(makeSelectClaimForUri(uri), claim => {
-  const timestamp = claim && claim.value && (claim.value.release_time ? claim.value.release_time * 1000 : claim.meta.creation_timestamp ? claim.meta.creation_timestamp * 1000 : null);
+  const timestamp = claim && claim.value && (claim.value.release_time ? claim.value.release_time * 1000 : claim.meta && claim.meta.creation_timestamp ? claim.meta.creation_timestamp * 1000 : null);
   if (!timestamp) {
     return undefined;
   }
@@ -1570,6 +1568,8 @@ const selectFetchingClaimSearch = reselect.createSelector(selectFetchingClaimSea
 
 const selectClaimSearchByQuery = reselect.createSelector(selectState$1, state => state.claimSearchByQuery || {});
 
+const selectClaimSearchByQueryLastPageReached = reselect.createSelector(selectState$1, state => state.claimSearchByQueryLastPageReached || {});
+
 const makeSelectShortUrlForUri = uri => reselect.createSelector(makeSelectClaimForUri(uri), claim => claim && claim.short_url);
 
 const selectState$2 = state => state.wallet || {};
@@ -2403,7 +2403,13 @@ function doClaimSearch(options = {
 
     dispatch({
       type: CLAIM_SEARCH_COMPLETED,
-      data: { query, resolveInfo, uris, append: options.page && options.page !== 1 }
+      data: {
+        query,
+        resolveInfo,
+        uris,
+        append: options.page && options.page !== 1,
+        pageSize: options.page_size
+      }
     });
   };
 
@@ -2880,7 +2886,7 @@ const doUpdatePublishForm = publishFormValue => dispatch => dispatch({
   data: _extends$4({}, publishFormValue)
 });
 
-const doUploadThumbnail = (filePath, thumbnailBuffer, fsAdapter) => dispatch => {
+const doUploadThumbnail = (filePath, thumbnailBuffer, fsAdapter, fs, path) => dispatch => {
   let thumbnail, fileExt, fileName, fileType;
 
   const makeid = () => {
@@ -2925,7 +2931,7 @@ const doUploadThumbnail = (filePath, thumbnailBuffer, fsAdapter) => dispatch =>
     type: UPDATE_PUBLISH_FORM,
     data: {
       uploadThumbnailStatus: COMPLETE,
-      thumbnail: `${json.data.url}${fileExt}`
+      thumbnail: `${json.data.url}.${fileExt}`
     }
   }) : uploadError(json.message)).catch(err => uploadError(err.message));
 });
@@ -2963,7 +2969,7 @@ const doUploadThumbnail = (filePath, thumbnailBuffer, fsAdapter) => dispatch =>
   }
 };
 
-const doPrepareEdit = (claim, uri, fileInfo) => dispatch => {
+const doPrepareEdit = (claim, uri, fileInfo, fs) => dispatch => {
   const { name, amount, value } = claim;
   const channelName = claim && claim.signing_channel && claim.signing_channel.normalized_name || null;
   const {
@@ -3017,7 +3023,7 @@ const doPrepareEdit = (claim, uri, fileInfo) => dispatch => {
     publishData['channel'] = channelName;
   }
 
-  if (fileInfo && fileInfo.download_path) {
+  if (fs && fileInfo && fileInfo.download_path) {
     try {
       fs.accessSync(fileInfo.download_path, fs.constants.R_OK);
       publishData.filePath = fileInfo.download_path;
@@ -3462,6 +3468,7 @@ const defaultState = {
   pendingById: {},
   claimSearchError: false,
   claimSearchByQuery: {},
+  claimSearchByQueryLastPageReached: {},
   fetchingClaimSearchByQuery: {}
 };
 
@@ -3703,7 +3710,8 @@ reducers[CLAIM_SEARCH_STARTED] = (state, action) => {
 reducers[CLAIM_SEARCH_COMPLETED] = (state, action) => {
   const fetchingClaimSearchByQuery = Object.assign({}, state.fetchingClaimSearchByQuery);
   const claimSearchByQuery = Object.assign({}, state.claimSearchByQuery);
-  const { append, query, uris } = action.data;
+  const claimSearchByQueryLastPageReached = Object.assign({}, state.claimSearchByQueryLastPageReached);
+  const { append, query, uris, pageSize } = action.data;
 
   if (append) {
     // todo: check for duplicate uris when concatenating?
@@ -3712,17 +3720,21 @@ reducers[CLAIM_SEARCH_COMPLETED] = (state, action) => {
     claimSearchByQuery[query] = uris;
   }
 
+  // the returned number of uris is less than the page size, so we're on the last page
+  claimSearchByQueryLastPageReached[query] = uris.length < pageSize;
+
   delete fetchingClaimSearchByQuery[query];
 
   return Object.assign({}, state, _extends$5({}, handleClaimAction(state, action), {
     claimSearchByQuery,
+    claimSearchByQueryLastPageReached,
     fetchingClaimSearchByQuery
   }));
 };
 
 reducers[CLAIM_SEARCH_FAILED] = (state, action) => {
   const fetchingClaimSearchByQuery = Object.assign({}, state.fetchingClaimSearchByQuery);
-  fetchingClaimSearchByQuery[action.data.tags] = false;
+  delete fetchingClaimSearchByQuery[action.data.query];
 
   return Object.assign({}, state, {
     fetchingClaimSearchByQuery
@@ -4724,7 +4736,7 @@ const selectUnfollowedTags = reselect.createSelector(selectKnownTagsByName, sele
   Object.keys(tagsByName).forEach(key => {
     if (!followedTagsSet.has(key)) {
       const { name } = tagsByName[key];
-      tagsToReturn.push({ name });
+      tagsToReturn.push({ name: name.toLowerCase() });
     }
   });
 
@@ -4884,6 +4896,7 @@ exports.selectBlocks = selectBlocks;
 exports.selectChannelClaimCounts = selectChannelClaimCounts;
 exports.selectChannelIsBlocked = selectChannelIsBlocked;
 exports.selectClaimSearchByQuery = selectClaimSearchByQuery;
+exports.selectClaimSearchByQueryLastPageReached = selectClaimSearchByQueryLastPageReached;
 exports.selectClaimsById = selectClaimsById;
 exports.selectClaimsByUri = selectClaimsByUri;
 exports.selectCurrentChannelPage = selectCurrentChannelPage;
@@ -197,6 +197,7 @@ export {
   selectFetchingClaimSearch,
   selectFetchingClaimSearchByQuery,
   selectClaimSearchByQuery,
+  selectClaimSearchByQueryLastPageReached,
 } from 'redux/selectors/claims';
 
 export { makeSelectCommentsForUri } from 'redux/selectors/comments';
@@ -336,7 +336,13 @@ export function doClaimSearch(
 
     dispatch({
       type: ACTIONS.CLAIM_SEARCH_COMPLETED,
-      data: { query, resolveInfo, uris, append: options.page && options.page !== 1 },
+      data: {
+        query,
+        resolveInfo,
+        uris,
+        append: options.page && options.page !== 1,
+        pageSize: options.page_size,
+      },
     });
   };
 
@@ -13,8 +13,6 @@ import {
   selectMyClaimsWithoutChannels,
 } from 'redux/selectors/claims';
 import { selectPublishFormValues, selectMyClaimForUri } from 'redux/selectors/publish';
-import fs from 'fs';
-import path from 'path';
 
 export const doResetThumbnailStatus = () => (dispatch: Dispatch) => {
   dispatch({
@@ -66,7 +64,9 @@ export const doUpdatePublishForm = (publishFormValue: UpdatePublishFormData) =>
 export const doUploadThumbnail = (
   filePath: string,
   thumbnailBuffer: Uint8Array,
-  fsAdapter: any
+  fsAdapter: any,
+  fs: any,
+  path: any
 ) => (dispatch: Dispatch) => {
   let thumbnail, fileExt, fileName, fileType;
 
@@ -118,12 +118,12 @@ export const doUploadThumbnail = (
       .then(json =>
         json.success
           ? dispatch({
-              type: ACTIONS.UPDATE_PUBLISH_FORM,
-              data: {
-                uploadThumbnailStatus: THUMBNAIL_STATUSES.COMPLETE,
-                thumbnail: `${json.data.url}${fileExt}`,
-              },
-            })
+              type: ACTIONS.UPDATE_PUBLISH_FORM,
+              data: {
+                uploadThumbnailStatus: THUMBNAIL_STATUSES.COMPLETE,
+                thumbnail: `${json.data.url}.${fileExt}`,
+              },
+            })
           : uploadError(json.message)
       )
       .catch(err => uploadError(err.message));
@@ -157,19 +157,19 @@ export const doUploadThumbnail = (
       .then(json =>
         json.success
           ? dispatch({
-              type: ACTIONS.UPDATE_PUBLISH_FORM,
-              data: {
-                uploadThumbnailStatus: THUMBNAIL_STATUSES.COMPLETE,
-                thumbnail: `${json.data.url}${fileExt}`,
-              },
-            })
+              type: ACTIONS.UPDATE_PUBLISH_FORM,
+              data: {
+                uploadThumbnailStatus: THUMBNAIL_STATUSES.COMPLETE,
+                thumbnail: `${json.data.url}${fileExt}`,
+              },
+            })
           : uploadError(json.message)
       )
       .catch(err => uploadError(err.message));
   }
 };
 
-export const doPrepareEdit = (claim: StreamClaim, uri: string, fileInfo: FileListItem) => (
+export const doPrepareEdit = (claim: StreamClaim, uri: string, fileInfo: FileListItem, fs: any) => (
   dispatch: Dispatch
 ) => {
   const { name, amount, value } = claim;
@@ -226,7 +226,7 @@ export const doPrepareEdit = (claim: StreamClaim, uri: string, fileInfo: FileLis
     publishData['channel'] = channelName;
   }
 
-  if (fileInfo && fileInfo.download_path) {
+  if (fs && fileInfo && fileInfo.download_path) {
     try {
       fs.accessSync(fileInfo.download_path, fs.constants.R_OK);
       publishData.filePath = fileInfo.download_path;
@@ -23,6 +23,7 @@ type State = {
   fetchingMyChannels: boolean,
   fetchingClaimSearchByQuery: { [string]: boolean },
   claimSearchByQuery: { [string]: Array<string> },
+  claimSearchByQueryLastPageReached: { [string]: Array<boolean> },
   claimsByChannel: {
     [string]: {
       all: Array<string>,
@@ -47,6 +48,7 @@ const defaultState = {
   pendingById: {},
   claimSearchError: false,
   claimSearchByQuery: {},
+  claimSearchByQueryLastPageReached: {},
   fetchingClaimSearchByQuery: {},
 };
 
@@ -299,7 +301,11 @@ reducers[ACTIONS.CLAIM_SEARCH_STARTED] = (state: State, action: any): State => {
 reducers[ACTIONS.CLAIM_SEARCH_COMPLETED] = (state: State, action: any): State => {
   const fetchingClaimSearchByQuery = Object.assign({}, state.fetchingClaimSearchByQuery);
   const claimSearchByQuery = Object.assign({}, state.claimSearchByQuery);
-  const { append, query, uris } = action.data;
+  const claimSearchByQueryLastPageReached = Object.assign(
+    {},
+    state.claimSearchByQueryLastPageReached
+  );
+  const { append, query, uris, pageSize } = action.data;
 
   if (append) {
     // todo: check for duplicate uris when concatenating?
@@ -311,18 +317,22 @@ reducers[ACTIONS.CLAIM_SEARCH_COMPLETED] = (state: State, action: any): State =>
     claimSearchByQuery[query] = uris;
   }
 
+  // the returned number of uris is less than the page size, so we're on the last page
+  claimSearchByQueryLastPageReached[query] = uris.length < pageSize;
+
   delete fetchingClaimSearchByQuery[query];
 
   return Object.assign({}, state, {
     ...handleClaimAction(state, action),
     claimSearchByQuery,
+    claimSearchByQueryLastPageReached,
     fetchingClaimSearchByQuery,
   });
 };
 
 reducers[ACTIONS.CLAIM_SEARCH_FAILED] = (state: State, action: any): State => {
   const fetchingClaimSearchByQuery = Object.assign({}, state.fetchingClaimSearchByQuery);
-  fetchingClaimSearchByQuery[action.data.tags] = false;
+  delete fetchingClaimSearchByQuery[action.data.query];
 
   return Object.assign({}, state, {
     fetchingClaimSearchByQuery,
@@ -222,9 +222,9 @@ export const makeSelectDateForUri = (uri: string) =>
       claim.value &&
       (claim.value.release_time
         ? claim.value.release_time * 1000
-        : claim.meta.creation_timestamp
-        ? claim.meta.creation_timestamp * 1000
-        : null);
+        : claim.meta && claim.meta.creation_timestamp
+        ? claim.meta.creation_timestamp * 1000
+        : null);
     if (!timestamp) {
       return undefined;
     }
@@ -515,6 +515,11 @@ export const selectClaimSearchByQuery = createSelector(
   state => state.claimSearchByQuery || {}
 );
 
+export const selectClaimSearchByQueryLastPageReached = createSelector(
+  selectState,
+  state => state.claimSearchByQueryLastPageReached || {}
+);
+
 export const makeSelectClaimSearchUrisByOptions = (options: {}) =>
   createSelector(
     selectClaimSearchByQuery,
@@ -29,7 +29,7 @@ export const selectUnfollowedTags = createSelector(
     Object.keys(tagsByName).forEach(key => {
       if (!followedTagsSet.has(key)) {
         const { name } = tagsByName[key];
-        tagsToReturn.push({ name });
+        tagsToReturn.push({ name: name.toLowerCase() });
       }
     });
 