Publishing #158

Merged
akinwale merged 5 commits from publishing into master on 2019-07-01 21:49:51 +02:00
27 changed files with 1512 additions and 661 deletions
Showing only changes of commit 24f57f9e01


@ -10,6 +10,7 @@
"__": true
},
"rules": {
"camelcase": 0,
"no-multi-spaces": 0,
"new-cap": 0,
"prefer-promise-reject-errors": 0,

dist/bundle.es.js vendored (file diff suppressed because it is too large)
dist/flow-typed/Claim.js, dist/flow-typed/Comment.js, dist/flow-typed/Lbry.js, dist/flow-typed/Tags.js vendored (diffs identical to the corresponding flow-typed/ files below)

flow-typed/Claim.js vendored

@ -1,49 +1,51 @@
// @flow
declare type ClaimWithPossibleCertificate = {
certificate?: ChannelClaim,
claim: StreamClaim,
};
declare type Claim = StreamClaim | ChannelClaim;
declare type ChannelClaim = GenericClaim & {
is_channel_signature_valid?: boolean, // we may have signed channels in the future
value: ChannelMetadata,
};
declare type StreamClaim = GenericClaim & {
is_channel_signature_valid?: boolean,
signing_channel?: {
claim_id: string,
name: string,
value: {
public_key: string,
},
},
value: StreamMetadata,
};
declare type GenericClaim = {
address: string, // address associated with tx
amount: number, // bid amount at time of tx
amount: string, // bid amount at time of tx
canonical_url: string, // URL with short id, includes channel with short id
claim_id: string, // unique claim identifier
claim_sequence: number,
claim_sequence: number, // not being used currently
claim_op: 'create' | 'update',
confirmations: number, // This isn't the most stable atm: https://github.com/lbryio/lbry/issues/2000
decoded_claim: boolean, // claim made in accordance with sdk protobuf types
effective_amount: number, // bid amount + supports
timestamp?: number, // date of transaction
has_signature: boolean,
confirmations: number,
decoded_claim: boolean, // Not available currently https://github.com/lbryio/lbry/issues/2044
timestamp?: number, // date of last transaction
height: number, // block height the tx was confirmed
hex: string, // `value` hex encoded
name: string,
channel_name?: string,
normalized_name: string, // `name` normalized via unicode NFD spec,
nout: number, // index number for an output of a tx
permanent_url: string, // name + claim_id
supports: Array<{}>, // TODO: add support type once we start using it
short_url: string, // permanent_url with short id, no channel
txid: string, // unique tx id
type: 'claim' | 'update' | 'support',
valid_at_height?: number, // BUG: this should always exist https://github.com/lbryio/lbry/issues/1728
value_type: 'stream' | 'channel',
signing_channel?: ChannelClaim,
meta: {
activation_height: number,
claims_in_channel?: number,
creation_height: number,
creation_timestamp: number,
effective_amount: string,
expiration_height: number,
is_controlling: boolean,
support_amount: string,
trending_global: number,
trending_group: number,
trending_local: number,
trending_mixed: number,
},
};
declare type GenericMetadata = {
@ -59,6 +61,7 @@ declare type GenericMetadata = {
declare type ChannelMetadata = GenericMetadata & {
public_key: string,
public_key_id: string,
cover_url?: string,
email?: string,
website_url?: string,
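
For reference, a minimal sketch of how a consumer might branch on the new Claim union using the value_type field declared above (describeClaim is a hypothetical helper, not part of this PR):

  // Narrow a Claim into its channel/stream variants via value_type.
  function describeClaim(claim: Claim): string {
    if (claim.value_type === 'channel') {
      // ChannelClaim: `value` is ChannelMetadata (public_key, cover_url, ...)
      return `channel ${claim.name}#${claim.claim_id}`;
    }
    // StreamClaim: `value` is StreamMetadata and may carry a signing_channel
    const signer = claim.signing_channel ? ` (signed by ${claim.signing_channel.name})` : '';
    return `stream ${claim.name}#${claim.claim_id}${signer}`;
  }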

flow-typed/Comment.js vendored (new file)

@ -0,0 +1,18 @@
declare type Comment = {
author: string,
claim_index?: number,
comment_id?: number,
downvotes?: number,
message: string,
omitted?: number,
reply_count?: number,
time_posted?: number,
upvotes?: number,
parent_id?: number,
};
declare type CommentsState = {
byId: {},
isLoading: boolean,
commentsByUri: { [string]: string },
}
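
As a rough illustration of how the reducer further down fills this shape (all ids and values below are made up):

  // Hypothetical populated CommentsState: comments normalized by claim_id,
  // with commentsByUri mapping a resolved uri back to that claim_id.
  const exampleCommentsState: CommentsState = {
    byId: {
      abc123: [{ author: 'alice', message: 'First!', comment_id: 1 }],
    },
    commentsByUri: {
      'lbry://some-claim#abc123': 'abc123',
    },
    isLoading: false,
  };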

flow-typed/Lbry.js vendored

@ -66,8 +66,8 @@ declare type VersionResponse = {
declare type ResolveResponse = {
// Keys are the url(s) passed to resolve
[string]:
| { error: {}, certificate: ChannelClaim, claims_in_channel: number }
| { error?: {}, claim: StreamClaim, certificate?: ChannelClaim },
| Claim
| { error?: {} },
};
declare type GetResponse = FileListItem;
@ -86,24 +86,28 @@ declare type GenericTxResponse = {
declare type PublishResponse = GenericTxResponse & {
// Only first value in outputs is a claim
// That's the only value we care about
outputs: Array<StreamClaim>,
outputs: Array<Claim>,
};
declare type ClaimSearchResponse = {
items: Array<StreamClaim>,
items: Array<Claim>,
page: number,
page_size: number,
page_number: number,
total_items: number,
total_pages: number,
};
declare type ClaimListResponse = {
claims: Array<ChannelClaim | StreamClaim>,
claims: Array<ChannelClaim | Claim>,
};
declare type ChannelCreateResponse = GenericTxResponse & {
outputs: Array<ChannelClaim>,
};
declare type CommentCreateResponse = Comment;
declare type CommentListResponse = Array<Comment>;
declare type ChannelListResponse = Array<ChannelClaim>;
declare type FileListResponse = Array<FileListItem>;
@ -183,6 +187,9 @@ declare type LbryTypes = {
blob_delete: (params: {}) => Promise<string>,
blob_list: (params: {}) => Promise<BlobListResponse>,
// Commenting
comment_list: (params: {}) => Promise<CommentListResponse>,
comment_create: (params: {}) => Promise<CommentCreateResponse>,
// Wallet utilities
account_balance: (params: {}) => Promise<string>,
account_decrypt: (prams: {}) => Promise<boolean>,
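
A small sketch of calling the two new daemon methods directly; the parameter names follow the calls made in redux/actions/comments.js below, and the claim_id/channel_id values are placeholders:

  // List existing comments for a claim...
  Lbry.comment_list({ claim_id: 'abc123' }).then((comments: CommentListResponse) => {
    console.log('comments', comments);
  });

  // ...and create a new comment signed by one of the user's channels.
  Lbry.comment_create({ comment: 'Nice upload!', claim_id: 'abc123', channel_id: 'def456' })
    .then((created: CommentCreateResponse) => console.log('posted', created.message));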

flow-typed/Tags.js vendored (new file)

@ -0,0 +1,21 @@
declare type TagState = {
followedTags: FollowedTags,
knownTags: KnownTags,
};
declare type Tag = {
name: string,
};
declare type KnownTags = {
[string]: Tag,
};
declare type FollowedTags = Array<string>;
declare type TagAction = {
type: string,
data: {
name: string,
},
};
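
tagsReducerBuilder (further down in this PR) leaves the initial TagState up to the app; a minimal example matching these declarations, with placeholder tag names:

  // Example initial state handed to tagsReducerBuilder by the consuming app.
  const defaultTagState: TagState = {
    followedTags: ['art', 'technology'],
    knownTags: {
      art: { name: 'art' },
      technology: { name: 'technology' },
    },
  };
  // const tagsReducer = tagsReducerBuilder(defaultTagState);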


@ -88,6 +88,17 @@ export const SET_CONTENT_POSITION = 'SET_CONTENT_POSITION';
export const SET_CONTENT_LAST_VIEWED = 'SET_CONTENT_LAST_VIEWED';
export const CLEAR_CONTENT_HISTORY_URI = 'CLEAR_CONTENT_HISTORY_URI';
export const CLEAR_CONTENT_HISTORY_ALL = 'CLEAR_CONTENT_HISTORY_ALL';
export const CLAIM_SEARCH_STARTED = 'CLAIM_SEARCH_STARTED';
export const CLAIM_SEARCH_COMPLETED = 'CLAIM_SEARCH_COMPLETED';
export const CLAIM_SEARCH_FAILED = 'CLAIM_SEARCH_FAILED';
// Comments
export const COMMENT_LIST_STARTED = 'COMMENT_LIST_STARTED';
export const COMMENT_LIST_COMPLETED = 'COMMENT_LIST_COMPLETED';
export const COMMENT_LIST_FAILED = 'COMMENT_LIST_FAILED';
export const COMMENT_CREATE_STARTED = 'COMMENT_CREATE_STARTED';
export const COMMENT_CREATE_COMPLETED = 'COMMENT_CREATE_COMPLETED';
export const COMMENT_CREATE_FAILED = 'COMMENT_CREATE_FAILED';
// Files
export const FILE_LIST_STARTED = 'FILE_LIST_STARTED';
@ -177,21 +188,6 @@ export const FETCH_REWARD_CONTENT_COMPLETED = 'FETCH_REWARD_CONTENT_COMPLETED';
export const DOWNLOAD_LANGUAGE_SUCCEEDED = 'DOWNLOAD_LANGUAGE_SUCCEEDED';
export const DOWNLOAD_LANGUAGE_FAILED = 'DOWNLOAD_LANGUAGE_FAILED';
// ShapeShift
export const GET_SUPPORTED_COINS_START = 'GET_SUPPORTED_COINS_START';
export const GET_SUPPORTED_COINS_SUCCESS = 'GET_SUPPORTED_COINS_SUCCESS';
export const GET_SUPPORTED_COINS_FAIL = 'GET_SUPPORTED_COINS_FAIL';
export const GET_COIN_STATS_START = 'GET_COIN_STATS_START';
export const GET_COIN_STATS_SUCCESS = 'GET_COIN_STATS_SUCCESS';
export const GET_COIN_STATS_FAIL = 'GET_COIN_STATS_FAIL';
export const PREPARE_SHAPE_SHIFT_START = 'PREPARE_SHAPE_SHIFT_START';
export const PREPARE_SHAPE_SHIFT_SUCCESS = 'PREPARE_SHAPE_SHIFT_SUCCESS';
export const PREPARE_SHAPE_SHIFT_FAIL = 'PREPARE_SHAPE_SHIFT_FAIL';
export const GET_ACTIVE_SHIFT_START = 'GET_ACTIVE_SHIFT_START';
export const GET_ACTIVE_SHIFT_SUCCESS = 'GET_ACTIVE_SHIFT_SUCCESS';
export const GET_ACTIVE_SHIFT_FAIL = 'GET_ACTIVE_SHIFT_FAIL';
export const CLEAR_SHAPE_SHIFT = 'CLEAR_SHAPE_SHIFT';
// Subscriptions
export const CHANNEL_SUBSCRIBE = 'CHANNEL_SUBSCRIBE';
export const CHANNEL_UNSUBSCRIBE = 'CHANNEL_UNSUBSCRIBE';
@ -229,3 +225,7 @@ export const FETCH_DATE = 'FETCH_DATE';
export const FETCH_COST_INFO_STARTED = 'FETCH_COST_INFO_STARTED';
export const FETCH_COST_INFO_COMPLETED = 'FETCH_COST_INFO_COMPLETED';
export const FETCH_COST_INFO_FAILED = 'FETCH_COST_INFO_FAILED';
// Tags
export const TOGGLE_TAG_FOLLOW = 'TOGGLE_TAG_FOLLOW';
export const TAG_ADD = 'TAG_ADD';
export const TAG_DELETE = 'TAG_DELETE';


@ -49,6 +49,7 @@ export {
doResolveUri,
doFetchChannelListMine,
doCreateChannel,
doClaimSearch,
} from 'redux/actions/claims';
export { doDeletePurchasedUri, doPurchaseUri, doFileGet } from 'redux/actions/file';
@ -101,6 +102,10 @@ export {
doUpdateBlockHeight,
} from 'redux/actions/wallet';
export { doToggleTagFollow, doAddTag, doDeleteTag } from 'redux/actions/tags';
export { doCommentList, doCommentCreate } from 'redux/actions/comments';
// utils
export { batchActions } from 'util/batchActions';
export { parseQueryParams, toQueryString } from 'util/query_params';
@ -109,12 +114,14 @@ export { isClaimNsfw } from 'util/claim';
// reducers
export { claimsReducer } from 'redux/reducers/claims';
export { commentReducer } from 'redux/reducers/comments';
export { contentReducer } from 'redux/reducers/content';
export { fileReducer } from 'redux/reducers/file';
export { fileInfoReducer } from 'redux/reducers/file_info';
export { fileReducer } from 'redux/reducers/file';
export { notificationsReducer } from 'redux/reducers/notifications';
export { searchReducer } from 'redux/reducers/search';
export { publishReducer } from 'redux/reducers/publish';
export { searchReducer } from 'redux/reducers/search';
export { tagsReducerBuilder } from 'redux/reducers/tags';
export { walletReducer } from 'redux/reducers/wallet';
// selectors
@ -142,6 +149,7 @@ export {
makeSelectCoverForUri,
makeSelectTitleForUri,
makeSelectDateForUri,
makeSelectTagsForUri,
makeSelectContentTypeForUri,
makeSelectIsUriResolving,
makeSelectTotalItemsForChannel,
@ -167,6 +175,7 @@ export {
selectPendingClaims,
selectMyClaims,
selectMyClaimsWithoutChannels,
selectMyClaimUrisWithoutChannels,
selectAllMyClaimsByOutpoint,
selectMyClaimsOutpoints,
selectFetchingMyChannels,
@ -175,8 +184,12 @@ export {
selectPlayingUri,
selectChannelClaimCounts,
selectCurrentChannelPage,
selectFetchingClaimSearch,
selectLastClaimSearchUris,
} from 'redux/selectors/claims';
export { makeSelectCommentsForUri } from 'redux/selectors/comments';
export {
makeSelectFileInfoForUri,
makeSelectDownloadingForUri,
@ -192,6 +205,7 @@ export {
selectSearchDownloadUris,
selectFileListDownloadedSort,
selectFileListPublishedSort,
selectDownloadedUris,
} from 'redux/selectors/file_info';
export {
@ -244,3 +258,5 @@ export {
selectWalletUnlockResult,
selectTransactionListFilter,
} from 'redux/selectors/wallet';
export { selectFollowedTags, selectUnfollowedTags } from 'redux/selectors/tags';


@ -100,6 +100,9 @@ const Lbry: LbryTypes = {
sync_hash: (params = {}) => daemonCallWithResult('sync_hash', params),
sync_apply: (params = {}) => daemonCallWithResult('sync_apply', params),
// Comments
comment_list: (params = {}) => daemonCallWithResult('comment_list', params),
comment_create: (params = {}) => daemonCallWithResult('comment_create', params),
// Connect to the sdk
connect: () => {
if (Lbry.connectPromise === null) {


@ -1,7 +1,8 @@
const channelNameMinLength = 1;
const claimIdMaxLength = 40;
export const regexInvalidURI = /[^A-Za-z0-9-]/g;
// see https://spec.lbry.com/#urls
export const regexInvalidURI = (exports.regexInvalidURI = /[=&#:$@%?\u{0000}-\u{0008}\u{000b}-\u{000c}\u{000e}-\u{001F}\u{D800}-\u{DFFF}\u{FFFE}-\u{FFFF}]/gu);
export const regexAddress = /^(b|r)(?=[^0OIl]{32,33})[0-9A-Za-z]{32,33}$/;
/**
@ -200,9 +201,8 @@ export function isURIValid(URI) {
return parts && parts.claimName;
}
export function isNameValid(claimName, checkCase = true) {
const regexp = new RegExp('^[a-z0-9-]+$', checkCase ? '' : 'i');
return regexp.test(claimName);
export function isNameValid(claimName) {
return !regexInvalidURI.test(claimName);
}
export function isURIClaimable(URI) {
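
Roughly, name validation switches from a whitelist (lowercase a-z, 0-9, '-') to the blacklist of characters reserved by https://spec.lbry.com/#urls; an illustrative before/after, assuming the implementation above:

  isNameValid('LBRY-Name'); // now true; the old ^[a-z0-9-]+$ check rejected uppercase by default
  isNameValid('who?');      // false: '?' is one of the reserved characters in regexInvalidURI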


@ -7,6 +7,7 @@ import { selectMyClaimsRaw, selectResolvingUris, selectClaimsByUri } from 'redux
import { doFetchTransactions } from 'redux/actions/wallet';
import { selectSupportsByOutpoint } from 'redux/selectors/wallet';
import { creditsToString } from 'util/formatCredits';
import { batchActions } from 'util/batchActions';
export function doResolveUris(uris: Array<string>, returnCachedClaims: boolean = false) {
return (dispatch: Dispatch, getState: GetState) => {
@ -34,8 +35,8 @@ export function doResolveUris(uris: Array<string>, returnCachedClaims: boolean =
const resolveInfo: {
[string]: {
claim: ?StreamClaim,
certificate: ?ChannelClaim,
stream: ?StreamClaim,
channel: ?ChannelClaim,
claimsInChannel: ?number,
},
} = {};
@ -43,20 +44,35 @@ export function doResolveUris(uris: Array<string>, returnCachedClaims: boolean =
Lbry.resolve({ urls: urisToResolve }).then((result: ResolveResponse) => {
Object.entries(result).forEach(([uri, uriResolveInfo]) => {
const fallbackResolveInfo = {
claim: null,
stream: null,
claimsInChannel: null,
certificate: null,
channel: null,
};
// Flow has terrible Object.entries support
// https://github.com/facebook/flow/issues/2221
// $FlowFixMe
if (uriResolveInfo) {
if (uriResolveInfo.error) {
resolveInfo[uri] = { ...fallbackResolveInfo };
} else {
let result = {};
if (uriResolveInfo.value_type === 'channel') {
result.channel = uriResolveInfo;
// $FlowFixMe
const { claim, certificate, claims_in_channel: claimsInChannel } = uriResolveInfo;
resolveInfo[uri] = { claim, certificate, claimsInChannel };
result.claimsInChannel = uriResolveInfo.meta.claims_in_channel;
} else {
result.stream = uriResolveInfo;
if (uriResolveInfo.signing_channel) {
result.channel = uriResolveInfo.signing_channel;
result.claimsInChannel =
(uriResolveInfo.signing_channel.meta &&
uriResolveInfo.signing_channel.meta.claims_in_channel) ||
0;
}
}
// $FlowFixMe
resolveInfo[uri] = result;
}
}
});
@ -94,7 +110,7 @@ export function doAbandonClaim(txid: string, nout: number) {
return (dispatch: Dispatch, getState: GetState) => {
const state = getState();
const myClaims: Array<ChannelClaim | StreamClaim> = selectMyClaimsRaw(state);
const myClaims: Array<Claim> = selectMyClaimsRaw(state);
const mySupports: { [string]: Support } = selectSupportsByOutpoint(state);
// A user could be trying to abandon a support or one of their claims
@ -182,8 +198,12 @@ export function doFetchClaimsByChannel(uri: string, page: number = 1) {
data: { uri, page },
});
Lbry.claim_search({ channel_name: uri, page: page || 1, winning: true }).then(
(result: ClaimSearchResponse) => {
Lbry.claim_search({
channel: uri,
valid_channel_signature: true,
page: page || 1,
order_by: ['release_time'],
}).then((result: ClaimSearchResponse) => {
const { items: claimsInChannel, page: returnedPage } = result;
dispatch({
@ -194,8 +214,7 @@ export function doFetchClaimsByChannel(uri: string, page: number = 1) {
page: returnedPage || undefined,
},
});
}
);
});
};
}
@ -245,3 +264,37 @@ export function doFetchChannelListMine() {
Lbry.channel_list().then(callback);
};
}
export function doClaimSearch(amount: number = 20, options: {} = {}) {
return (dispatch: Dispatch) => {
dispatch({
type: ACTIONS.CLAIM_SEARCH_STARTED,
});
const success = (data: ClaimSearchResponse) => {
const resolveInfo = {};
const uris = [];
data.items.forEach((stream: Claim) => {
resolveInfo[stream.permanent_url] = { stream };
uris.push(stream.permanent_url);
});
dispatch({
type: ACTIONS.CLAIM_SEARCH_COMPLETED,
data: { resolveInfo, uris },
});
};
const failure = err => {
dispatch({
type: ACTIONS.CLAIM_SEARCH_FAILED,
error: err,
});
};
Lbry.claim_search({
page_size: amount,
...options,
}).then(success, failure);
};
}
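
A hedged usage sketch for the new doClaimSearch thunk, assuming a store wired with redux-thunk and claimsReducer mounted where the claim selectors expect it; the options shown mirror what this file already passes to claim_search:

  // Fetch 20 claims ordered by release time...
  store.dispatch(doClaimSearch(20, { order_by: ['release_time'] }));

  // ...then read the results back out with the selectors added in redux/selectors/claims.js.
  const uris = selectLastClaimSearchUris(store.getState());
  const stillFetching = selectFetchingClaimSearch(store.getState());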


@ -0,0 +1,80 @@
// @flow
import * as ACTIONS from 'constants/action_types';
import Lbry from 'lbry';
import { selectClaimsByUri, selectMyChannelClaims } from 'redux/selectors/claims';
import { doToast } from 'redux/actions/notifications';
export function doCommentList(uri: string) {
return (dispatch: Dispatch, getState: GetState) => {
const state = getState();
const claim = selectClaimsByUri(state)[uri];
const claimId = claim ? claim.claim_id : null;
dispatch({
type: ACTIONS.COMMENT_LIST_STARTED,
});
Lbry.comment_list({
claim_id: claimId,
})
.then((results: CommentListResponse) => {
dispatch({
type: ACTIONS.COMMENT_LIST_COMPLETED,
data: {
comments: results,
claimId: claimId,
uri: uri,
},
});
})
.catch(error => {
console.log(error);
dispatch({
type: ACTIONS.COMMENT_LIST_FAILED,
data: error,
});
});
};
}
export function doCommentCreate(
comment: string = '',
claim_id: string = '',
channel: ?string,
parent_id?: number
) {
return (dispatch: Dispatch, getState: GetState) => {
const state = getState();
dispatch({
type: ACTIONS.COMMENT_CREATE_STARTED,
});
const myChannels = selectMyChannelClaims(state);
const namedChannelClaim = myChannels.find(myChannel => myChannel.name === channel);
const channel_id = namedChannelClaim ? namedChannelClaim.claim_id : null;
return Lbry.comment_create({
comment,
claim_id,
channel_id,
})
.then((result: Comment) => {
dispatch({
type: ACTIONS.COMMENT_CREATE_COMPLETED,
data: {
comment: result,
claimId: claim_id,
},
});
})
.catch(error => {
dispatch({
type: ACTIONS.COMMENT_CREATE_FAILED,
data: error,
});
dispatch(
doToast({
message: 'Oops, someone broke comments.',
isError: true,
})
);
});
};
}
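
For reference, a minimal component-side sketch of the two comment thunks; the uri, claim_id, and channel name are placeholders, and the claim is assumed to be resolved already so its claim_id is in the store:

  // Load the comment list when a claim's page mounts...
  dispatch(doCommentList('lbry://some-claim#abc123'));

  // ...and post a new top-level comment as one of the user's own channels.
  dispatch(doCommentCreate('Great upload!', 'abc123', '@my-channel'));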

src/redux/actions/tags.js (new file)

@ -0,0 +1,24 @@
// @flow
import * as ACTIONS from 'constants/action_types';
import Lbry from 'lbry';
export const doToggleTagFollow = (name: string) => ({
type: ACTIONS.TOGGLE_TAG_FOLLOW,
data: {
name,
},
});
export const doAddTag = (name: string) => ({
type: ACTIONS.TAG_ADD,
data: {
name,
},
});
export const doDeleteTag = (name: string) => ({
type: ACTIONS.TAG_DELETE,
data: {
name,
},
});


@ -14,9 +14,9 @@ import { buildURI, parseURI } from 'lbryURI';
type State = {
channelClaimCounts: { [string]: number },
claimsByUri: { [string]: string },
byId: { [string]: StreamClaim | ChannelClaim },
byId: { [string]: Claim },
resolvingUris: Array<string>,
pendingById: { [string]: StreamClaim | ChannelClaim },
pendingById: { [string]: Claim },
myChannelClaims: Set<string>,
abandoningById: { [string]: boolean },
fetchingChannelClaims: { [string]: number },
@ -43,40 +43,43 @@ const defaultState = {
fetchingMyChannels: false,
abandoningById: {},
pendingById: {},
fetchingClaimSearch: false,
lastClaimSearchUris: [],
};
reducers[ACTIONS.RESOLVE_URIS_COMPLETED] = (state: State, action: any): State => {
const { resolveInfo }: { [string]: ClaimWithPossibleCertificate } = action.data;
function handleClaimAction(state: State, action: any): State {
const {
resolveInfo,
}: {
[string]: {
stream: ?StreamClaim,
channel: ?ChannelClaim,
claimsInChannel: ?number,
},
} = action.data;
const byUri = Object.assign({}, state.claimsByUri);
const byId = Object.assign({}, state.byId);
const channelClaimCounts = Object.assign({}, state.channelClaimCounts);
Object.entries(resolveInfo).forEach(
([uri: string, resolveResponse: ClaimWithPossibleCertificate]) => {
Object.entries(resolveInfo).forEach(([uri: string, resolveResponse: Claim]) => {
// $FlowFixMe
if (resolveResponse.certificate && !Number.isNaN(resolveResponse.claimsInChannel)) {
if (resolveResponse.claimsInChannel) {
// $FlowFixMe
channelClaimCounts[uri] = resolveResponse.claimsInChannel;
}
}
);
});
// $FlowFixMe
Object.entries(resolveInfo).forEach(([uri, { certificate, claim }]) => {
if (claim && !certificate) {
byId[claim.claim_id] = claim;
byUri[uri] = claim.claim_id;
} else if (claim && certificate) {
byId[claim.claim_id] = claim;
byUri[uri] = claim.claim_id;
byId[certificate.claim_id] = certificate;
const channelUri = `lbry://${certificate.name}#${certificate.claim_id}`;
byUri[channelUri] = certificate.claim_id;
} else if (!claim && certificate) {
byId[certificate.claim_id] = certificate;
byUri[uri] = certificate.claim_id;
} else {
Object.entries(resolveInfo).forEach(([uri, { channel, stream }]) => {
if (stream) {
byId[stream.claim_id] = stream;
byUri[uri] = stream.claim_id;
}
if (channel) {
byId[channel.claim_id] = channel;
byUri[stream ? channel.permanent_url : uri] = channel.claim_id;
}
if (!stream && !channel) {
byUri[uri] = null;
}
});
@ -87,6 +90,12 @@ reducers[ACTIONS.RESOLVE_URIS_COMPLETED] = (state: State, action: any): State =>
channelClaimCounts,
resolvingUris: (state.resolvingUris || []).filter(uri => !resolveInfo[uri]),
});
}
reducers[ACTIONS.RESOLVE_URIS_COMPLETED] = (state: State, action: any): State => {
return {
...handleClaimAction(state, action),
};
};
reducers[ACTIONS.FETCH_CLAIM_LIST_MINE_STARTED] = (state: State): State =>
@ -95,15 +104,12 @@ reducers[ACTIONS.FETCH_CLAIM_LIST_MINE_STARTED] = (state: State): State =>
});
reducers[ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED] = (state: State, action: any): State => {
const { claims }: { claims: Array<StreamClaim | ChannelClaim> } = action.data;
const { claims }: { claims: Array<Claim> } = action.data;
const byId = Object.assign({}, state.byId);
const byUri = Object.assign({}, state.claimsByUri);
const pendingById: { [string]: StreamClaim | ChannelClaim } = Object.assign(
{},
state.pendingById
);
const pendingById: { [string]: Claim } = Object.assign({}, state.pendingById);
claims.forEach((claim: StreamClaim | ChannelClaim) => {
claims.forEach((claim: Claim) => {
const uri = buildURI({ claimName: claim.name, claimId: claim.claim_id });
if (claim.type && claim.type.match(/claim|update/)) {
@ -267,6 +273,24 @@ reducers[ACTIONS.RESOLVE_URIS_STARTED] = (state: State, action: any): State => {
});
};
reducers[ACTIONS.CLAIM_SEARCH_STARTED] = (state: State): State => {
return Object.assign({}, state, {
fetchingClaimSearch: true,
});
};
reducers[ACTIONS.CLAIM_SEARCH_COMPLETED] = (state: State, action: any): State => {
return {
...handleClaimAction(state, action),
fetchingClaimSearch: false,
lastClaimSearchUris: action.data.uris,
};
};
reducers[ACTIONS.CLAIM_SEARCH_FAILED] = (state: State): State => {
return Object.assign({}, state, {
fetchingClaimSearch: false,
});
};
export function claimsReducer(state: State = defaultState, action: any) {
const handler = reducers[action.type];
if (handler) return handler(state, action);


@ -0,0 +1,63 @@
// @flow
import * as ACTIONS from 'constants/action_types';
import { handleActions } from 'util/redux-utils';
const defaultState: CommentsState = {
byId: {},
commentsByUri: {},
isLoading: false,
};
export const commentReducer = handleActions(
{
[ACTIONS.COMMENT_CREATE_STARTED]: (state: CommentsState, action: any): CommentsState => ({
...state,
isLoading: true,
}),
[ACTIONS.COMMENT_CREATE_FAILED]: (state: CommentsState, action: any) => ({
...state,
isLoading: false,
}),
[ACTIONS.COMMENT_CREATE_COMPLETED]: (state: CommentsState, action: any): CommentsState => {
const { comment, claimId }: any = action.data;
const byId = Object.assign({}, state.byId);
const comments = byId[claimId];
const newComments = comments.slice();
newComments.unshift(comment);
byId[claimId] = newComments;
return {
...state,
byId,
};
},
[ACTIONS.COMMENT_LIST_STARTED]: state => ({ ...state, isLoading: true }),
[ACTIONS.COMMENT_LIST_COMPLETED]: (state: CommentsState, action: any) => {
const { comments, claimId, uri } = action.data;
const byId = Object.assign({}, state.byId);
const commentsByUri = Object.assign({}, state.commentsByUri);
if (comments['items']) {
byId[claimId] = comments['items'];
commentsByUri[uri] = claimId;
}
return {
...state,
byId,
commentsByUri,
isLoading: false,
};
},
[ACTIONS.COMMENT_LIST_FAILED]: (state: CommentsState, action: any) => ({
...state,
isLoading: false,
}),
},
defaultState
);


@ -161,16 +161,6 @@ reducers[ACTIONS.LOADING_VIDEO_FAILED] = (state, action) => {
});
};
reducers[ACTIONS.FETCH_DATE] = (state, action) => {
const { time } = action.data;
if (time) {
return Object.assign({}, state, {
publishedDate: time,
});
}
return null;
};
reducers[ACTIONS.SET_FILE_LIST_SORT] = (state, action) => {
const pageSortStates = {
[PAGES.PUBLISHED]: 'fileListPublishedSort',


@ -0,0 +1,55 @@
// @flow
import * as ACTIONS from 'constants/action_types';
import { handleActions } from 'util/redux-utils';
export const tagsReducerBuilder = (defaultState: TagState) =>
handleActions(
{
[ACTIONS.TOGGLE_TAG_FOLLOW]: (state: TagState, action: TagAction): TagState => {
const { followedTags } = state;
const { name } = action.data;
let newFollowedTags = followedTags.slice();
if (newFollowedTags.includes(name)) {
newFollowedTags = newFollowedTags.filter(tag => tag !== name);
} else {
newFollowedTags.push(name);
}
return {
...state,
followedTags: newFollowedTags,
};
},
[ACTIONS.TAG_ADD]: (state: TagState, action: TagAction) => {
const { knownTags } = state;
const { name } = action.data;
let newKnownTags = { ...knownTags };
newKnownTags[name] = { name };
return {
...state,
knownTags: newKnownTags,
};
},
[ACTIONS.TAG_DELETE]: (state: TagState, action: TagAction) => {
const { knownTags, followedTags } = state;
const { name } = action.data;
let newKnownTags = { ...knownTags };
delete newKnownTags[name];
const newFollowedTags = followedTags.filter(tag => tag !== name);
return {
...state,
knownTags: newKnownTags,
followedTags: newFollowedTags,
};
},
},
defaultState
);


@ -190,7 +190,12 @@ export const makeSelectDateForUri = (uri: string) =>
createSelector(
makeSelectClaimForUri(uri),
claim => {
const timestamp = claim && claim.timestamp ? claim.timestamp * 1000 : undefined;
const timestamp =
claim &&
claim.value &&
(claim.value.release_time
? claim.value.release_time * 1000
: claim.meta.creation_timestamp * 1000);
if (!timestamp) {
return undefined;
}
@ -254,6 +259,11 @@ export const selectMyClaimsWithoutChannels = createSelector(
myClaims => myClaims.filter(claim => !claim.name.match(/^@/))
);
export const selectMyClaimUrisWithoutChannels = createSelector(
selectMyClaimsWithoutChannels,
myClaims => myClaims.map(claim => `lbry://${claim.name}#${claim.claim_id}`)
);
export const selectAllMyClaimsByOutpoint = createSelector(
selectMyClaimsRaw,
claims =>
@ -368,7 +378,7 @@ export const makeSelectClaimIsNsfw = (uri: string): boolean =>
// Or possibly come from users settings of what tags they want to hide
// For now, there is just a hard coded list of tags inside `isClaimNsfw`
// selectNaughtyTags(),
(claim: StreamClaim) => {
(claim: Claim) => {
if (!claim) {
return false;
}
@ -428,3 +438,21 @@ export const makeSelectChannelForClaimUri = (uri: string, includePrefix: boolean
return includePrefix ? `lbry://${channel}` : channel;
}
);
export const makeSelectTagsForUri = (uri: string) =>
createSelector(
makeSelectMetadataForUri(uri),
(metadata: ?GenericMetadata) => {
return (metadata && metadata.tags) || [];
}
);
export const selectFetchingClaimSearch = createSelector(
selectState,
state => state.fetchingClaimSearch
);
export const selectLastClaimSearchUris = createSelector(
selectState,
state => state.lastClaimSearchUris
);
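
A small usage sketch for the new per-uri tags selector, assuming state comes from a store using claimsReducer and the uri is already resolved:

  const selectTags = makeSelectTagsForUri('lbry://some-claim#abc123');
  const tags = selectTags(state); // -> the claim's metadata tags, or [] if none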


@ -0,0 +1,36 @@
// @flow
import { createSelector } from 'reselect';
const selectState = state => state.comments || {};
export const selectCommentsById = createSelector(
selectState,
state => state.byId || {}
);
export const selectCommentsByUri = createSelector(
selectState,
state => {
const byUri = state.commentsByUri || {};
const comments = {};
Object.keys(byUri).forEach(uri => {
const claimId = byUri[uri];
if (claimId === null) {
comments[uri] = null;
} else {
comments[uri] = claimId;
}
});
return comments;
}
);
export const makeSelectCommentsForUri = (uri: string) =>
createSelector(
selectCommentsById,
selectCommentsByUri,
(byId, byUri) => {
const claimId = byUri[uri];
return byId && byId[claimId];
}
);


@ -164,11 +164,13 @@ export const selectSearchDownloadUris = query =>
return;
}
if (title) {
const titleParts = title.toLowerCase().split(' ');
if (arrayContainsQueryPart(titleParts)) {
downloadResultsFromQuery.push(fileInfo);
return;
}
}
if (author) {
const authorParts = author.toLowerCase().split(' ');
@ -226,3 +228,9 @@ export const selectFileListDownloadedSort = createSelector(
selectState,
state => state.fileListDownloadedSort
);
export const selectDownloadedUris = createSelector(
selectFileInfosDownloaded,
// We should use permament_url but it doesn't exist in file_list
info => info.map(claim => `lbry://${claim.claim_name}#${claim.claim_id}`)
);


@ -57,20 +57,21 @@ export const selectSearchSuggestions: Array<SearchSuggestion> = createSelector(
return [];
}
const queryIsPrefix = query === 'lbry:' || query === 'lbry:/' || query === 'lbry://';
const queryIsPrefix =
query === 'lbry:' || query === 'lbry:/' || query === 'lbry://' || query === 'lbry://@';
if (query.startsWith('lbry://') && query !== 'lbry://') {
if (queryIsPrefix) {
// If it is a prefix, wait until something else comes to figure out what to do
return [];
} else if (query.startsWith('lbry://')) {
// If it starts with a prefix, don't show any autocomplete results
// They are probably typing/pasting in a lbry uri
return [
{
value: query,
type: SEARCH_TYPES.FILE,
type: query[7] === '@' ? SEARCH_TYPES.CHANNEL : SEARCH_TYPES.FILE,
},
];
} else if (queryIsPrefix) {
// If it is a prefix, wait until something else comes to figure out what to do
return [];
}
let searchSuggestions = [];
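
Sketching the effect of the reordered branches above (queries and results are illustrative): a pasted lbry:// URI now yields a single suggestion whose type depends on whether the path starts with '@', and a bare 'lbry://@' is treated as a prefix:

  selectSearchSuggestions(state); // query 'lbry://@lbry'      -> [{ value: 'lbry://@lbry', type: SEARCH_TYPES.CHANNEL }]
                                  // query 'lbry://some-video' -> [{ value: 'lbry://some-video', type: SEARCH_TYPES.FILE }]
                                  // query 'lbry://@'          -> [] (wait for more input)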


@ -0,0 +1,38 @@
// @flow
import { createSelector } from 'reselect';
const selectState = (state: { tags: TagState }) => state.tags || {};
export const selectKnownTagsByName = createSelector(
selectState,
(state: TagState): KnownTags => state.knownTags
);
export const selectFollowedTagsList = createSelector(
selectState,
(state: TagState): Array<string> => state.followedTags
);
export const selectFollowedTags = createSelector(
selectFollowedTagsList,
(followedTags: Array<string>): Array<Tag> =>
followedTags.map(tag => ({ name: tag })).sort((a, b) => a.name.localeCompare(b.name))
);
export const selectUnfollowedTags = createSelector(
selectKnownTagsByName,
selectFollowedTagsList,
(tagsByName: KnownTags, followedTags: Array<string>): Array<Tag> => {
const followedTagsSet = new Set(followedTags);
let tagsToReturn = [];
Object.keys(tagsByName).forEach(key => {
if (!followedTagsSet.has(key)) {
const { name } = tagsByName[key];
tagsToReturn.push({ name });
}
});
return tagsToReturn;
}
);
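
A small end-to-end sketch of the tag plumbing in this PR, assuming a store whose tags slice was built with tagsReducerBuilder and seeded with some knownTags:

  // Follow (or unfollow) a tag...
  store.dispatch(doToggleTagFollow('science'));

  // ...then read back the two derived lists.
  const followed = selectFollowedTags(store.getState());   // [{ name: 'science' }, ...] sorted by name
  const remaining = selectUnfollowedTags(store.getState()); // known tags not currently followed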


@ -5,7 +5,7 @@ const naughtyTags = ['porn', 'nsfw', 'mature', 'xxx'].reduce(
{}
);
export const isClaimNsfw = (claim: StreamClaim): boolean => {
export const isClaimNsfw = (claim: Claim): boolean => {
if (!claim) {
throw new Error('No claim passed to isClaimNsfw()');
}