Compare commits

..

1 commit

Author: jessop
SHA1: 387aa8d0a8
Message: add txo exclude internal transfers constant
Date: 2020-04-15 13:37:24 -04:00
83 changed files with 4579 additions and 11226 deletions


@ -1,6 +1,5 @@
[ignore] [ignore]
[include] [include]
[libs] [libs]
@ -13,5 +12,4 @@ module.name_mapper='^redux\(.*\)$' -> '<PROJECT_ROOT>/src/redux\1'
module.name_mapper='^util\(.*\)$' -> '<PROJECT_ROOT>/src/util\1' module.name_mapper='^util\(.*\)$' -> '<PROJECT_ROOT>/src/util\1'
module.name_mapper='^constants\(.*\)$' -> '<PROJECT_ROOT>/src/constants\1' module.name_mapper='^constants\(.*\)$' -> '<PROJECT_ROOT>/src/constants\1'
module.name_mapper='^lbry\(.*\)$' -> '<PROJECT_ROOT>/src/lbry\1' module.name_mapper='^lbry\(.*\)$' -> '<PROJECT_ROOT>/src/lbry\1'
module.name_mapper='^lbry-first\(.*\)$' -> '<PROJECT_ROOT>/src/lbry-first\1'
module.name_mapper='^lbryURI\(.*\)$' -> '<PROJECT_ROOT>/src/lbryURI\1' module.name_mapper='^lbryURI\(.*\)$' -> '<PROJECT_ROOT>/src/lbryURI\1'
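
These Flow module.name_mapper entries alias bare import names to folders under src/, so the type checker resolves the same module paths the build does. A minimal sketch of what that enables in source files; the specific imports are illustrative, assuming the mappers shown above:

// With the mappers above, Flow resolves aliased imports to src/<folder>:
import * as ACTIONS from 'constants/action_types'; // -> src/constants/action_types
import Lbry from 'lbry';                            // -> src/lbry
import { buildURI } from 'lbryURI';                 // -> src/lbryURI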


@ -1,6 +1,6 @@
The MIT License (MIT) The MIT License (MIT)
Copyright (c) 2017-2021 LBRY Inc Copyright (c) 2017-2020 LBRY Inc
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish,


@ -20,9 +20,6 @@ yarn link lbry-redux
### Build ### Build
Run `$ yarn build`. If the symlink does not work, just build the file and move the `bundle.js` file into the `node_modules/` folder. Run `$ yarn build`. If the symlink does not work, just build the file and move the `bundle.js` file into the `node_modules/` folder.
### Tests
Run `$ yarn test`.
## Contributing ## Contributing
We :heart: contributions from everyone! We welcome [bug reports](https://github.com/lbryio/lbry-redux/issues/), [bug fixes](https://github.com/lbryio/lbry-redux/pulls) and feedback on the module is always appreciated. We :heart: contributions from everyone! We welcome [bug reports](https://github.com/lbryio/lbry-redux/issues/), [bug fixes](https://github.com/lbryio/lbry-redux/pulls) and feedback on the module is always appreciated.

dist/bundle.es.js (vendored, 5275 lines changed): file diff suppressed because one or more lines are too long


@ -1,16 +1,14 @@
// @flow // @flow
declare type Claim = StreamClaim | ChannelClaim | CollectionClaim; declare type Claim = StreamClaim | ChannelClaim;
declare type ChannelClaim = GenericClaim & { declare type ChannelClaim = GenericClaim & {
is_channel_signature_valid?: boolean, // we may have signed channels in the future
value: ChannelMetadata, value: ChannelMetadata,
}; };
declare type CollectionClaim = GenericClaim & {
value: CollectionMetadata,
};
declare type StreamClaim = GenericClaim & { declare type StreamClaim = GenericClaim & {
is_channel_signature_valid?: boolean,
value: StreamMetadata, value: StreamMetadata,
}; };
@ -25,8 +23,7 @@ declare type GenericClaim = {
decoded_claim: boolean, // Not available currently https://github.com/lbryio/lbry/issues/2044 decoded_claim: boolean, // Not available currently https://github.com/lbryio/lbry/issues/2044
timestamp?: number, // date of last transaction timestamp?: number, // date of last transaction
height: number, // block height the tx was confirmed height: number, // block height the tx was confirmed
is_channel_signature_valid?: boolean, is_mine: boolean,
is_my_output: boolean,
name: string, name: string,
normalized_name: string, // `name` normalized via unicode NFD spec, normalized_name: string, // `name` normalized via unicode NFD spec,
nout: number, // index number for an output of a tx nout: number, // index number for an output of a tx
@ -34,13 +31,9 @@ declare type GenericClaim = {
short_url: string, // permanent_url with short id, no channel short_url: string, // permanent_url with short id, no channel
txid: string, // unique tx id txid: string, // unique tx id
type: 'claim' | 'update' | 'support', type: 'claim' | 'update' | 'support',
value_type: 'stream' | 'channel' | 'collection', value_type: 'stream' | 'channel',
signing_channel?: ChannelClaim, signing_channel?: ChannelClaim,
reposted_claim?: GenericClaim,
repost_channel_url?: string, repost_channel_url?: string,
repost_url?: string,
repost_bid_amount?: string,
purchase_receipt?: PurchaseReceipt,
meta: { meta: {
activation_height: number, activation_height: number,
claims_in_channel?: number, claims_in_channel?: number,
@ -78,10 +71,6 @@ declare type ChannelMetadata = GenericMetadata & {
featured?: Array<string>, featured?: Array<string>,
}; };
declare type CollectionMetadata = GenericMetadata & {
claims: Array<string>,
}
declare type StreamMetadata = GenericMetadata & { declare type StreamMetadata = GenericMetadata & {
license?: string, // License "title" ex: Creative Commons, Custom copyright license?: string, // License "title" ex: Creative Commons, Custom copyright
license_url?: string, // Link to full license license_url?: string, // Link to full license
@ -132,83 +121,3 @@ declare type Fee = {
currency: string, currency: string,
address: string, address: string,
}; };
declare type PurchaseReceipt = {
address: string,
amount: string,
claim_id: string,
confirmations: number,
height: number,
nout: number,
timestamp: number,
txid: string,
type: 'purchase',
};
declare type ClaimActionResolveInfo = {
[string]: {
stream: ?StreamClaim,
channel: ?ChannelClaim,
claimsInChannel: ?number,
collection: ?CollectionClaim,
},
}
declare type ChannelUpdateParams = {
claim_id: string,
bid?: string,
title?: string,
cover_url?: string,
thumbnail_url?: string,
description?: string,
website_url?: string,
email?: string,
tags?: Array<string>,
replace?: boolean,
languages?: Array<string>,
locations?: Array<string>,
blocking?: boolean,
}
declare type ChannelPublishParams = {
name: string,
bid: string,
blocking?: true,
title?: string,
cover_url?: string,
thumbnail_url?: string,
description?: string,
website_url?: string,
email?: string,
tags?: Array<string>,
languages?: Array<string>,
}
declare type CollectionUpdateParams = {
claim_id: string,
claim_ids?: Array<string>,
bid?: string,
title?: string,
cover_url?: string,
thumbnail_url?: string,
description?: string,
website_url?: string,
email?: string,
tags?: Array<string>,
replace?: boolean,
languages?: Array<string>,
locations?: Array<string>,
blocking?: boolean,
}
declare type CollectionPublishParams = {
name: string,
bid: string,
claim_ids: Array<string>,
blocking?: true,
title?: string,
thumbnail_url?: string,
description?: string,
tags?: Array<string>,
languages?: Array<string>,
}


@ -1,29 +0,0 @@
declare type CoinSwapInfo = {
chargeCode: string,
coins: Array<string>,
sendAddresses: { [string]: string},
sendAmounts: { [string]: any },
lbcAmount: number,
status?: {
status: string,
receiptCurrency: string,
receiptTxid: string,
lbcTxid: string,
},
}
declare type CoinSwapState = {
coinSwaps: Array<CoinSwapInfo>,
};
declare type CoinSwapAddAction = {
type: string,
data: CoinSwapInfo,
};
declare type CoinSwapRemoveAction = {
type: string,
data: {
chargeCode: string,
},
};


@ -1,34 +0,0 @@
declare type Collection = {
id: string,
items: Array<?string>,
name: string,
type: string,
updatedAt: number,
totalItems?: number,
sourceId?: string, // if copied, claimId of original collection
};
declare type CollectionState = {
unpublished: CollectionGroup,
resolved: CollectionGroup,
pending: CollectionGroup,
edited: CollectionGroup,
builtin: CollectionGroup,
saved: Array<string>,
isResolvingCollectionById: { [string]: boolean },
error?: string | null,
};
declare type CollectionGroup = {
[string]: Collection,
}
declare type CollectionEditParams = {
claims?: Array<Claim>,
remove?: boolean,
claimIds?: Array<string>,
replace?: boolean,
order?: { from: number, to: number },
type?: string,
name?: string,
}

dist/flow-typed/Comment.js (vendored, new file, 23 lines)

@ -0,0 +1,23 @@
declare type Comment = {
comment: string, // comment body
comment_id: string, // sha256 digest
claim_id: string, // id linking to the claim this comment
timestamp: number, // integer representing unix-time
is_hidden: boolean, // claim owner may enable/disable this
channel_id?: string, // claimId of channel signing this comment
channel_name?: string, // name of channel claim
channel_url?: string, // full lbry url to signing channel
signature?: string, // signature of comment by originating channel
signing_ts?: string, // timestamp used when signing this comment
is_channel_signature_valid?: boolean, // whether or not the signature could be validated
parent_id?: number, // comment_id of comment this is in reply to
};
// todo: relate individual comments to their commentId
declare type CommentsState = {
commentsByUri: { [string]: string },
byId: { [string]: Array<string> },
commentById: { [string]: Comment },
isLoading: boolean,
myComments: ?Set<string>,
};
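
For orientation, an object satisfying the Comment declaration above might look like the following; the values are placeholders invented for illustration, not taken from this diff:

// Illustrative Comment value; field names follow the declaration above.
const exampleComment /*: Comment */ = {
  comment: 'Great upload, thanks for sharing.',
  comment_id: 'placeholder-sha256-digest',
  claim_id: 'placeholder-claim-id',
  timestamp: 1586970000, // unix time
  is_hidden: false,
  channel_id: 'placeholder-channel-claim-id',
  channel_name: '@example',
  channel_url: 'lbry://@example#placeholder-channel-claim-id',
  is_channel_signature_valid: true,
};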


@ -11,8 +11,6 @@ declare type FileListItem = {
claim_id: string, claim_id: string,
claim_name: string, claim_name: string,
completed: false, completed: false,
content_fee?: { txid: string },
purchase_receipt?: { txid: string, amount: string },
download_directory: string, download_directory: string,
download_path: string, download_path: string,
file_name: string, file_name: string,
@ -22,7 +20,6 @@ declare type FileListItem = {
outpoint: string, outpoint: string,
points_paid: number, points_paid: number,
protobuf: string, protobuf: string,
reflector_progress: number,
sd_hash: string, sd_hash: string,
status: string, status: string,
stopped: false, stopped: false,
@ -32,12 +29,10 @@ declare type FileListItem = {
suggested_file_name: string, suggested_file_name: string,
total_bytes: number, total_bytes: number,
total_bytes_lower_bound: number, total_bytes_lower_bound: number,
is_fully_reflected: boolean,
// TODO: sdk plans to change `tx` // TODO: sdk plans to change `tx`
// It isn't currently used by the apps // It isn't currently used by the apps
tx: {}, tx: {},
txid: string, txid: string,
uploading_to_reflector: boolean,
written_bytes: number, written_bytes: number,
}; };
@ -71,7 +66,7 @@ declare type PurchaseUriStarted = {
}; };
declare type DeletePurchasedUri = { declare type DeletePurchasedUri = {
type: ACTIONS.CLEAR_PURCHASED_URI_SUCCESS, type: ACTIONS.DELETE_PURCHASED_URI,
data: { data: {
uri: string, uri: string,
}, },


@ -7,6 +7,10 @@ declare type StatusResponse = {
download_progress: number, download_progress: number,
downloading_headers: boolean, downloading_headers: boolean,
}, },
connection_status: {
code: string,
message: string,
},
dht: { dht: {
node_id: string, node_id: string,
peers_in_routing_table: number, peers_in_routing_table: number,
@ -41,7 +45,6 @@ declare type StatusResponse = {
redirects: {}, redirects: {},
}, },
wallet: ?{ wallet: ?{
connected: string,
best_blockhash: string, best_blockhash: string,
blocks: number, blocks: number,
blocks_behind: number, blocks_behind: number,
@ -75,7 +78,7 @@ declare type BalanceResponse = {
declare type ResolveResponse = { declare type ResolveResponse = {
// Keys are the url(s) passed to resolve // Keys are the url(s) passed to resolve
[string]: { error?: {}, stream?: StreamClaim, channel?: ChannelClaim, collection?: CollectionClaim, claimsInChannel?: number }, [string]: { error?: {}, stream?: StreamClaim, channel?: ChannelClaim, claimsInChannel?: number },
}; };
declare type GetResponse = FileListItem & { error?: string }; declare type GetResponse = FileListItem & { error?: string };
@ -124,22 +127,12 @@ declare type ChannelUpdateResponse = GenericTxResponse & {
declare type CommentCreateResponse = Comment; declare type CommentCreateResponse = Comment;
declare type CommentUpdateResponse = Comment; declare type CommentUpdateResponse = Comment;
declare type MyReactions = { declare type CommentListResponse = {
// Keys are the commentId items: Array<Comment>,
[string]: Array<string>, page: number,
}; page_size: number,
total_items: number,
declare type OthersReactions = { total_pages: number,
// Keys are the commentId
[string]: {
// Keys are the reaction_type, e.g. 'like'
[string]: number,
},
};
declare type CommentReactListResponse = {
my_reactions: Array<MyReactions>,
others_reactions: Array<OthersReactions>,
}; };
declare type CommentHideResponse = { declare type CommentHideResponse = {
@ -147,11 +140,6 @@ declare type CommentHideResponse = {
[string]: { hidden: boolean }, [string]: { hidden: boolean },
}; };
declare type CommentPinResponse = {
// keyed by the CommentIds entered
items: Comment,
};
declare type CommentAbandonResponse = { declare type CommentAbandonResponse = {
// keyed by the CommentId given // keyed by the CommentId given
abandoned: boolean, abandoned: boolean,
@ -165,42 +153,6 @@ declare type ChannelListResponse = {
total_pages: number, total_pages: number,
}; };
declare type ChannelSignResponse = {
signature: string,
signing_ts: string,
};
declare type CollectionCreateResponse = {
outputs: Array<Claim>,
page: number,
page_size: number,
total_items: number,
total_pages: number,
}
declare type CollectionListResponse = {
items: Array<Claim>,
page: number,
page_size: number,
total_items: number,
total_pages: number,
};
declare type CollectionResolveResponse = {
items: Array<Claim>,
total_items: number,
};
declare type CollectionResolveOptions = {
claim_id: string,
};
declare type CollectionListOptions = {
page: number,
page_size: number,
resolve?: boolean,
};
declare type FileListResponse = { declare type FileListResponse = {
items: Array<FileListItem>, items: Array<FileListItem>,
page: number, page: number,
@ -257,27 +209,11 @@ declare type StreamRepostOptions = {
name: string, name: string,
bid: string, bid: string,
claim_id: string, claim_id: string,
channel_id?: string, channel_id: string,
}; };
declare type StreamRepostResponse = GenericTxResponse; declare type StreamRepostResponse = GenericTxResponse;
declare type PurchaseListResponse = {
items: Array<PurchaseReceipt & { claim: StreamClaim }>,
page: number,
page_size: number,
total_items: number,
total_pages: number,
};
declare type PurchaseListOptions = {
page: number,
page_size: number,
resolve: boolean,
claim_id?: string,
channel_id?: string,
};
// //
// Types used in the generic Lbry object that is exported // Types used in the generic Lbry object that is exported
// //
@ -310,7 +246,6 @@ declare type LbryTypes = {
channel_update: (params: {}) => Promise<ChannelUpdateResponse>, channel_update: (params: {}) => Promise<ChannelUpdateResponse>,
channel_import: (params: {}) => Promise<string>, channel_import: (params: {}) => Promise<string>,
channel_list: (params: {}) => Promise<ChannelListResponse>, channel_list: (params: {}) => Promise<ChannelListResponse>,
channel_sign: (params: {}) => Promise<ChannelSignResponse>,
stream_abandon: (params: {}) => Promise<GenericTxResponse>, stream_abandon: (params: {}) => Promise<GenericTxResponse>,
stream_list: (params: {}) => Promise<StreamListResponse>, stream_list: (params: {}) => Promise<StreamListResponse>,
channel_abandon: (params: {}) => Promise<GenericTxResponse>, channel_abandon: (params: {}) => Promise<GenericTxResponse>,
@ -318,11 +253,6 @@ declare type LbryTypes = {
support_list: (params: {}) => Promise<SupportListResponse>, support_list: (params: {}) => Promise<SupportListResponse>,
support_abandon: (params: {}) => Promise<SupportAbandonResponse>, support_abandon: (params: {}) => Promise<SupportAbandonResponse>,
stream_repost: (params: StreamRepostOptions) => Promise<StreamRepostResponse>, stream_repost: (params: StreamRepostOptions) => Promise<StreamRepostResponse>,
purchase_list: (params: PurchaseListOptions) => Promise<PurchaseListResponse>,
collection_resolve: (params: CollectionResolveOptions) => Promise<CollectionResolveResponse>,
collection_list: (params: CollectionListOptions) => Promise<CollectionListResponse>,
collection_create: (params: {}) => Promise<CollectionCreateResponse>,
collection_update: (params: {}) => Promise<CollectionCreateResponse>,
// File fetching and manipulation // File fetching and manipulation
file_list: (params: {}) => Promise<FileListResponse>, file_list: (params: {}) => Promise<FileListResponse>,
@ -335,6 +265,8 @@ declare type LbryTypes = {
preference_set: (params: {}) => Promise<any>, preference_set: (params: {}) => Promise<any>,
// Commenting // Commenting
comment_list: (params: {}) => Promise<CommentListResponse>,
comment_create: (params: {}) => Promise<CommentCreateResponse>,
comment_update: (params: {}) => Promise<CommentUpdateResponse>, comment_update: (params: {}) => Promise<CommentUpdateResponse>,
comment_hide: (params: {}) => Promise<CommentHideResponse>, comment_hide: (params: {}) => Promise<CommentHideResponse>,
comment_abandon: (params: {}) => Promise<CommentAbandonResponse>, comment_abandon: (params: {}) => Promise<CommentAbandonResponse>,
@ -351,7 +283,6 @@ declare type LbryTypes = {
address_unused: (params: {}) => Promise<string>, // New address address_unused: (params: {}) => Promise<string>, // New address
address_list: (params: {}) => Promise<string>, address_list: (params: {}) => Promise<string>,
transaction_list: (params: {}) => Promise<TxListResponse>, transaction_list: (params: {}) => Promise<TxListResponse>,
txo_list: (params: {}) => Promise<any>,
// Sync // Sync
sync_hash: (params: {}) => Promise<string>, sync_hash: (params: {}) => Promise<string>,


@ -1,99 +0,0 @@
// @flow
declare type LbryFirstStatusResponse = {
Version: string,
Message: string,
Running: boolean,
Commit: string,
};
declare type LbryFirstVersionResponse = {
build: string,
lbrynet_version: string,
os_release: string,
os_system: string,
platform: string,
processor: string,
python_version: string,
};
/* SAMPLE UPLOAD RESPONSE (FULL)
"Video": {
"etag": "\"Dn5xIderbhAnUk5TAW0qkFFir0M/xlGLrlTox7VFTRcR8F77RbKtaU4\"",
"id": "8InjtdvVmwE",
"kind": "youtube#video",
"snippet": {
"categoryId": "22",
"channelId": "UCXiVsGTU88fJjheB2rqF0rA",
"channelTitle": "Mark Beamer",
"liveBroadcastContent": "none",
"localized": {
"title": "my title"
},
"publishedAt": "2020-05-05T04:17:53.000Z",
"thumbnails": {
"default": {
"height": 90,
"url": "https://i9.ytimg.com/vi/8InjtdvVmwE/default.jpg?sqp=CMTQw_UF&rs=AOn4CLB6dlhZMSMrazDlWRsitPgCsn8fVw",
"width": 120
},
"high": {
"height": 360,
"url": "https://i9.ytimg.com/vi/8InjtdvVmwE/hqdefault.jpg?sqp=CMTQw_UF&rs=AOn4CLB-Je_7l6qvASRAR_bSGWZHaXaJWQ",
"width": 480
},
"medium": {
"height": 180,
"url": "https://i9.ytimg.com/vi/8InjtdvVmwE/mqdefault.jpg?sqp=CMTQw_UF&rs=AOn4CLCvSnDLqVznRNMKuvJ_0misY_chPQ",
"width": 320
}
},
"title": "my title"
},
"status": {
"embeddable": true,
"license": "youtube",
"privacyStatus": "private",
"publicStatsViewable": true,
"uploadStatus": "uploaded"
}
}
*/
declare type UploadResponse = {
Video: {
id: string,
snippet: {
channelId: string,
},
status: {
uploadStatus: string,
},
},
};
declare type HasYTAuthResponse = {
HashAuth: boolean,
};
declare type YTSignupResponse = {};
//
// Types used in the generic LbryFirst object that is exported
//
declare type LbryFirstTypes = {
isConnected: boolean,
connectPromise: ?Promise<any>,
connect: () => void,
lbryFirstConnectionString: string,
apiRequestHeaders: { [key: string]: string },
setApiHeader: (string, string) => void,
unsetApiHeader: string => void,
overrides: { [string]: ?Function },
setOverride: (string, Function) => void,
// LbryFirst Methods
stop: () => Promise<string>,
status: () => Promise<StatusResponse>,
version: () => Promise<VersionResponse>,
upload: any => Promise<?UploadResponse>,
hasYTAuth: string => Promise<HasYTAuthResponse>,
ytSignup: () => Promise<YTSignupResponse>,
};


@ -1,5 +0,0 @@
declare type ReflectingUpdate = {
fileListItem: FileListItem,
progress: number | boolean,
stalled: boolean,
};

dist/flow-typed/Search.js (vendored, new file, 84 lines)

@ -0,0 +1,84 @@
// @flow
import * as ACTIONS from 'constants/action_types';
declare type SearchSuggestion = {
value: string,
shorthand: string,
type: string,
};
declare type SearchOptions = {
// :(
// https://github.com/facebook/flow/issues/6492
RESULT_COUNT: number,
CLAIM_TYPE: string,
INCLUDE_FILES: string,
INCLUDE_CHANNELS: string,
INCLUDE_FILES_AND_CHANNELS: string,
MEDIA_AUDIO: string,
MEDIA_VIDEO: string,
MEDIA_TEXT: string,
MEDIA_IMAGE: string,
MEDIA_APPLICATION: string,
};
declare type SearchState = {
isActive: boolean,
searchQuery: string,
options: SearchOptions,
suggestions: { [string]: Array<SearchSuggestion> },
urisByQuery: {},
resolvedResultsByQuery: {},
resolvedResultsByQueryLastPageReached: {},
};
declare type SearchSuccess = {
type: ACTIONS.SEARCH_SUCCESS,
data: {
query: string,
uris: Array<string>,
},
};
declare type UpdateSearchQuery = {
type: ACTIONS.UPDATE_SEARCH_QUERY,
data: {
query: string,
},
};
declare type UpdateSearchSuggestions = {
type: ACTIONS.UPDATE_SEARCH_SUGGESTIONS,
data: {
query: string,
suggestions: Array<SearchSuggestion>,
},
};
declare type UpdateSearchOptions = {
type: ACTIONS.UPDATE_SEARCH_OPTIONS,
data: SearchOptions,
};
declare type ResolvedSearchResult = {
channel: string,
channel_claim_id: string,
claimId: string,
duration: number,
fee: number,
name: string,
nsfw: boolean,
release_time: string,
thumbnail_url: string,
title: string,
};
declare type ResolvedSearchSuccess = {
type: ACTIONS.RESOLVED_SEARCH_SUCCESS,
data: {
append: boolean,
pageSize: number,
results: Array<ResolvedSearchResult>,
query: string,
},
};


@ -10,9 +10,6 @@ declare type Txo = {
is_my_output: boolean, is_my_output: boolean,
is_my_input: boolean, is_my_input: boolean,
is_spent: boolean, is_spent: boolean,
signing_channel?: {
channel_id: string,
},
}; };
declare type TxoListParams = { declare type TxoListParams = {
@ -24,4 +21,4 @@ declare type TxoListParams = {
is_not_my_input?: boolean, is_not_my_input?: boolean,
is_not_my_output?: boolean, is_not_my_output?: boolean,
is_spent?: boolean, is_spent?: boolean,
}; }


@ -12,7 +12,6 @@ declare type LbryUrlObj = {
secondaryClaimSequence?: number, secondaryClaimSequence?: number,
primaryBidPosition?: number, primaryBidPosition?: number,
secondaryBidPosition?: number, secondaryBidPosition?: number,
startTime?: number,
// Below are considered deprecated and should not be used due to unreliableness with claim.canonical_url // Below are considered deprecated and should not be used due to unreliableness with claim.canonical_url
claimName?: string, claimName?: string,


@ -1,5 +0,0 @@
// @flow
declare module '@ungap/from-entries' {
declare module.exports: any;
}


@ -1,5 +0,0 @@
// @flow
declare module 'uuid' {
declare module.exports: any;
}

dist/flow-typed/npm/uuid_v3.x.x.js (vendored, new file, 102 lines)

@ -0,0 +1,102 @@
// flow-typed signature: 3cf668e64747095cab0bb360cf2fb34f
// flow-typed version: d659bd0cb8/uuid_v3.x.x/flow_>=v0.32.x
declare module "uuid" {
declare class uuid {
static (
options?: {|
random?: number[],
rng?: () => number[] | Buffer
|},
buffer?: number[] | Buffer,
offset?: number
): string,
static v1(
options?: {|
node?: number[],
clockseq?: number,
msecs?: number | Date,
nsecs?: number
|},
buffer?: number[] | Buffer,
offset?: number
): string,
static v4(
options?: {|
random?: number[],
rng?: () => number[] | Buffer
|},
buffer?: number[] | Buffer,
offset?: number
): string
}
declare module.exports: Class<uuid>;
}
declare module "uuid/v1" {
declare class v1 {
static (
options?: {|
node?: number[],
clockseq?: number,
msecs?: number | Date,
nsecs?: number
|},
buffer?: number[] | Buffer,
offset?: number
): string
}
declare module.exports: Class<v1>;
}
declare module "uuid/v3" {
declare class v3 {
static (
name?: string | number[],
namespace?: string | number[],
buffer?: number[] | Buffer,
offset?: number
): string,
static name: string,
static DNS: string,
static URL: string
}
declare module.exports: Class<v3>;
}
declare module "uuid/v4" {
declare class v4 {
static (
options?: {|
random?: number[],
rng?: () => number[] | Buffer
|},
buffer?: number[] | Buffer,
offset?: number
): string
}
declare module.exports: Class<v4>;
}
declare module "uuid/v5" {
declare class v5 {
static (
name?: string | number[],
namespace?: string | number[],
buffer?: number[] | Buffer,
offset?: number
): string,
static name: string,
static DNS: string,
static URL: string
}
declare module.exports: Class<v5>;
}

flow-typed/Claim.js (vendored, 101 lines changed)

@ -1,16 +1,14 @@
// @flow // @flow
declare type Claim = StreamClaim | ChannelClaim | CollectionClaim; declare type Claim = StreamClaim | ChannelClaim;
declare type ChannelClaim = GenericClaim & { declare type ChannelClaim = GenericClaim & {
is_channel_signature_valid?: boolean, // we may have signed channels in the future
value: ChannelMetadata, value: ChannelMetadata,
}; };
declare type CollectionClaim = GenericClaim & {
value: CollectionMetadata,
};
declare type StreamClaim = GenericClaim & { declare type StreamClaim = GenericClaim & {
is_channel_signature_valid?: boolean,
value: StreamMetadata, value: StreamMetadata,
}; };
@ -25,8 +23,7 @@ declare type GenericClaim = {
decoded_claim: boolean, // Not available currently https://github.com/lbryio/lbry/issues/2044 decoded_claim: boolean, // Not available currently https://github.com/lbryio/lbry/issues/2044
timestamp?: number, // date of last transaction timestamp?: number, // date of last transaction
height: number, // block height the tx was confirmed height: number, // block height the tx was confirmed
is_channel_signature_valid?: boolean, is_mine: boolean,
is_my_output: boolean,
name: string, name: string,
normalized_name: string, // `name` normalized via unicode NFD spec, normalized_name: string, // `name` normalized via unicode NFD spec,
nout: number, // index number for an output of a tx nout: number, // index number for an output of a tx
@ -34,13 +31,9 @@ declare type GenericClaim = {
short_url: string, // permanent_url with short id, no channel short_url: string, // permanent_url with short id, no channel
txid: string, // unique tx id txid: string, // unique tx id
type: 'claim' | 'update' | 'support', type: 'claim' | 'update' | 'support',
value_type: 'stream' | 'channel' | 'collection', value_type: 'stream' | 'channel',
signing_channel?: ChannelClaim, signing_channel?: ChannelClaim,
reposted_claim?: GenericClaim,
repost_channel_url?: string, repost_channel_url?: string,
repost_url?: string,
repost_bid_amount?: string,
purchase_receipt?: PurchaseReceipt,
meta: { meta: {
activation_height: number, activation_height: number,
claims_in_channel?: number, claims_in_channel?: number,
@ -78,10 +71,6 @@ declare type ChannelMetadata = GenericMetadata & {
featured?: Array<string>, featured?: Array<string>,
}; };
declare type CollectionMetadata = GenericMetadata & {
claims: Array<string>,
}
declare type StreamMetadata = GenericMetadata & { declare type StreamMetadata = GenericMetadata & {
license?: string, // License "title" ex: Creative Commons, Custom copyright license?: string, // License "title" ex: Creative Commons, Custom copyright
license_url?: string, // Link to full license license_url?: string, // Link to full license
@ -132,83 +121,3 @@ declare type Fee = {
currency: string, currency: string,
address: string, address: string,
}; };
declare type PurchaseReceipt = {
address: string,
amount: string,
claim_id: string,
confirmations: number,
height: number,
nout: number,
timestamp: number,
txid: string,
type: 'purchase',
};
declare type ClaimActionResolveInfo = {
[string]: {
stream: ?StreamClaim,
channel: ?ChannelClaim,
claimsInChannel: ?number,
collection: ?CollectionClaim,
},
}
declare type ChannelUpdateParams = {
claim_id: string,
bid?: string,
title?: string,
cover_url?: string,
thumbnail_url?: string,
description?: string,
website_url?: string,
email?: string,
tags?: Array<string>,
replace?: boolean,
languages?: Array<string>,
locations?: Array<string>,
blocking?: boolean,
}
declare type ChannelPublishParams = {
name: string,
bid: string,
blocking?: true,
title?: string,
cover_url?: string,
thumbnail_url?: string,
description?: string,
website_url?: string,
email?: string,
tags?: Array<string>,
languages?: Array<string>,
}
declare type CollectionUpdateParams = {
claim_id: string,
claim_ids?: Array<string>,
bid?: string,
title?: string,
cover_url?: string,
thumbnail_url?: string,
description?: string,
website_url?: string,
email?: string,
tags?: Array<string>,
replace?: boolean,
languages?: Array<string>,
locations?: Array<string>,
blocking?: boolean,
}
declare type CollectionPublishParams = {
name: string,
bid: string,
claim_ids: Array<string>,
blocking?: true,
title?: string,
thumbnail_url?: string,
description?: string,
tags?: Array<string>,
languages?: Array<string>,
}


@ -1,29 +0,0 @@
declare type CoinSwapInfo = {
chargeCode: string,
coins: Array<string>,
sendAddresses: { [string]: string},
sendAmounts: { [string]: any },
lbcAmount: number,
status?: {
status: string,
receiptCurrency: string,
receiptTxid: string,
lbcTxid: string,
},
}
declare type CoinSwapState = {
coinSwaps: Array<CoinSwapInfo>,
};
declare type CoinSwapAddAction = {
type: string,
data: CoinSwapInfo,
};
declare type CoinSwapRemoveAction = {
type: string,
data: {
chargeCode: string,
},
};


@ -1,34 +0,0 @@
declare type Collection = {
id: string,
items: Array<?string>,
name: string,
type: string,
updatedAt: number,
totalItems?: number,
sourceId?: string, // if copied, claimId of original collection
};
declare type CollectionState = {
unpublished: CollectionGroup,
resolved: CollectionGroup,
pending: CollectionGroup,
edited: CollectionGroup,
builtin: CollectionGroup,
saved: Array<string>,
isResolvingCollectionById: { [string]: boolean },
error?: string | null,
};
declare type CollectionGroup = {
[string]: Collection,
}
declare type CollectionEditParams = {
claims?: Array<Claim>,
remove?: boolean,
claimIds?: Array<string>,
replace?: boolean,
order?: { from: number, to: number },
type?: string,
name?: string,
}

flow-typed/Comment.js (vendored, new file, 23 lines)

@ -0,0 +1,23 @@
declare type Comment = {
comment: string, // comment body
comment_id: string, // sha256 digest
claim_id: string, // id linking to the claim this comment
timestamp: number, // integer representing unix-time
is_hidden: boolean, // claim owner may enable/disable this
channel_id?: string, // claimId of channel signing this comment
channel_name?: string, // name of channel claim
channel_url?: string, // full lbry url to signing channel
signature?: string, // signature of comment by originating channel
signing_ts?: string, // timestamp used when signing this comment
is_channel_signature_valid?: boolean, // whether or not the signature could be validated
parent_id?: number, // comment_id of comment this is in reply to
};
// todo: relate individual comments to their commentId
declare type CommentsState = {
commentsByUri: { [string]: string },
byId: { [string]: Array<string> },
commentById: { [string]: Comment },
isLoading: boolean,
myComments: ?Set<string>,
};

flow-typed/File.js (vendored, 7 lines changed)

@ -11,8 +11,6 @@ declare type FileListItem = {
claim_id: string, claim_id: string,
claim_name: string, claim_name: string,
completed: false, completed: false,
content_fee?: { txid: string },
purchase_receipt?: { txid: string, amount: string },
download_directory: string, download_directory: string,
download_path: string, download_path: string,
file_name: string, file_name: string,
@ -22,7 +20,6 @@ declare type FileListItem = {
outpoint: string, outpoint: string,
points_paid: number, points_paid: number,
protobuf: string, protobuf: string,
reflector_progress: number,
sd_hash: string, sd_hash: string,
status: string, status: string,
stopped: false, stopped: false,
@ -32,12 +29,10 @@ declare type FileListItem = {
suggested_file_name: string, suggested_file_name: string,
total_bytes: number, total_bytes: number,
total_bytes_lower_bound: number, total_bytes_lower_bound: number,
is_fully_reflected: boolean,
// TODO: sdk plans to change `tx` // TODO: sdk plans to change `tx`
// It isn't currently used by the apps // It isn't currently used by the apps
tx: {}, tx: {},
txid: string, txid: string,
uploading_to_reflector: boolean,
written_bytes: number, written_bytes: number,
}; };
@ -71,7 +66,7 @@ declare type PurchaseUriStarted = {
}; };
declare type DeletePurchasedUri = { declare type DeletePurchasedUri = {
type: ACTIONS.CLEAR_PURCHASED_URI_SUCCESS, type: ACTIONS.DELETE_PURCHASED_URI,
data: { data: {
uri: string, uri: string,
}, },

flow-typed/Lbry.js (vendored, 97 lines changed)

@ -7,6 +7,10 @@ declare type StatusResponse = {
download_progress: number, download_progress: number,
downloading_headers: boolean, downloading_headers: boolean,
}, },
connection_status: {
code: string,
message: string,
},
dht: { dht: {
node_id: string, node_id: string,
peers_in_routing_table: number, peers_in_routing_table: number,
@ -41,7 +45,6 @@ declare type StatusResponse = {
redirects: {}, redirects: {},
}, },
wallet: ?{ wallet: ?{
connected: string,
best_blockhash: string, best_blockhash: string,
blocks: number, blocks: number,
blocks_behind: number, blocks_behind: number,
@ -75,7 +78,7 @@ declare type BalanceResponse = {
declare type ResolveResponse = { declare type ResolveResponse = {
// Keys are the url(s) passed to resolve // Keys are the url(s) passed to resolve
[string]: { error?: {}, stream?: StreamClaim, channel?: ChannelClaim, collection?: CollectionClaim, claimsInChannel?: number }, [string]: { error?: {}, stream?: StreamClaim, channel?: ChannelClaim, claimsInChannel?: number },
}; };
declare type GetResponse = FileListItem & { error?: string }; declare type GetResponse = FileListItem & { error?: string };
@ -124,22 +127,12 @@ declare type ChannelUpdateResponse = GenericTxResponse & {
declare type CommentCreateResponse = Comment; declare type CommentCreateResponse = Comment;
declare type CommentUpdateResponse = Comment; declare type CommentUpdateResponse = Comment;
declare type MyReactions = { declare type CommentListResponse = {
// Keys are the commentId items: Array<Comment>,
[string]: Array<string>, page: number,
}; page_size: number,
total_items: number,
declare type OthersReactions = { total_pages: number,
// Keys are the commentId
[string]: {
// Keys are the reaction_type, e.g. 'like'
[string]: number,
},
};
declare type CommentReactListResponse = {
my_reactions: Array<MyReactions>,
others_reactions: Array<OthersReactions>,
}; };
declare type CommentHideResponse = { declare type CommentHideResponse = {
@ -147,11 +140,6 @@ declare type CommentHideResponse = {
[string]: { hidden: boolean }, [string]: { hidden: boolean },
}; };
declare type CommentPinResponse = {
// keyed by the CommentIds entered
items: Comment,
};
declare type CommentAbandonResponse = { declare type CommentAbandonResponse = {
// keyed by the CommentId given // keyed by the CommentId given
abandoned: boolean, abandoned: boolean,
@ -165,42 +153,6 @@ declare type ChannelListResponse = {
total_pages: number, total_pages: number,
}; };
declare type ChannelSignResponse = {
signature: string,
signing_ts: string,
};
declare type CollectionCreateResponse = {
outputs: Array<Claim>,
page: number,
page_size: number,
total_items: number,
total_pages: number,
}
declare type CollectionListResponse = {
items: Array<Claim>,
page: number,
page_size: number,
total_items: number,
total_pages: number,
};
declare type CollectionResolveResponse = {
items: Array<Claim>,
total_items: number,
};
declare type CollectionResolveOptions = {
claim_id: string,
};
declare type CollectionListOptions = {
page: number,
page_size: number,
resolve?: boolean,
};
declare type FileListResponse = { declare type FileListResponse = {
items: Array<FileListItem>, items: Array<FileListItem>,
page: number, page: number,
@ -257,27 +209,11 @@ declare type StreamRepostOptions = {
name: string, name: string,
bid: string, bid: string,
claim_id: string, claim_id: string,
channel_id?: string, channel_id: string,
}; };
declare type StreamRepostResponse = GenericTxResponse; declare type StreamRepostResponse = GenericTxResponse;
declare type PurchaseListResponse = {
items: Array<PurchaseReceipt & { claim: StreamClaim }>,
page: number,
page_size: number,
total_items: number,
total_pages: number,
};
declare type PurchaseListOptions = {
page: number,
page_size: number,
resolve: boolean,
claim_id?: string,
channel_id?: string,
};
// //
// Types used in the generic Lbry object that is exported // Types used in the generic Lbry object that is exported
// //
@ -310,7 +246,6 @@ declare type LbryTypes = {
channel_update: (params: {}) => Promise<ChannelUpdateResponse>, channel_update: (params: {}) => Promise<ChannelUpdateResponse>,
channel_import: (params: {}) => Promise<string>, channel_import: (params: {}) => Promise<string>,
channel_list: (params: {}) => Promise<ChannelListResponse>, channel_list: (params: {}) => Promise<ChannelListResponse>,
channel_sign: (params: {}) => Promise<ChannelSignResponse>,
stream_abandon: (params: {}) => Promise<GenericTxResponse>, stream_abandon: (params: {}) => Promise<GenericTxResponse>,
stream_list: (params: {}) => Promise<StreamListResponse>, stream_list: (params: {}) => Promise<StreamListResponse>,
channel_abandon: (params: {}) => Promise<GenericTxResponse>, channel_abandon: (params: {}) => Promise<GenericTxResponse>,
@ -318,11 +253,6 @@ declare type LbryTypes = {
support_list: (params: {}) => Promise<SupportListResponse>, support_list: (params: {}) => Promise<SupportListResponse>,
support_abandon: (params: {}) => Promise<SupportAbandonResponse>, support_abandon: (params: {}) => Promise<SupportAbandonResponse>,
stream_repost: (params: StreamRepostOptions) => Promise<StreamRepostResponse>, stream_repost: (params: StreamRepostOptions) => Promise<StreamRepostResponse>,
purchase_list: (params: PurchaseListOptions) => Promise<PurchaseListResponse>,
collection_resolve: (params: CollectionResolveOptions) => Promise<CollectionResolveResponse>,
collection_list: (params: CollectionListOptions) => Promise<CollectionListResponse>,
collection_create: (params: {}) => Promise<CollectionCreateResponse>,
collection_update: (params: {}) => Promise<CollectionCreateResponse>,
// File fetching and manipulation // File fetching and manipulation
file_list: (params: {}) => Promise<FileListResponse>, file_list: (params: {}) => Promise<FileListResponse>,
@ -335,6 +265,8 @@ declare type LbryTypes = {
preference_set: (params: {}) => Promise<any>, preference_set: (params: {}) => Promise<any>,
// Commenting // Commenting
comment_list: (params: {}) => Promise<CommentListResponse>,
comment_create: (params: {}) => Promise<CommentCreateResponse>,
comment_update: (params: {}) => Promise<CommentUpdateResponse>, comment_update: (params: {}) => Promise<CommentUpdateResponse>,
comment_hide: (params: {}) => Promise<CommentHideResponse>, comment_hide: (params: {}) => Promise<CommentHideResponse>,
comment_abandon: (params: {}) => Promise<CommentAbandonResponse>, comment_abandon: (params: {}) => Promise<CommentAbandonResponse>,
@ -351,7 +283,6 @@ declare type LbryTypes = {
address_unused: (params: {}) => Promise<string>, // New address address_unused: (params: {}) => Promise<string>, // New address
address_list: (params: {}) => Promise<string>, address_list: (params: {}) => Promise<string>,
transaction_list: (params: {}) => Promise<TxListResponse>, transaction_list: (params: {}) => Promise<TxListResponse>,
txo_list: (params: {}) => Promise<any>,
// Sync // Sync
sync_hash: (params: {}) => Promise<string>, sync_hash: (params: {}) => Promise<string>,
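
This diff adds comment_list to the Lbry object, typed to return CommentListResponse (items plus page, page_size, total_items, and total_pages). A hedged sketch of paging through comments with that shape; the parameter names (claim_id, page, page_size) are assumptions about the daemon call, not taken from this diff:

// Sketch only: assumes Lbry.comment_list accepts claim_id / page / page_size params.
import { Lbry } from 'lbry-redux';

async function fetchAllComments(claimId /*: string */) /*: Promise<Array<Comment>> */ {
  const comments = [];
  let page = 1;
  let totalPages = 1;
  while (page <= totalPages) {
    // CommentListResponse: { items, page, page_size, total_items, total_pages }
    const response = await Lbry.comment_list({ claim_id: claimId, page, page_size: 50 });
    comments.push(...response.items);
    totalPages = response.total_pages;
    page += 1;
  }
  return comments;
}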


@ -1,99 +0,0 @@
// @flow
declare type LbryFirstStatusResponse = {
Version: string,
Message: string,
Running: boolean,
Commit: string,
};
declare type LbryFirstVersionResponse = {
build: string,
lbrynet_version: string,
os_release: string,
os_system: string,
platform: string,
processor: string,
python_version: string,
};
/* SAMPLE UPLOAD RESPONSE (FULL)
"Video": {
"etag": "\"Dn5xIderbhAnUk5TAW0qkFFir0M/xlGLrlTox7VFTRcR8F77RbKtaU4\"",
"id": "8InjtdvVmwE",
"kind": "youtube#video",
"snippet": {
"categoryId": "22",
"channelId": "UCXiVsGTU88fJjheB2rqF0rA",
"channelTitle": "Mark Beamer",
"liveBroadcastContent": "none",
"localized": {
"title": "my title"
},
"publishedAt": "2020-05-05T04:17:53.000Z",
"thumbnails": {
"default": {
"height": 90,
"url": "https://i9.ytimg.com/vi/8InjtdvVmwE/default.jpg?sqp=CMTQw_UF&rs=AOn4CLB6dlhZMSMrazDlWRsitPgCsn8fVw",
"width": 120
},
"high": {
"height": 360,
"url": "https://i9.ytimg.com/vi/8InjtdvVmwE/hqdefault.jpg?sqp=CMTQw_UF&rs=AOn4CLB-Je_7l6qvASRAR_bSGWZHaXaJWQ",
"width": 480
},
"medium": {
"height": 180,
"url": "https://i9.ytimg.com/vi/8InjtdvVmwE/mqdefault.jpg?sqp=CMTQw_UF&rs=AOn4CLCvSnDLqVznRNMKuvJ_0misY_chPQ",
"width": 320
}
},
"title": "my title"
},
"status": {
"embeddable": true,
"license": "youtube",
"privacyStatus": "private",
"publicStatsViewable": true,
"uploadStatus": "uploaded"
}
}
*/
declare type UploadResponse = {
Video: {
id: string,
snippet: {
channelId: string,
},
status: {
uploadStatus: string,
},
},
};
declare type HasYTAuthResponse = {
HashAuth: boolean,
};
declare type YTSignupResponse = {};
//
// Types used in the generic LbryFirst object that is exported
//
declare type LbryFirstTypes = {
isConnected: boolean,
connectPromise: ?Promise<any>,
connect: () => void,
lbryFirstConnectionString: string,
apiRequestHeaders: { [key: string]: string },
setApiHeader: (string, string) => void,
unsetApiHeader: string => void,
overrides: { [string]: ?Function },
setOverride: (string, Function) => void,
// LbryFirst Methods
stop: () => Promise<string>,
status: () => Promise<StatusResponse>,
version: () => Promise<VersionResponse>,
upload: any => Promise<?UploadResponse>,
hasYTAuth: string => Promise<HasYTAuthResponse>,
ytSignup: () => Promise<YTSignupResponse>,
};


@ -1,5 +0,0 @@
declare type ReflectingUpdate = {
fileListItem: FileListItem,
progress: number | boolean,
stalled: boolean,
};

flow-typed/Search.js (vendored, new file, 84 lines)

@ -0,0 +1,84 @@
// @flow
import * as ACTIONS from 'constants/action_types';
declare type SearchSuggestion = {
value: string,
shorthand: string,
type: string,
};
declare type SearchOptions = {
// :(
// https://github.com/facebook/flow/issues/6492
RESULT_COUNT: number,
CLAIM_TYPE: string,
INCLUDE_FILES: string,
INCLUDE_CHANNELS: string,
INCLUDE_FILES_AND_CHANNELS: string,
MEDIA_AUDIO: string,
MEDIA_VIDEO: string,
MEDIA_TEXT: string,
MEDIA_IMAGE: string,
MEDIA_APPLICATION: string,
};
declare type SearchState = {
isActive: boolean,
searchQuery: string,
options: SearchOptions,
suggestions: { [string]: Array<SearchSuggestion> },
urisByQuery: {},
resolvedResultsByQuery: {},
resolvedResultsByQueryLastPageReached: {},
};
declare type SearchSuccess = {
type: ACTIONS.SEARCH_SUCCESS,
data: {
query: string,
uris: Array<string>,
},
};
declare type UpdateSearchQuery = {
type: ACTIONS.UPDATE_SEARCH_QUERY,
data: {
query: string,
},
};
declare type UpdateSearchSuggestions = {
type: ACTIONS.UPDATE_SEARCH_SUGGESTIONS,
data: {
query: string,
suggestions: Array<SearchSuggestion>,
},
};
declare type UpdateSearchOptions = {
type: ACTIONS.UPDATE_SEARCH_OPTIONS,
data: SearchOptions,
};
declare type ResolvedSearchResult = {
channel: string,
channel_claim_id: string,
claimId: string,
duration: number,
fee: number,
name: string,
nsfw: boolean,
release_time: string,
thumbnail_url: string,
title: string,
};
declare type ResolvedSearchSuccess = {
type: ACTIONS.RESOLVED_SEARCH_SUCCESS,
data: {
append: boolean,
pageSize: number,
results: Array<ResolvedSearchResult>,
query: string,
},
};

flow-typed/Txo.js (vendored, 5 lines changed)

@ -10,9 +10,6 @@ declare type Txo = {
is_my_output: boolean, is_my_output: boolean,
is_my_input: boolean, is_my_input: boolean,
is_spent: boolean, is_spent: boolean,
signing_channel?: {
channel_id: string,
},
}; };
declare type TxoListParams = { declare type TxoListParams = {
@ -24,4 +21,4 @@ declare type TxoListParams = {
is_not_my_input?: boolean, is_not_my_input?: boolean,
is_not_my_output?: boolean, is_not_my_output?: boolean,
is_spent?: boolean, is_spent?: boolean,
}; }
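
The commit message above refers to a "txo exclude internal transfers" constant, and the TxoListParams shape in this file shows the boolean filters such a constant would map to. A hedged sketch of params that approximate excluding internal transfers (txos both funded by and paid back to the same wallet); the exact flag combination is an assumption, not shown in this diff:

// Assumption: an internal transfer is a txo where is_my_input && is_my_output
// (e.g. change or a self-send). Requesting only txos we did not fund ourselves
// filters those out when listing received funds.
const excludeInternalTransfersParams = {
  is_not_my_input: true, // see TxoListParams above
};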


@ -12,7 +12,6 @@ declare type LbryUrlObj = {
secondaryClaimSequence?: number, secondaryClaimSequence?: number,
primaryBidPosition?: number, primaryBidPosition?: number,
secondaryBidPosition?: number, secondaryBidPosition?: number,
startTime?: number,
// Below are considered deprecated and should not be used due to unreliableness with claim.canonical_url // Below are considered deprecated and should not be used due to unreliableness with claim.canonical_url
claimName?: string, claimName?: string,


@ -1,5 +0,0 @@
// @flow
declare module '@ungap/from-entries' {
declare module.exports: any;
}


@ -1,5 +0,0 @@
// @flow
declare module 'uuid' {
declare module.exports: any;
}

flow-typed/npm/uuid_v3.x.x.js (vendored, new file, 102 lines)

@ -0,0 +1,102 @@
// flow-typed signature: 3cf668e64747095cab0bb360cf2fb34f
// flow-typed version: d659bd0cb8/uuid_v3.x.x/flow_>=v0.32.x
declare module "uuid" {
declare class uuid {
static (
options?: {|
random?: number[],
rng?: () => number[] | Buffer
|},
buffer?: number[] | Buffer,
offset?: number
): string,
static v1(
options?: {|
node?: number[],
clockseq?: number,
msecs?: number | Date,
nsecs?: number
|},
buffer?: number[] | Buffer,
offset?: number
): string,
static v4(
options?: {|
random?: number[],
rng?: () => number[] | Buffer
|},
buffer?: number[] | Buffer,
offset?: number
): string
}
declare module.exports: Class<uuid>;
}
declare module "uuid/v1" {
declare class v1 {
static (
options?: {|
node?: number[],
clockseq?: number,
msecs?: number | Date,
nsecs?: number
|},
buffer?: number[] | Buffer,
offset?: number
): string
}
declare module.exports: Class<v1>;
}
declare module "uuid/v3" {
declare class v3 {
static (
name?: string | number[],
namespace?: string | number[],
buffer?: number[] | Buffer,
offset?: number
): string,
static name: string,
static DNS: string,
static URL: string
}
declare module.exports: Class<v3>;
}
declare module "uuid/v4" {
declare class v4 {
static (
options?: {|
random?: number[],
rng?: () => number[] | Buffer
|},
buffer?: number[] | Buffer,
offset?: number
): string
}
declare module.exports: Class<v4>;
}
declare module "uuid/v5" {
declare class v5 {
static (
name?: string | number[],
namespace?: string | number[],
buffer?: number[] | Buffer,
offset?: number
): string,
static name: string,
static DNS: string,
static URL: string
}
declare module.exports: Class<v5>;
}
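
This libdef declares the uuid 3.x API, which matches the uuid ^3.3.2 dependency on the right-hand side of the package.json diff further down. A minimal usage sketch consistent with the declarations above:

// uuid 3.x style: each version is required from its own sub-path.
const uuidv4 = require('uuid/v4');
const uuidv5 = require('uuid/v5');

const randomId = uuidv4();                                // random v4 UUID string
const namedId = uuidv5('https://lbry.com/', uuidv5.URL);  // deterministic v5 UUID from a URL namespace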


@ -1,8 +0,0 @@
module.exports = {
collectCoverageFrom: ["src/**/*.{js,jsx,mjs}"],
testMatch: ["<rootDir>/tests/**/*.test.js"],
transform: {
"^.+\\.(js|jsx|mjs)$": "<rootDir>/tests/config/jest-transformer.js",
},
transformIgnorePatterns: ["[/\\\\]node_modules[/\\\\].+\\.(js|jsx|mjs)$"]
};


@ -25,21 +25,14 @@
"dev": "rollup --config --watch", "dev": "rollup --config --watch",
"precommit": "flow check && lint-staged", "precommit": "flow check && lint-staged",
"lint": "eslint 'src/**/*.js' --fix", "lint": "eslint 'src/**/*.js' --fix",
"format": "prettier 'src/**/*.{js,json}' --write", "format": "prettier 'src/**/*.{js,json}' --write"
"test": "jest"
}, },
"dependencies": { "dependencies": {
"@ungap/from-entries": "^0.2.1",
"proxy-polyfill": "0.1.6", "proxy-polyfill": "0.1.6",
"reselect": "^3.0.0", "reselect": "^3.0.0",
"uuid": "^8.3.1" "uuid": "^3.3.2"
}, },
"devDependencies": { "devDependencies": {
"@babel/plugin-proposal-class-properties": "^7.10.4",
"@babel/plugin-proposal-decorators": "^7.10.5",
"@babel/plugin-transform-flow-strip-types": "^7.10.4",
"@babel/preset-env": "^7.11.0",
"@babel/preset-react": "^7.10.4",
"babel-core": "^6.26.0", "babel-core": "^6.26.0",
"babel-eslint": "^8.0.3", "babel-eslint": "^8.0.3",
"babel-loader": "^7.1.4", "babel-loader": "^7.1.4",
@ -60,7 +53,6 @@
"flow-bin": "^0.97.0", "flow-bin": "^0.97.0",
"flow-typed": "^2.5.1", "flow-typed": "^2.5.1",
"husky": "^0.14.3", "husky": "^0.14.3",
"jest": "^26.4.2",
"lint-staged": "^7.0.4", "lint-staged": "^7.0.4",
"prettier": "^1.4.2", "prettier": "^1.4.2",
"rollup": "^1.8.0", "rollup": "^1.8.0",


@ -79,16 +79,6 @@ export const SET_TRANSACTION_LIST_FILTER = 'SET_TRANSACTION_LIST_FILTER';
export const UPDATE_CURRENT_HEIGHT = 'UPDATE_CURRENT_HEIGHT'; export const UPDATE_CURRENT_HEIGHT = 'UPDATE_CURRENT_HEIGHT';
export const SET_DRAFT_TRANSACTION_AMOUNT = 'SET_DRAFT_TRANSACTION_AMOUNT'; export const SET_DRAFT_TRANSACTION_AMOUNT = 'SET_DRAFT_TRANSACTION_AMOUNT';
export const SET_DRAFT_TRANSACTION_ADDRESS = 'SET_DRAFT_TRANSACTION_ADDRESS'; export const SET_DRAFT_TRANSACTION_ADDRESS = 'SET_DRAFT_TRANSACTION_ADDRESS';
export const FETCH_UTXO_COUNT_STARTED = 'FETCH_UTXO_COUNT_STARTED';
export const FETCH_UTXO_COUNT_COMPLETED = 'FETCH_UTXO_COUNT_COMPLETED';
export const FETCH_UTXO_COUNT_FAILED = 'FETCH_UTXO_COUNT_FAILED';
export const TIP_CLAIM_MASS_STARTED = 'TIP_CLAIM_MASS_STARTED';
export const TIP_CLAIM_MASS_COMPLETED = 'TIP_CLAIM_MASS_COMPLETED';
export const TIP_CLAIM_MASS_FAILED = 'TIP_CLAIM_MASS_FAILED';
export const DO_UTXO_CONSOLIDATE_STARTED = 'DO_UTXO_CONSOLIDATE_STARTED';
export const DO_UTXO_CONSOLIDATE_COMPLETED = 'DO_UTXO_CONSOLIDATE_COMPLETED';
export const DO_UTXO_CONSOLIDATE_FAILED = 'DO_UTXO_CONSOLIDATE_FAILED';
export const PENDING_CONSOLIDATED_TXOS_UPDATED = 'PENDING_CONSOLIDATED_TXOS_UPDATED';
// Claims // Claims
export const RESOLVE_URIS_STARTED = 'RESOLVE_URIS_STARTED'; export const RESOLVE_URIS_STARTED = 'RESOLVE_URIS_STARTED';
@ -101,10 +91,6 @@ export const ABANDON_CLAIM_STARTED = 'ABANDON_CLAIM_STARTED';
export const ABANDON_CLAIM_SUCCEEDED = 'ABANDON_CLAIM_SUCCEEDED'; export const ABANDON_CLAIM_SUCCEEDED = 'ABANDON_CLAIM_SUCCEEDED';
export const FETCH_CHANNEL_LIST_STARTED = 'FETCH_CHANNEL_LIST_STARTED'; export const FETCH_CHANNEL_LIST_STARTED = 'FETCH_CHANNEL_LIST_STARTED';
export const FETCH_CHANNEL_LIST_COMPLETED = 'FETCH_CHANNEL_LIST_COMPLETED'; export const FETCH_CHANNEL_LIST_COMPLETED = 'FETCH_CHANNEL_LIST_COMPLETED';
export const FETCH_CHANNEL_LIST_FAILED = 'FETCH_CHANNEL_LIST_FAILED';
export const FETCH_COLLECTION_LIST_STARTED = 'FETCH_COLLECTION_LIST_STARTED';
export const FETCH_COLLECTION_LIST_COMPLETED = 'FETCH_COLLECTION_LIST_COMPLETED';
export const FETCH_COLLECTION_LIST_FAILED = 'FETCH_COLLECTION_LIST_FAILED';
export const CREATE_CHANNEL_STARTED = 'CREATE_CHANNEL_STARTED'; export const CREATE_CHANNEL_STARTED = 'CREATE_CHANNEL_STARTED';
export const CREATE_CHANNEL_COMPLETED = 'CREATE_CHANNEL_COMPLETED'; export const CREATE_CHANNEL_COMPLETED = 'CREATE_CHANNEL_COMPLETED';
export const CREATE_CHANNEL_FAILED = 'CREATE_CHANNEL_FAILED'; export const CREATE_CHANNEL_FAILED = 'CREATE_CHANNEL_FAILED';
@ -114,7 +100,6 @@ export const UPDATE_CHANNEL_FAILED = 'UPDATE_CHANNEL_FAILED';
export const IMPORT_CHANNEL_STARTED = 'IMPORT_CHANNEL_STARTED'; export const IMPORT_CHANNEL_STARTED = 'IMPORT_CHANNEL_STARTED';
export const IMPORT_CHANNEL_COMPLETED = 'IMPORT_CHANNEL_COMPLETED'; export const IMPORT_CHANNEL_COMPLETED = 'IMPORT_CHANNEL_COMPLETED';
export const IMPORT_CHANNEL_FAILED = 'IMPORT_CHANNEL_FAILED'; export const IMPORT_CHANNEL_FAILED = 'IMPORT_CHANNEL_FAILED';
export const CLEAR_CHANNEL_ERRORS = 'CLEAR_CHANNEL_ERRORS';
export const PUBLISH_STARTED = 'PUBLISH_STARTED'; export const PUBLISH_STARTED = 'PUBLISH_STARTED';
export const PUBLISH_COMPLETED = 'PUBLISH_COMPLETED'; export const PUBLISH_COMPLETED = 'PUBLISH_COMPLETED';
export const PUBLISH_FAILED = 'PUBLISH_FAILED'; export const PUBLISH_FAILED = 'PUBLISH_FAILED';
@ -133,38 +118,6 @@ export const CLAIM_REPOST_STARTED = 'CLAIM_REPOST_STARTED';
export const CLAIM_REPOST_COMPLETED = 'CLAIM_REPOST_COMPLETED'; export const CLAIM_REPOST_COMPLETED = 'CLAIM_REPOST_COMPLETED';
export const CLAIM_REPOST_FAILED = 'CLAIM_REPOST_FAILED'; export const CLAIM_REPOST_FAILED = 'CLAIM_REPOST_FAILED';
export const CLEAR_REPOST_ERROR = 'CLEAR_REPOST_ERROR'; export const CLEAR_REPOST_ERROR = 'CLEAR_REPOST_ERROR';
export const CHECK_PUBLISH_NAME_STARTED = 'CHECK_PUBLISH_NAME_STARTED';
export const CHECK_PUBLISH_NAME_COMPLETED = 'CHECK_PUBLISH_NAME_COMPLETED';
export const UPDATE_PENDING_CLAIMS = 'UPDATE_PENDING_CLAIMS';
export const UPDATE_CONFIRMED_CLAIMS = 'UPDATE_CONFIRMED_CLAIMS';
export const ADD_FILES_REFLECTING = 'ADD_FILES_REFLECTING';
export const UPDATE_FILES_REFLECTING = 'UPDATE_FILES_REFLECTING';
export const TOGGLE_CHECKING_REFLECTING = 'TOGGLE_CHECKING_REFLECTING';
export const TOGGLE_CHECKING_PENDING = 'TOGGLE_CHECKING_PENDING';
export const PURCHASE_LIST_STARTED = 'PURCHASE_LIST_STARTED';
export const PURCHASE_LIST_COMPLETED = 'PURCHASE_LIST_COMPLETED';
export const PURCHASE_LIST_FAILED = 'PURCHASE_LIST_FAILED';
export const COLLECTION_PUBLISH_STARTED = 'COLLECTION_PUBLISH_STARTED';
export const COLLECTION_PUBLISH_COMPLETED = 'COLLECTION_PUBLISH_COMPLETED';
export const COLLECTION_PUBLISH_FAILED = 'COLLECTION_PUBLISH_FAILED';
export const COLLECTION_PUBLISH_UPDATE_STARTED = 'COLLECTION_PUBLISH_UPDATE_STARTED';
export const COLLECTION_PUBLISH_UPDATE_COMPLETED = 'COLLECTION_PUBLISH_UPDATE_COMPLETED';
export const COLLECTION_PUBLISH_UPDATE_FAILED = 'COLLECTION_PUBLISH_UPDATE_FAILED';
export const COLLECTION_PUBLISH_ABANDON_STARTED = 'COLLECTION_PUBLISH_ABANDON_STARTED';
export const COLLECTION_PUBLISH_ABANDON_COMPLETED = 'COLLECTION_PUBLISH_ABANDON_COMPLETED';
export const COLLECTION_PUBLISH_ABANDON_FAILED = 'COLLECTION_PUBLISH_ABANDON_FAILED';
export const CLEAR_COLLECTION_ERRORS = 'CLEAR_COLLECTION_ERRORS';
export const COLLECTION_ITEMS_RESOLVE_STARTED = 'COLLECTION_ITEMS_RESOLVE_STARTED';
export const COLLECTION_ITEMS_RESOLVE_COMPLETED = 'COLLECTION_ITEMS_RESOLVE_COMPLETED';
export const COLLECTION_ITEMS_RESOLVE_FAILED = 'COLLECTION_ITEMS_RESOLVE_FAILED';
export const COLLECTION_NEW = 'COLLECTION_NEW';
export const COLLECTION_DELETE = 'COLLECTION_DELETE';
export const COLLECTION_PENDING = 'COLLECTION_PENDING';
export const COLLECTION_EDIT = 'COLLECTION_EDIT';
export const COLLECTION_COPY = 'COLLECTION_COPY';
export const COLLECTION_SAVE = 'COLLECTION_SAVE';
export const COLLECTION_ERROR = 'COLLECTION_ERROR';
// Comments // Comments
export const COMMENT_LIST_STARTED = 'COMMENT_LIST_STARTED'; export const COMMENT_LIST_STARTED = 'COMMENT_LIST_STARTED';
@ -204,7 +157,20 @@ export const SET_FILE_LIST_SORT = 'SET_FILE_LIST_SORT';
export const PURCHASE_URI_STARTED = 'PURCHASE_URI_STARTED'; export const PURCHASE_URI_STARTED = 'PURCHASE_URI_STARTED';
export const PURCHASE_URI_COMPLETED = 'PURCHASE_URI_COMPLETED'; export const PURCHASE_URI_COMPLETED = 'PURCHASE_URI_COMPLETED';
export const PURCHASE_URI_FAILED = 'PURCHASE_URI_FAILED'; export const PURCHASE_URI_FAILED = 'PURCHASE_URI_FAILED';
export const CLEAR_PURCHASED_URI_SUCCESS = 'CLEAR_PURCHASED_URI_SUCCESS'; export const DELETE_PURCHASED_URI = 'DELETE_PURCHASED_URI';
// Search
export const SEARCH_START = 'SEARCH_START';
export const SEARCH_SUCCESS = 'SEARCH_SUCCESS';
export const SEARCH_FAIL = 'SEARCH_FAIL';
export const RESOLVED_SEARCH_START = 'RESOLVED_SEARCH_START';
export const RESOLVED_SEARCH_SUCCESS = 'RESOLVED_SEARCH_SUCCESS';
export const RESOLVED_SEARCH_FAIL = 'RESOLVED_SEARCH_FAIL';
export const UPDATE_SEARCH_QUERY = 'UPDATE_SEARCH_QUERY';
export const UPDATE_SEARCH_OPTIONS = 'UPDATE_SEARCH_OPTIONS';
export const UPDATE_SEARCH_SUGGESTIONS = 'UPDATE_SEARCH_SUGGESTIONS';
export const SEARCH_FOCUS = 'SEARCH_FOCUS';
export const SEARCH_BLUR = 'SEARCH_BLUR';
// Settings // Settings
export const DAEMON_SETTINGS_RECEIVED = 'DAEMON_SETTINGS_RECEIVED'; export const DAEMON_SETTINGS_RECEIVED = 'DAEMON_SETTINGS_RECEIVED';
@ -300,6 +266,13 @@ export const FETCH_COST_INFO_STARTED = 'FETCH_COST_INFO_STARTED';
export const FETCH_COST_INFO_COMPLETED = 'FETCH_COST_INFO_COMPLETED'; export const FETCH_COST_INFO_COMPLETED = 'FETCH_COST_INFO_COMPLETED';
export const FETCH_COST_INFO_FAILED = 'FETCH_COST_INFO_FAILED'; export const FETCH_COST_INFO_FAILED = 'FETCH_COST_INFO_FAILED';
// Tags
export const TOGGLE_TAG_FOLLOW = 'TOGGLE_TAG_FOLLOW';
export const TAG_ADD = 'TAG_ADD';
export const TAG_DELETE = 'TAG_DELETE';
// Blocked Channels
export const TOGGLE_BLOCK_CHANNEL = 'TOGGLE_BLOCK_CHANNEL';
// Sync // Sync
export const USER_STATE_POPULATE = 'USER_STATE_POPULATE'; export const USER_STATE_POPULATE = 'USER_STATE_POPULATE';
export const SYNC_FATAL_ERROR = 'SYNC_FATAL_ERROR';


@ -3,9 +3,3 @@ export const MINIMUM_PUBLISH_BID = 0.00000001;
export const CHANNEL_ANONYMOUS = 'anonymous'; export const CHANNEL_ANONYMOUS = 'anonymous';
export const CHANNEL_NEW = 'new'; export const CHANNEL_NEW = 'new';
export const PAGE_SIZE = 20; export const PAGE_SIZE = 20;
export const LEVEL_1_STAKED_AMOUNT = 0;
export const LEVEL_2_STAKED_AMOUNT = 1;
export const LEVEL_3_STAKED_AMOUNT = 50;
export const LEVEL_4_STAKED_AMOUNT = 250;
export const LEVEL_5_STAKED_AMOUNT = 1000;

@ -1,15 +0,0 @@
export const COLLECTION_ID = 'lid';
export const COLLECTION_INDEX = 'linx';
export const COL_TYPE_PLAYLIST = 'playlist';
export const COL_TYPE_CHANNELS = 'channelList';
export const WATCH_LATER_ID = 'watchlater';
export const FAVORITES_ID = 'favorites';
export const FAVORITE_CHANNELS_ID = 'favoriteChannels';
export const BUILTIN_LISTS = [WATCH_LATER_ID, FAVORITES_ID, FAVORITE_CHANNELS_ID];
export const COL_KEY_EDITED = 'edited';
export const COL_KEY_UNPUBLISHED = 'unpublished';
export const COL_KEY_PENDING = 'pending';
export const COL_KEY_SAVED = 'saved';

src/constants/search.js (new file, 19 lines)
@ -0,0 +1,19 @@
export const SEARCH_TYPES = {
FILE: 'file',
CHANNEL: 'channel',
SEARCH: 'search',
TAG: 'tag',
};
export const SEARCH_OPTIONS = {
RESULT_COUNT: 'size',
CLAIM_TYPE: 'claimType',
INCLUDE_FILES: 'file',
INCLUDE_CHANNELS: 'channel',
INCLUDE_FILES_AND_CHANNELS: 'file,channel',
MEDIA_AUDIO: 'audio',
MEDIA_VIDEO: 'video',
MEDIA_TEXT: 'text',
MEDIA_IMAGE: 'image',
MEDIA_APPLICATION: 'application',
};

@ -6,16 +6,10 @@ export const SHOW_NSFW = 'showNsfw';
export const CREDIT_REQUIRED_ACKNOWLEDGED = 'credit_required_acknowledged';
export const NEW_USER_ACKNOWLEDGED = 'welcome_acknowledged';
export const EMAIL_COLLECTION_ACKNOWLEDGED = 'email_collection_acknowledged';
export const FIRST_RUN_STARTED = 'first_run_started';
export const INVITE_ACKNOWLEDGED = 'invite_acknowledged';
export const FOLLOWING_ACKNOWLEDGED = 'following_acknowledged';
export const TAGS_ACKNOWLEDGED = 'tags_acknowledged';
export const REWARDS_ACKNOWLEDGED = 'rewards_acknowledged';
export const LANGUAGE = 'language';
export const SEARCH_IN_LANGUAGE = 'search_in_language';
export const SHOW_MATURE = 'show_mature';
export const HOMEPAGE = 'homepage';
export const SHOW_REPOSTS = 'show_reposts';
export const HIDE_REPOSTS = 'hide_reposts';
export const SHOW_ANONYMOUS = 'show_anonymous';
export const SHOW_UNAVAILABLE = 'show_unavailable';
export const INSTANT_PURCHASE_ENABLED = 'instant_purchase_enabled';
@ -23,24 +17,16 @@ export const INSTANT_PURCHASE_MAX = 'instant_purchase_max';
export const THEME = 'theme';
export const THEMES = 'themes';
export const AUTOMATIC_DARK_MODE_ENABLED = 'automatic_dark_mode_enabled';
export const AUTOPLAY_MEDIA = 'autoplay';
export const AUTOPLAY = 'autoplay';
export const AUTOPLAY_NEXT = 'autoplay_next';
export const OS_NOTIFICATIONS_ENABLED = 'os_notifications_enabled';
export const AUTO_DOWNLOAD = 'auto_download';
export const AUTO_LAUNCH = 'auto_launch';
export const TO_TRAY_WHEN_CLOSED = 'to_tray_when_closed';
export const SUPPORT_OPTION = 'support_option';
export const HIDE_BALANCE = 'hide_balance';
export const HIDE_SPLASH_ANIMATION = 'hide_splash_animation';
export const FLOATING_PLAYER = 'floating_player';
export const DARK_MODE_TIMES = 'dark_mode_times';
export const ENABLE_SYNC = 'enable_sync';
export const ENABLE_PUBLISH_PREVIEW = 'enable-publish-preview';
export const TILE_LAYOUT = 'tile_layout';
export const VIDEO_THEATER_MODE = 'video_theater_mode';
export const VIDEO_PLAYBACK_RATE = 'video_playback_rate';
export const CUSTOM_COMMENTS_SERVER_ENABLED = 'custom_comments_server_enabled';
export const CUSTOM_COMMENTS_SERVER_URL = 'custom_comments_server_url';
// mobile settings
export const BACKGROUND_PLAY_ENABLED = 'backgroundPlayEnabled';

@ -8,25 +8,6 @@
*/
import * as DAEMON_SETTINGS from './daemon_settings';
import * as SETTINGS from './settings';
// DAEMON
export const WALLET_SERVERS = DAEMON_SETTINGS.LBRYUM_SERVERS;
export const SDK_SYNC_KEYS = [DAEMON_SETTINGS.LBRYUM_SERVERS, DAEMON_SETTINGS.SHARE_USAGE_DATA];
export const SHARE_USAGE_DATA = DAEMON_SETTINGS.SHARE_USAGE_DATA;
// CLIENT
export const CLIENT_SYNC_KEYS = [
SETTINGS.SHOW_MATURE,
SETTINGS.HIDE_REPOSTS,
SETTINGS.SHOW_ANONYMOUS,
SETTINGS.INSTANT_PURCHASE_ENABLED,
SETTINGS.INSTANT_PURCHASE_MAX,
SETTINGS.THEME,
SETTINGS.AUTOPLAY_MEDIA,
SETTINGS.AUTOPLAY_NEXT,
SETTINGS.HIDE_BALANCE,
SETTINGS.HIDE_SPLASH_ANIMATION,
SETTINGS.FLOATING_PLAYER,
SETTINGS.DARK_MODE_TIMES,
SETTINGS.AUTOMATIC_DARK_MODE_ENABLED,
SETTINGS.LANGUAGE,
];

@ -13,30 +13,9 @@ export const DEFAULT_FOLLOWED_TAGS = [
'technology',
];
export const MATURE_TAGS = [
export const MATURE_TAGS = ['porn', 'nsfw', 'mature', 'xxx'];
'porn',
'porno',
'nsfw',
'mature',
'xxx',
'sex',
'creampie',
'blowjob',
'handjob',
'vagina',
'boobs',
'big boobs',
'big dick',
'pussy',
'cumshot',
'anal',
'hard fucking',
'ass',
'fuck',
'hentai',
];
const DEFAULT_ENGLISH_KNOWN_TAGS = [
export const DEFAULT_KNOWN_TAGS = [
'free speech',
'censorship',
'gaming',
@ -60,7 +39,6 @@ const DEFAULT_ENGLISH_KNOWN_TAGS = [
'video game',
'sports',
'walkthrough',
'lbrytvpaidbeta',
'art',
'pc',
'minecraft',
@ -517,31 +495,6 @@ const DEFAULT_ENGLISH_KNOWN_TAGS = [
'teaser',
'lbry',
'coronavirus',
'2020protests',
'covidcuts',
'covid-19',
'LBRYFoundationBoardCandidacy',
'helplbrysavecrypto'
];
const DEFAULT_SPANISH_KNOWN_TAGS = [
'español',
'tecnología',
'criptomonedas',
'economía',
'bitcoin',
'educación',
'videojuegos',
'música',
'noticias',
'ciencia',
'deportes',
'latinoamérica',
'latam',
'conspiración',
'humor',
'política',
'tutoriales',
];
export const DEFAULT_KNOWN_TAGS = [...DEFAULT_ENGLISH_KNOWN_TAGS, ...DEFAULT_SPANISH_KNOWN_TAGS];

@ -26,7 +26,6 @@ export const IS_MY_INPUT = 'is_my_input';
export const IS_MY_OUTPUT = 'is_my_output';
export const IS_NOT_MY_INPUT = 'is_not_my_input';
export const IS_NOT_MY_OUTPUT = 'is_not_my_output'; // use to further distinguish payments to self / from self.
export const IS_MY_INPUT_OR_OUTPUT = 'is_my_input_or_output';
export const EXCLUDE_INTERNAL_TRANSFERS = 'exclude_internal_transfers';
// sdk unique types

@ -7,15 +7,15 @@ import * as SORT_OPTIONS from 'constants/sort_options';
import * as THUMBNAIL_STATUSES from 'constants/thumbnail_upload_statuses';
import * as TRANSACTIONS from 'constants/transaction_types';
import * as TX_LIST from 'constants/transaction_list';
import * as ABANDON_STATES from 'constants/abandon_states';
import * as TXO_ABANDON_STATES from 'constants/abandon_txo_states';
import * as TXO_LIST from 'constants/txo_list';
import * as SPEECH_URLS from 'constants/speech_urls';
import * as DAEMON_SETTINGS from 'constants/daemon_settings';
import * as SHARED_PREFERENCES from 'constants/shared_preferences';
import * as COLLECTIONS_CONSTS from 'constants/collections';
import { SEARCH_TYPES, SEARCH_OPTIONS } from 'constants/search';
import { DEFAULT_KNOWN_TAGS, DEFAULT_FOLLOWED_TAGS, MATURE_TAGS } from 'constants/tags';
import Lbry, { apiCall } from 'lbry';
import LbryFirst from 'lbry-first';
import { selectState as selectSearchState } from 'redux/selectors/search';
// constants
export {
@ -23,12 +23,14 @@ export {
CLAIM_VALUES,
LICENSES,
THUMBNAIL_STATUSES,
SEARCH_TYPES,
SEARCH_OPTIONS,
SETTINGS,
DAEMON_SETTINGS,
TRANSACTIONS,
TX_LIST,
TXO_LIST,
ABANDON_STATES,
TXO_ABANDON_STATES,
SORT_OPTIONS,
PAGES,
DEFAULT_KNOWN_TAGS,
@ -36,12 +38,10 @@ export {
MATURE_TAGS,
SPEECH_URLS,
SHARED_PREFERENCES,
COLLECTIONS_CONSTS,
};
// common
export { Lbry, apiCall };
export { LbryFirst };
export {
regexInvalidURI,
regexAddress,
@ -52,8 +52,6 @@ export {
isURIClaimable,
isNameValid,
convertToShareLink,
splitBySeparator,
isURIEqual,
} from 'lbryURI';
// middlware
@ -61,13 +59,6 @@ export { buildSharedStateMiddleware } from 'redux/middleware/shared-state';
// actions
export { doToast, doDismissToast, doError, doDismissError } from 'redux/actions/notifications';
export {
doLocalCollectionCreate,
doFetchItemsInCollection,
doFetchItemsInCollections,
doCollectionEdit,
doCollectionDelete,
} from 'redux/actions/collections';
export {
doFetchClaimsByChannel,
@ -77,27 +68,20 @@ export {
doResolveUris,
doResolveUri,
doFetchChannelListMine,
doFetchCollectionListMine,
doCreateChannel,
doUpdateChannel,
doClaimSearch,
doImportChannel,
doRepost,
doClearRepostError,
doClearChannelErrors,
doCheckPublishNameAvailability,
doPurchaseList,
doCheckPendingClaims,
doCollectionPublish,
doCollectionPublishUpdate,
} from 'redux/actions/claims';
export { doClearPurchasedUriSuccess, doPurchaseUri, doFileGet } from 'redux/actions/file';
export { doDeletePurchasedUri, doPurchaseUri, doFileGet } from 'redux/actions/file';
export {
doFetchFileInfo,
doFileList,
doFetchFileInfos,
doFetchFileInfosAndPublishedClaims,
doSetFileListSort,
} from 'redux/actions/file_info';
@ -108,9 +92,19 @@ export {
doUploadThumbnail,
doPrepareEdit,
doPublish,
doCheckReflectingFiles,
doCheckPendingPublishes,
} from 'redux/actions/publish';
export {
doSearch,
doResolvedSearch,
doUpdateSearchQuery,
doFocusSearchInput,
doBlurSearchInput,
setSearchApi,
doUpdateSearchOptions,
} from 'redux/actions/search';
export { savePosition } from 'redux/actions/content';
export {
@ -134,11 +128,20 @@ export {
doUpdateBlockHeight,
doClearSupport,
doSupportAbandonForClaim,
doFetchUtxoCounts,
doUtxoConsolidate,
doTipClaimMass,
} from 'redux/actions/wallet';
export { doToggleTagFollow, doAddTag, doDeleteTag } from 'redux/actions/tags';
export {
doCommentList,
doCommentCreate,
doCommentAbandon,
doCommentHide,
doCommentUpdate,
} from 'redux/actions/comments';
export { doToggleBlockChannel } from 'redux/actions/blocked';
export { doPopulateSharedUserState, doPreferenceGet, doPreferenceSet } from 'redux/actions/sync';
// utils
@ -149,44 +152,29 @@ export { isClaimNsfw, createNormalizedClaimSearchKey } from 'util/claim';
// reducers
export { claimsReducer } from 'redux/reducers/claims';
export { commentReducer } from 'redux/reducers/comments';
export { contentReducer } from 'redux/reducers/content';
export { fileInfoReducer } from 'redux/reducers/file_info';
export { fileReducer } from 'redux/reducers/file';
export { notificationsReducer } from 'redux/reducers/notifications';
export { publishReducer } from 'redux/reducers/publish';
export { searchReducer } from 'redux/reducers/search';
export { tagsReducer } from 'redux/reducers/tags';
export { blockedReducer } from 'redux/reducers/blocked';
export { walletReducer } from 'redux/reducers/wallet';
export { collectionsReducer } from 'redux/reducers/collections';
// selectors
export { makeSelectContentPositionForUri } from 'redux/selectors/content';
export { selectToast, selectError } from 'redux/selectors/notifications';
export {
selectFailedPurchaseUris,
selectPurchasedUris,
selectPurchaseUriErrorMessage,
selectLastPurchasedUri,
makeSelectStreamingUrlForUri,
} from 'redux/selectors/file';
export {
selectSavedCollectionIds,
selectBuiltinCollections,
selectResolvedCollections,
selectMyUnpublishedCollections,
selectMyEditedCollections,
selectMyPublishedCollections,
selectMyPublishedMixedCollections,
selectMyPublishedPlaylistCollections,
makeSelectEditedCollectionForId,
makeSelectPendingCollectionForId,
makeSelectPublishedCollectionForId,
makeSelectCollectionIsMine,
makeSelectMyPublishedCollectionForId,
makeSelectUnpublishedCollectionForId,
makeSelectCollectionForId,
makeSelectClaimUrlInCollection,
makeSelectUrlsForCollectionId,
makeSelectClaimIdsForCollectionId,
makeSelectNameForCollectionId,
makeSelectCountForCollectionId,
makeSelectIsResolvingCollectionForId,
makeSelectIndexForUrlInCollection,
makeSelectPreviousUrlForCollectionAndUrl,
makeSelectNextUrlForCollectionAndUrl,
makeSelectCollectionForIdHasClaimUrl,
} from 'redux/selectors/collections';
export {
makeSelectClaimForUri,
@ -202,37 +190,27 @@ export {
makeSelectTitleForUri,
makeSelectDateForUri,
makeSelectAmountForUri,
makeSelectEffectiveAmountForUri,
makeSelectTagsForUri,
makeSelectTagInClaimOrChannelForUri,
makeSelectTotalStakedAmountForChannelUri,
makeSelectStakedLevelForChannelUri,
makeSelectContentTypeForUri,
makeSelectIsUriResolving,
makeSelectPendingClaimForUri,
makeSelectTotalItemsForChannel,
makeSelectTotalPagesForChannel,
makeSelectNsfwCountFromUris,
makeSelectNsfwCountForChannel,
makeSelectOmittedCountForChannel,
makeSelectClaimIsNsfw,
makeSelectRecommendedContentForUri,
makeSelectResolvedRecommendedContentForUri,
makeSelectFirstRecommendedFileForUri,
makeSelectChannelForClaimUri,
makeSelectChannelPermUrlForClaimUri,
makeSelectMyChannelPermUrlForName,
makeSelectClaimIsPending,
makeSelectReflectingClaimForUri,
makeSelectPendingByUri,
makeSelectClaimsInChannelForCurrentPageState,
makeSelectShortUrlForUri,
makeSelectCanonicalUrlForUri,
makeSelectPermanentUrlForUri,
makeSelectSupportsForUri,
makeSelectMyPurchasesForPage,
selectPendingById,
makeSelectClaimWasPurchased,
makeSelectAbandoningClaimById,
makeSelectIsAbandoningClaimForUri,
makeSelectClaimHasSource,
makeSelectClaimIsStreamPlaceholder,
selectPendingIds,
selectReflectingById,
makeSelectClaimForClaimId,
selectClaimsById,
selectClaimsByUri,
selectAllClaimsByChannel,
@ -241,16 +219,13 @@ export {
selectMyActiveClaims,
selectAllFetchingChannelClaims,
selectIsFetchingClaimListMine,
selectMyClaims,
selectPendingClaims,
selectMyClaimsWithoutChannels,
selectMyChannelUrls,
selectMyClaimUrisWithoutChannels,
selectAllMyClaimsByOutpoint,
selectMyClaimsOutpoints,
selectFetchingMyChannels,
selectFetchingMyCollections,
selectMyCollectionIds,
selectMyChannelClaims,
selectResolvingUris,
selectPlayingUri,
@ -270,23 +245,10 @@ export {
selectRepostError,
selectRepostLoading,
selectClaimIdsByUri,
selectMyClaimsPage,
selectMyClaimsPageNumber,
selectMyClaimsPageItemCount,
selectFetchingMyClaimsPageError,
selectMyPurchases,
selectIsFetchingMyPurchases,
selectFetchingMyPurchasesError,
selectMyPurchasesCount,
selectPurchaseUriSuccess,
makeSelectClaimIdForUri,
selectUpdatingCollection,
selectUpdateCollectionError,
selectCreatingCollection,
selectCreateCollectionError,
makeSelectClaimIdIsPending,
} from 'redux/selectors/claims';
export { makeSelectCommentsForUri } from 'redux/selectors/comments';
export {
makeSelectFileInfoForUri,
makeSelectDownloadingForUri,
@ -310,7 +272,6 @@ export {
makeSelectSearchDownloadUrlsForPage,
makeSelectSearchDownloadUrlsCount,
selectDownloadUrlsCount,
makeSelectStreamingUrlForUri,
} from 'redux/selectors/file_info';
export {
@ -322,6 +283,22 @@ export {
selectTakeOverAmount,
} from 'redux/selectors/publish';
export { selectSearchState };
export {
makeSelectSearchUris,
makeSelectResolvedSearchResults,
makeSelectResolvedSearchResultsLastPageReached,
selectSearchValue,
selectSearchOptions,
selectIsSearching,
selectResolvedSearchResultsByQuery,
selectResolvedSearchResultsByQueryLastPageReached,
selectSearchUrisByQuery,
selectSearchBarFocused,
selectSearchSuggestions,
makeSelectQueryWithOptions,
} from 'redux/selectors/search';
export {
selectBalance,
selectTotalBalance,
@ -333,7 +310,6 @@ export {
selectSupportsByOutpoint,
selectTotalSupports,
selectTransactionItems,
selectTransactionsFile,
selectRecentTransactions,
selectHasTransactions,
selectIsFetchingTransactions,
@ -371,11 +347,17 @@ export {
selectPendingSupportTransactions,
selectAbandonClaimSupportError,
makeSelectPendingAmountByUri,
selectIsFetchingUtxoCounts,
selectIsConsolidatingUtxos,
selectIsMassClaimingTips,
selectUtxoCounts,
selectPendingOtherTransactions,
selectPendingConsolidateTxid,
selectPendingMassClaimTxid,
} from 'redux/selectors/wallet';
export {
selectFollowedTags,
selectFollowedTagsList,
selectUnfollowedTags,
makeSelectIsFollowingTag,
} from 'redux/selectors/tags';
export {
selectBlockedChannels,
selectChannelIsBlocked,
selectBlockedChannelsCount,
} from 'redux/selectors/blocked';

@ -1,183 +0,0 @@
// @flow
import 'proxy-polyfill';
const CHECK_LBRYFIRST_STARTED_TRY_NUMBER = 200;
//
// Basic LBRYFIRST connection config
// Offers a proxy to call LBRYFIRST methods
//
const LbryFirst: LbryFirstTypes = {
isConnected: false,
connectPromise: null,
lbryFirstConnectionString: 'http://localhost:1337/rpc',
apiRequestHeaders: { 'Content-Type': 'application/json' },
// Allow overriding lbryFirst connection string (e.g. to `/api/proxy` for lbryweb)
setLbryFirstConnectionString: (value: string) => {
LbryFirst.lbryFirstConnectionString = value;
},
setApiHeader: (key: string, value: string) => {
LbryFirst.apiRequestHeaders = Object.assign(LbryFirst.apiRequestHeaders, { [key]: value });
},
unsetApiHeader: key => {
Object.keys(LbryFirst.apiRequestHeaders).includes(key) &&
delete LbryFirst.apiRequestHeaders['key'];
},
// Allow overriding Lbry methods
overrides: {},
setOverride: (methodName, newMethod) => {
LbryFirst.overrides[methodName] = newMethod;
},
getApiRequestHeaders: () => LbryFirst.apiRequestHeaders,
//
// LbryFirst Methods
//
status: (params = {}) => lbryFirstCallWithResult('status', params),
stop: () => lbryFirstCallWithResult('stop', {}),
version: () => lbryFirstCallWithResult('version', {}),
// Upload to youtube
upload: (params: { title: string, description: string, file_path: ?string } = {}) => {
// Only upload when originally publishing for now
if (!params.file_path) {
return Promise.resolve();
}
const uploadParams: {
Title: string,
Description: string,
FilePath: string,
Category: string,
Keywords: string,
} = {
Title: params.title,
Description: params.description,
FilePath: params.file_path,
Category: '',
Keywords: '',
};
return lbryFirstCallWithResult('youtube.Upload', uploadParams);
},
hasYTAuth: (token: string) => {
const hasYTAuthParams = {};
hasYTAuthParams.AuthToken = token;
return lbryFirstCallWithResult('youtube.HasAuth', hasYTAuthParams);
},
ytSignup: () => {
const emptyParams = {};
return lbryFirstCallWithResult('youtube.Signup', emptyParams);
},
remove: () => {
const emptyParams = {};
return lbryFirstCallWithResult('youtube.Remove', emptyParams);
},
// Connect to lbry-first
connect: () => {
if (LbryFirst.connectPromise === null) {
LbryFirst.connectPromise = new Promise((resolve, reject) => {
let tryNum = 0;
// Check every half second to see if the lbryFirst is accepting connections
function checkLbryFirstStarted() {
tryNum += 1;
LbryFirst.status()
.then(resolve)
.catch(() => {
if (tryNum <= CHECK_LBRYFIRST_STARTED_TRY_NUMBER) {
setTimeout(checkLbryFirstStarted, tryNum < 50 ? 400 : 1000);
} else {
reject(new Error('Unable to connect to LBRY'));
}
});
}
checkLbryFirstStarted();
});
}
// Flow thinks this could be empty, but it will always return a promise
// $FlowFixMe
return LbryFirst.connectPromise;
},
};
function checkAndParse(response) {
if (response.status >= 200 && response.status < 300) {
return response.json();
}
return response.json().then(json => {
let error;
if (json.error) {
const errorMessage = typeof json.error === 'object' ? json.error.message : json.error;
error = new Error(errorMessage);
} else {
error = new Error('Protocol error with unknown response signature');
}
return Promise.reject(error);
});
}
export function apiCall(method: string, params: ?{}, resolve: Function, reject: Function) {
const counter = new Date().getTime();
const paramsArray = [params];
const options = {
method: 'POST',
headers: LbryFirst.apiRequestHeaders,
body: JSON.stringify({
jsonrpc: '2.0',
method,
params: paramsArray,
id: counter,
}),
};
return fetch(LbryFirst.lbryFirstConnectionString, options)
.then(checkAndParse)
.then(response => {
const error = response.error || (response.result && response.result.error);
if (error) {
return reject(error);
}
return resolve(response.result);
})
.catch(reject);
}
function lbryFirstCallWithResult(name: string, params: ?{} = {}) {
return new Promise((resolve, reject) => {
apiCall(
name,
params,
result => {
resolve(result);
},
reject
);
});
}
// This is only for a fallback
// If there is a LbryFirst method that is being called by an app, it should be added to /flow-typed/LbryFirst.js
const lbryFirstProxy = new Proxy(LbryFirst, {
get(target: LbryFirstTypes, name: string) {
if (name in target) {
return target[name];
}
return (params = {}) =>
new Promise((resolve, reject) => {
apiCall(name, params, resolve, reject);
});
},
});
export default lbryFirstProxy;

@ -40,7 +40,7 @@ const Lbry: LbryTypes = {
const formats = [
[/\.(mp4|m4v|webm|flv|f4v|ogv)$/i, 'video'],
[/\.(mp3|m4a|aac|wav|flac|ogg|opus)$/i, 'audio'],
[/\.(jpeg|jpg|png|gif|svg|webp)$/i, 'image'],
[/\.(jpeg|jpg|png|gif|svg)$/i, 'image'],
[/\.(h|go|ja|java|js|jsx|c|cpp|cs|css|rb|scss|sh|php|py)$/i, 'script'],
[/\.(html|json|csv|txt|log|md|markdown|docx|pdf|xml|yml|yaml)$/i, 'document'],
[/\.(pdf|odf|doc|docx|epub|org|rtf)$/i, 'e-book'],
@ -86,14 +86,9 @@ const Lbry: LbryTypes = {
stream_abandon: params => daemonCallWithResult('stream_abandon', params),
stream_list: params => daemonCallWithResult('stream_list', params),
channel_abandon: params => daemonCallWithResult('channel_abandon', params),
channel_sign: params => daemonCallWithResult('channel_sign', params),
support_create: params => daemonCallWithResult('support_create', params),
support_list: params => daemonCallWithResult('support_list', params),
stream_repost: params => daemonCallWithResult('stream_repost', params),
collection_resolve: params => daemonCallWithResult('collection_resolve', params),
collection_list: params => daemonCallWithResult('collection_list', params),
collection_create: params => daemonCallWithResult('collection_create', params),
collection_update: params => daemonCallWithResult('collection_update', params),
// File fetching and manipulation
file_list: (params = {}) => daemonCallWithResult('file_list', params),
@ -116,8 +111,6 @@ const Lbry: LbryTypes = {
transaction_list: (params = {}) => daemonCallWithResult('transaction_list', params),
utxo_release: (params = {}) => daemonCallWithResult('utxo_release', params),
support_abandon: (params = {}) => daemonCallWithResult('support_abandon', params),
purchase_list: (params = {}) => daemonCallWithResult('purchase_list', params),
txo_list: (params = {}) => daemonCallWithResult('txo_list', params),
sync_hash: (params = {}) => daemonCallWithResult('sync_hash', params),
sync_apply: (params = {}) => daemonCallWithResult('sync_apply', params),

@ -4,7 +4,7 @@ const channelNameMinLength = 1;
const claimIdMaxLength = 40;
// see https://spec.lbry.com/#urls
export const regexInvalidURI = /[ =&#:$@%?;/\\"<>%{}|^~[\]`\u{0000}-\u{0008}\u{000b}-\u{000c}\u{000e}-\u{001F}\u{D800}-\u{DFFF}\u{FFFE}-\u{FFFF}]/u;
export const regexInvalidURI = /[ =&#:$@%?;/\\"<>%\{\}|^~[\]`\u{0000}-\u{0008}\u{000b}-\u{000c}\u{000e}-\u{001F}\u{D800}-\u{DFFF}\u{FFFE}-\u{FFFF}]/u;
export const regexAddress = /^(b|r)(?=[^0OIl]{32,33})[0-9A-Za-z]{32,33}$/;
const regexPartProtocol = '^((?:lbry://)?)';
const regexPartStreamOrChannelName = '([^:$#/]*)';
@ -12,11 +12,6 @@ const regexPartModifierSeparator = '([:$#]?)([^/]*)';
const queryStringBreaker = '^([\\S]+)([?][\\S]*)';
const separateQuerystring = new RegExp(queryStringBreaker);
const MOD_SEQUENCE_SEPARATOR = '*';
const MOD_CLAIM_ID_SEPARATOR_OLD = '#';
const MOD_CLAIM_ID_SEPARATOR = ':';
const MOD_BID_POSITION_SEPARATOR = '$';
/**
* Parses a LBRY name into its component parts. Throws errors with user-friendly
* messages for invalid names.
@ -34,7 +29,7 @@ const MOD_BID_POSITION_SEPARATOR = '$';
* - secondaryBidPosition (int, if present)
*/
export function parseURI(url: string, requireProto: boolean = false): LbryUrlObj {
export function parseURI(URL: string, requireProto: boolean = false): LbryUrlObj {
// Break into components. Empty sub-matches are converted to null
const componentsRegex = new RegExp(
@ -47,12 +42,12 @@ export function parseURI(url: string, requireProto: boolean = false): LbryUrlObj
);
// chop off the querystring first
let QSStrippedURL, qs;
const qsRegexResult = separateQuerystring.exec(url);
const qsRegexResult = separateQuerystring.exec(URL);
if (qsRegexResult) {
[QSStrippedURL, qs] = qsRegexResult.slice(1).map(match => match || null);
}
const cleanURL = QSStrippedURL || url;
const cleanURL = QSStrippedURL || URL;
const regexMatch = componentsRegex.exec(cleanURL) || [];
const [proto, ...rest] = regexMatch.slice(1).map(match => match || null);
const path = rest.join('');
@ -65,8 +60,6 @@ export function parseURI(url: string, requireProto: boolean = false): LbryUrlObj
secondaryModSeparator,
secondaryModValue,
] = rest;
const searchParams = new URLSearchParams(qs || '');
const startTime = searchParams.get('t');
// Validate protocol
if (requireProto && !proto) {
@ -80,7 +73,7 @@ export function parseURI(url: string, requireProto: boolean = false): LbryUrlObj
rest.forEach(urlPiece => {
if (urlPiece && urlPiece.includes(' ')) {
throw new Error(__('URL can not include a space'));
console.error('URL can not include a space');
}
});
@ -128,7 +121,6 @@ export function parseURI(url: string, requireProto: boolean = false): LbryUrlObj
: {}),
...(primaryBidPosition ? { primaryBidPosition: parseInt(primaryBidPosition, 10) } : {}),
...(secondaryBidPosition ? { secondaryBidPosition: parseInt(secondaryBidPosition, 10) } : {}),
...(startTime ? { startTime: parseInt(startTime, 10) } : {}),
// The values below should not be used for new uses of parseURI
// They will not work properly with canonical_urls
@ -149,11 +141,11 @@ function parseURIModifier(modSeperator: ?string, modValue: ?string) {
throw new Error(__(`No modifier provided after separator %modSeperator%.`, { modSeperator }));
}
if (modSeperator === MOD_CLAIM_ID_SEPARATOR || MOD_CLAIM_ID_SEPARATOR_OLD) {
if (modSeperator === '#') {
claimId = modValue;
} else if (modSeperator === MOD_SEQUENCE_SEPARATOR) {
} else if (modSeperator === ':') {
claimSequence = modValue;
} else if (modSeperator === MOD_BID_POSITION_SEPARATOR) {
} else if (modSeperator === '$') {
bidPosition = modValue;
}
}
@ -192,7 +184,6 @@ export function buildURI(
primaryBidPosition,
secondaryClaimSequence,
secondaryBidPosition,
startTime,
...deprecatedParts
} = UrlObj;
const { claimId, claimName, contentName } = deprecatedParts;
@ -242,8 +233,7 @@ export function buildURI(
(secondaryClaimName ? `/${secondaryClaimName}` : '') +
(secondaryClaimId ? `#${secondaryClaimId}` : '') +
(secondaryClaimSequence ? `:${secondaryClaimSequence}` : '') +
(secondaryBidPosition ? `${secondaryBidPosition}` : '') +
(secondaryBidPosition ? `${secondaryBidPosition}` : '')
(startTime ? `?t=${startTime}` : '')
);
}
@ -258,7 +248,6 @@ export function normalizeURI(URL: string) {
primaryBidPosition,
secondaryClaimSequence,
secondaryBidPosition,
startTime,
} = parseURI(URL);
return buildURI({ return buildURI({
@ -270,7 +259,6 @@ export function normalizeURI(URL: string) {
primaryBidPosition,
secondaryClaimSequence,
secondaryBidPosition,
startTime,
});
}
@ -325,22 +313,3 @@ export function convertToShareLink(URL: string) {
'https://open.lbry.com/'
);
}
export function splitBySeparator(uri: string) {
const protocolLength = 7;
return uri.startsWith('lbry://') ? uri.slice(protocolLength).split(/[#:*]/) : uri.split(/#:\*\$/);
}
export function isURIEqual(uriA: string, uriB: string) {
const parseA = parseURI(normalizeURI(uriA));
const parseB = parseURI(normalizeURI(uriB));
if (parseA.isChannel) {
if (parseB.isChannel && parseA.channelClaimId === parseB.channelClaimId) {
return true;
}
} else if (parseA.streamClaimId === parseB.streamClaimId) {
return true;
} else {
return false;
}
}

@ -1,6 +1,6 @@
// @flow
import * as ACTIONS from 'constants/action_types';
import * as ABANDON_STATES from 'constants/abandon_states';
import * as TXO_STATES from 'constants/abandon_txo_states';
import Lbry from 'lbry';
import { normalizeURI } from 'lbryURI';
import { doToast } from 'redux/actions/notifications';
@ -9,34 +9,14 @@ import {
selectResolvingUris,
selectClaimsByUri,
selectMyChannelClaims,
selectPendingIds,
selectPendingClaimsById,
} from 'redux/selectors/claims';
import { doFetchTxoPage } from 'redux/actions/wallet';
import { selectSupportsByOutpoint } from 'redux/selectors/wallet';
import { creditsToString } from 'util/format-credits';
import { batchActions } from 'util/batch-actions';
import { createNormalizedClaimSearchKey } from 'util/claim';
import { PAGE_SIZE } from 'constants/claim';
import {
selectPendingCollections,
makeSelectClaimIdsForCollectionId,
} from 'redux/selectors/collections';
import {
doFetchItemsInCollection,
doFetchItemsInCollections,
doCollectionDelete,
} from 'redux/actions/collections';
let onChannelConfirmCallback;
export function doResolveUris(uris: Array<string>, returnCachedClaims: boolean = false) {
let checkPendingInterval;
export function doResolveUris(
uris: Array<string>,
returnCachedClaims: boolean = false,
resolveReposts: boolean = true
) {
return (dispatch: Dispatch, getState: GetState) => {
const normalizedUris = uris.map(normalizeURI);
const state = getState();
@ -55,13 +35,6 @@ export function doResolveUris(
return;
}
const options: { include_is_my_output?: boolean, include_purchase_receipt: boolean } = {
include_purchase_receipt: true,
};
if (urisToResolve.length === 1) {
options.include_is_my_output = true;
}
dispatch({
type: ACTIONS.RESOLVE_URIS_STARTED,
data: { uris: normalizedUris },
@ -72,88 +45,49 @@ export function doResolveUris(
stream: ?StreamClaim,
channel: ?ChannelClaim,
claimsInChannel: ?number,
collection: ?CollectionClaim,
},
} = {};
const collectionIds: Array<string> = [];
Lbry.resolve({ urls: urisToResolve }).then((result: ResolveResponse) => {
Object.entries(result).forEach(([uri, uriResolveInfo]) => {
return Lbry.resolve({ urls: urisToResolve, ...options }).then(
async(result: ResolveResponse) => {
let repostedResults = {};
const repostsToResolve = [];
const fallbackResolveInfo = {
stream: null,
claimsInChannel: null,
channel: null,
};
function processResult(result, resolveInfo = {}, checkReposts = false) { // Flow has terrible Object.entries support
Object.entries(result).forEach(([uri, uriResolveInfo]) => { // https://github.com/facebook/flow/issues/2221
// Flow has terrible Object.entries support if (uriResolveInfo) {
// https://github.com/facebook/flow/issues/2221 if (uriResolveInfo.error) {
if (uriResolveInfo) { resolveInfo[uri] = { ...fallbackResolveInfo };
if (uriResolveInfo.error) { } else {
// $FlowFixMe let result = {};
resolveInfo[uri] = { ...fallbackResolveInfo }; if (uriResolveInfo.value_type === 'channel') {
} else { result.channel = uriResolveInfo;
if (checkReposts) { // $FlowFixMe
if (uriResolveInfo.reposted_claim) { result.claimsInChannel = uriResolveInfo.meta.claims_in_channel;
// $FlowFixMe } else {
const repostUrl = uriResolveInfo.reposted_claim.permanent_url; result.stream = uriResolveInfo;
if (!resolvingUris.includes(repostUrl)) { if (uriResolveInfo.signing_channel) {
repostsToResolve.push(repostUrl); result.channel = uriResolveInfo.signing_channel;
} result.claimsInChannel =
} (uriResolveInfo.signing_channel.meta &&
} uriResolveInfo.signing_channel.meta.claims_in_channel) ||
let result = {}; 0;
if (uriResolveInfo.value_type === 'channel') {
result.channel = uriResolveInfo;
// $FlowFixMe
result.claimsInChannel = uriResolveInfo.meta.claims_in_channel;
} else if (uriResolveInfo.value_type === 'collection') {
result.collection = uriResolveInfo;
// $FlowFixMe
collectionIds.push(uriResolveInfo.claim_id);
} else {
result.stream = uriResolveInfo;
if (uriResolveInfo.signing_channel) {
result.channel = uriResolveInfo.signing_channel;
result.claimsInChannel =
(uriResolveInfo.signing_channel.meta &&
uriResolveInfo.signing_channel.meta.claims_in_channel) ||
0;
}
}
// $FlowFixMe
resolveInfo[uri] = result;
} }
} }
}); // $FlowFixMe
resolveInfo[uri] = result;
}
} }
processResult(result, resolveInfo, resolveReposts); });
if (repostsToResolve.length) { dispatch({
dispatch({ type: ACTIONS.RESOLVE_URIS_COMPLETED,
type: ACTIONS.RESOLVE_URIS_STARTED, data: { resolveInfo },
data: { uris: repostsToResolve, debug: 'reposts' }, });
}); });
repostedResults = await Lbry.resolve({ urls: repostsToResolve, ...options });
}
processResult(repostedResults, resolveInfo);
dispatch({
type: ACTIONS.RESOLVE_URIS_COMPLETED,
data: { resolveInfo },
});
if (collectionIds.length) {
dispatch(doFetchItemsInCollections({ collectionIds: collectionIds, pageSize: 5 }));
}
return result;
}
);
}; };
} }
@ -164,40 +98,32 @@ export function doResolveUri(uri: string) {
export function doFetchClaimListMine( export function doFetchClaimListMine(
page: number = 1, page: number = 1,
pageSize: number = 99999, pageSize: number = 99999,
resolve: boolean = true, resolve: boolean = true
filterBy: Array<string> = []
) { ) {
return (dispatch: Dispatch) => { return (dispatch: Dispatch) => {
dispatch({ dispatch({
type: ACTIONS.FETCH_CLAIM_LIST_MINE_STARTED, type: ACTIONS.FETCH_CLAIM_LIST_MINE_STARTED,
}); });
let claimTypes = ['stream', 'repost'];
if (filterBy && filterBy.length !== 0) {
claimTypes = claimTypes.filter(t => filterBy.includes(t));
}
// $FlowFixMe // $FlowFixMe
Lbry.claim_list({ Lbry.claim_list({ page, page_size: pageSize, claim_type: ['stream', 'repost'], resolve }).then(
page: page, (result: StreamListResponse) => {
page_size: pageSize, const claims = result.items;
claim_type: claimTypes,
resolve, dispatch({
}).then((result: StreamListResponse) => { type: ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED,
dispatch({ data: {
type: ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED, claims,
data: { },
result, });
resolve, }
}, );
});
});
}; };
} }
export function doAbandonTxo(txo: Txo, cb: string => void) { export function doAbandonTxo(txo: Txo, cb: string => void) {
return (dispatch: Dispatch) => { return (dispatch: Dispatch) => {
if (cb) cb(ABANDON_STATES.PENDING); if (cb) cb(TXO_STATES.PENDING);
const isClaim = txo.type === 'claim'; const isClaim = txo.type === 'claim';
const isSupport = txo.type === 'support' && txo.is_my_input === true; const isSupport = txo.type === 'support' && txo.is_my_input === true;
const isTip = txo.type === 'support' && txo.is_my_input === false; const isTip = txo.type === 'support' && txo.is_my_input === false;
@ -217,7 +143,7 @@ export function doAbandonTxo(txo: Txo, cb: string => void) {
}); });
const errorCallback = () => { const errorCallback = () => {
if (cb) cb(ABANDON_STATES.ERROR); if (cb) cb(TXO_STATES.ERROR);
dispatch( dispatch(
doToast({ doToast({
message: isClaim ? 'Error abandoning your claim/support' : 'Error unlocking your tip', message: isClaim ? 'Error abandoning your claim/support' : 'Error unlocking your tip',
@ -240,7 +166,7 @@ export function doAbandonTxo(txo: Txo, cb: string => void) {
} else { } else {
abandonMessage = __('Successfully unlocked your tip!'); abandonMessage = __('Successfully unlocked your tip!');
} }
if (cb) cb(ABANDON_STATES.DONE); if (cb) cb(TXO_STATES.DONE);
dispatch( dispatch(
doToast({ doToast({
@ -280,7 +206,7 @@ export function doAbandonTxo(txo: Txo, cb: string => void) {
}; };
} }
export function doAbandonClaim(txid: string, nout: number, cb: string => void) { export function doAbandonClaim(txid: string, nout: number) {
const outpoint = `${txid}:${nout}`; const outpoint = `${txid}:${nout}`;
return (dispatch: Dispatch, getState: GetState) => { return (dispatch: Dispatch, getState: GetState) => {
@ -321,7 +247,6 @@ export function doAbandonClaim(txid: string, nout: number, cb: string => void) {
isError: true, isError: true,
}) })
); );
if (cb) cb(ABANDON_STATES.ERROR);
}; };
const successCallback = () => { const successCallback = () => {
@ -329,7 +254,6 @@ export function doAbandonClaim(txid: string, nout: number, cb: string => void) {
type: completedActionType, type: completedActionType,
data, data,
}); });
if (cb) cb(ABANDON_STATES.DONE);
let abandonMessage; let abandonMessage;
if (isClaim) { if (isClaim) {
@ -383,8 +307,6 @@ export function doFetchClaimsByChannel(uri: string, page: number = 1) {
valid_channel_signature: true, valid_channel_signature: true,
page: page || 1, page: page || 1,
order_by: ['release_time'], order_by: ['release_time'],
include_is_my_output: true,
include_purchase_receipt: true,
}).then((result: ClaimSearchResponse) => { }).then((result: ClaimSearchResponse) => {
const { items: claims, total_items: claimsInChannel, page: returnedPage } = result; const { items: claims, total_items: claimsInChannel, page: returnedPage } = result;
@ -401,13 +323,7 @@ export function doFetchClaimsByChannel(uri: string, page: number = 1) {
}; };
} }
export function doClearChannelErrors() { export function doCreateChannel(name: string, amount: number, optionalParams: any) {
return {
type: ACTIONS.CLEAR_CHANNEL_ERRORS,
};
}
export function doCreateChannel(name: string, amount: number, optionalParams: any, onConfirm: any) {
return (dispatch: Dispatch) => { return (dispatch: Dispatch) => {
dispatch({ dispatch({
type: ACTIONS.CREATE_CHANNEL_STARTED, type: ACTIONS.CREATE_CHANNEL_STARTED,
@ -423,8 +339,7 @@ export function doCreateChannel(name: string, amount: number, optionalParams: an
description?: string, description?: string,
website_url?: string, website_url?: string,
email?: string, email?: string,
tags?: Array<Tag>, tags?: Array<string>,
languages?: Array<string>,
} = { } = {
name, name,
bid: creditsToString(amount), bid: creditsToString(amount),
@ -453,9 +368,6 @@ export function doCreateChannel(name: string, amount: number, optionalParams: an
if (optionalParams.tags) { if (optionalParams.tags) {
createParams.tags = optionalParams.tags.map(tag => tag.name); createParams.tags = optionalParams.tags.map(tag => tag.name);
} }
if (optionalParams.languages) {
createParams.languages = optionalParams.languages;
}
} }
return ( return (
@ -468,13 +380,6 @@ export function doCreateChannel(name: string, amount: number, optionalParams: an
type: ACTIONS.CREATE_CHANNEL_COMPLETED, type: ACTIONS.CREATE_CHANNEL_COMPLETED,
data: { channelClaim }, data: { channelClaim },
}); });
dispatch({
type: ACTIONS.UPDATE_PENDING_CLAIMS,
data: {
claims: [channelClaim],
},
});
dispatch(doCheckPendingClaims(onConfirm));
return channelClaim; return channelClaim;
}) })
.catch(error => { .catch(error => {
@ -482,12 +387,13 @@ export function doCreateChannel(name: string, amount: number, optionalParams: an
type: ACTIONS.CREATE_CHANNEL_FAILED, type: ACTIONS.CREATE_CHANNEL_FAILED,
data: error.message, data: error.message,
}); });
return error;
}) })
); );
}; };
} }
export function doUpdateChannel(params: any, cb: any) { export function doUpdateChannel(params: any) {
return (dispatch: Dispatch, getState: GetState) => { return (dispatch: Dispatch, getState: GetState) => {
dispatch({ dispatch({
type: ACTIONS.UPDATE_CHANNEL_STARTED, type: ACTIONS.UPDATE_CHANNEL_STARTED,
@ -507,7 +413,7 @@ export function doUpdateChannel(params: any, cb: any) {
email: params.email, email: params.email,
tags: [], tags: [],
replace: true, replace: true,
languages: params.languages || [], languages: [],
locations: [], locations: [],
blocking: true, blocking: true,
}; };
@ -517,10 +423,15 @@ export function doUpdateChannel(params: any, cb: any) {
} }
// we'll need to remove these once we add locations/channels to channel page edit/create options // we'll need to remove these once we add locations/channels to channel page edit/create options
if (channelClaim && channelClaim.value && channelClaim.value.locations) { if (channelClaim && channelClaim.value && channelClaim.value.locations) {
updateParams.locations = channelClaim.value.locations; updateParams.locations = channelClaim.value.locations;
} }
if (channelClaim && channelClaim.value && channelClaim.value.languages) {
updateParams.languages = channelClaim.value.languages;
}
return Lbry.channel_update(updateParams) return Lbry.channel_update(updateParams)
.then((result: ChannelUpdateResponse) => { .then((result: ChannelUpdateResponse) => {
const channelClaim = result.outputs[0]; const channelClaim = result.outputs[0];
@ -528,16 +439,7 @@ export function doUpdateChannel(params: any, cb: any) {
type: ACTIONS.UPDATE_CHANNEL_COMPLETED, type: ACTIONS.UPDATE_CHANNEL_COMPLETED,
data: { channelClaim }, data: { channelClaim },
}); });
dispatch({
type: ACTIONS.UPDATE_PENDING_CLAIMS,
data: {
claims: [channelClaim],
},
});
dispatch(doCheckPendingClaims(cb));
return Boolean(result.outputs[0]);
}) })
.then()
.catch(error => { .catch(error => {
dispatch({ dispatch({
type: ACTIONS.UPDATE_CHANNEL_FAILED, type: ACTIONS.UPDATE_CHANNEL_FAILED,
@ -554,7 +456,7 @@ export function doImportChannel(certificate: string) {
}); });
return Lbry.channel_import({ channel_data: certificate }) return Lbry.channel_import({ channel_data: certificate })
.then(() => { .then((result: string) => {
dispatch({ dispatch({
type: ACTIONS.IMPORT_CHANNEL_COMPLETED, type: ACTIONS.IMPORT_CHANNEL_COMPLETED,
}); });
@ -585,48 +487,7 @@ export function doFetchChannelListMine(
}); });
}; };
const failure = error => { Lbry.channel_list({ page, page_size: pageSize, resolve }).then(callback);
dispatch({
type: ACTIONS.FETCH_CHANNEL_LIST_FAILED,
data: error,
});
};
Lbry.channel_list({ page, page_size: pageSize, resolve }).then(callback, failure);
};
}
export function doFetchCollectionListMine(page: number = 1, pageSize: number = 99999) {
return (dispatch: Dispatch) => {
dispatch({
type: ACTIONS.FETCH_COLLECTION_LIST_STARTED,
});
const callback = (response: CollectionListResponse) => {
const { items } = response;
dispatch({
type: ACTIONS.FETCH_COLLECTION_LIST_COMPLETED,
data: { claims: items },
});
dispatch(
doFetchItemsInCollections({
collectionIds: items.map(claim => claim.claim_id),
page_size: 5,
})
);
};
const failure = error => {
dispatch({
type: ACTIONS.FETCH_COLLECTION_LIST_FAILED,
data: error,
});
};
Lbry.collection_list({ page, page_size: pageSize, resolve_claims: 1, resolve: true }).then(
callback,
failure
);
}; };
} }
@ -634,16 +495,13 @@ export function doClaimSearch(
options: { options: {
page_size: number, page_size: number,
page: number, page: number,
no_totals?: boolean, no_totals: boolean,
any_tags?: Array<string>, any_tags?: Array<string>,
claim_ids?: Array<string>,
channel_ids?: Array<string>, channel_ids?: Array<string>,
not_channel_ids?: Array<string>, not_channel_ids?: Array<string>,
not_tags?: Array<string>, not_tags?: Array<string>,
order_by?: Array<string>, order_by?: Array<string>,
release_time?: string, release_time?: string,
has_source?: boolean,
has_no_souce?: boolean,
} = { } = {
no_totals: true, no_totals: true,
page_size: 10, page_size: 10,
@ -651,7 +509,7 @@ export function doClaimSearch(
} }
) { ) {
const query = createNormalizedClaimSearchKey(options); const query = createNormalizedClaimSearchKey(options);
return async(dispatch: Dispatch) => { return (dispatch: Dispatch) => {
dispatch({ dispatch({
type: ACTIONS.CLAIM_SEARCH_STARTED, type: ACTIONS.CLAIM_SEARCH_STARTED,
data: { query: query }, data: { query: query },
@ -675,7 +533,6 @@ export function doClaimSearch(
pageSize: options.page_size, pageSize: options.page_size,
}, },
}); });
return resolveInfo;
}; };
const failure = err => { const failure = err => {
@ -684,18 +541,15 @@ export function doClaimSearch(
data: { query }, data: { query },
error: err, error: err,
}); });
return false;
}; };
return await Lbry.claim_search({ Lbry.claim_search(options).then(success, failure);
...options,
include_purchase_receipt: true,
}).then(success, failure);
}; };
} }
export function doRepost(options: StreamRepostOptions) { export function doRepost(options: StreamRepostOptions) {
return (dispatch: Dispatch): Promise<any> => { return (dispatch: Dispatch) => {
// $FlowFixMe
return new Promise(resolve => { return new Promise(resolve => {
dispatch({ dispatch({
type: ACTIONS.CLAIM_REPOST_STARTED, type: ACTIONS.CLAIM_REPOST_STARTED,
@ -710,12 +564,6 @@ export function doRepost(options: StreamRepostOptions) {
repostClaim, repostClaim,
}, },
}); });
dispatch({
type: ACTIONS.UPDATE_PENDING_CLAIMS,
data: {
claims: [repostClaim],
},
});
dispatch(doFetchClaimListMine(1, 10)); dispatch(doFetchClaimListMine(1, 10));
resolve(repostClaim); resolve(repostClaim);
@ -735,336 +583,8 @@ export function doRepost(options: StreamRepostOptions) {
}; };
} }
export function doCollectionPublish(
options: {
name: string,
bid: string,
blocking: true,
title?: string,
channel_id?: string,
thumbnail_url?: string,
description?: string,
tags?: Array<Tag>,
languages?: Array<string>,
claims: Array<string>,
},
localId: string
) {
return (dispatch: Dispatch): Promise<any> => {
// $FlowFixMe
const params: {
name: string,
bid: string,
channel_id?: string,
blocking?: true,
title?: string,
thumbnail_url?: string,
description?: string,
tags?: Array<string>,
languages?: Array<string>,
claims: Array<string>,
} = {
name: options.name,
bid: creditsToString(options.bid),
title: options.title,
thumbnail_url: options.thumbnail_url,
description: options.description,
tags: [],
languages: options.languages || [],
locations: [],
blocking: true,
claims: options.claims,
};
if (options.tags) {
params['tags'] = options.tags.map(tag => tag.name);
}
if (options.channel_id) {
params['channel_id'] = options.channel_id;
}
return new Promise(resolve => {
dispatch({
type: ACTIONS.COLLECTION_PUBLISH_STARTED,
});
function success(response) {
const collectionClaim = response.outputs[0];
dispatch(
batchActions(
{
type: ACTIONS.COLLECTION_PUBLISH_COMPLETED,
data: { claimId: collectionClaim.claim_id },
},
// move unpublished collection to pending collection with new publish id
// recent publish won't resolve this second. handle it in checkPending
{
type: ACTIONS.UPDATE_PENDING_CLAIMS,
data: {
claims: [collectionClaim],
},
}
)
);
dispatch({
type: ACTIONS.COLLECTION_PENDING,
data: { localId: localId, claimId: collectionClaim.claim_id },
});
dispatch(doCheckPendingClaims());
dispatch(doFetchCollectionListMine(1, 10));
return resolve(collectionClaim);
}
function failure(error) {
dispatch({
type: ACTIONS.COLLECTION_PUBLISH_FAILED,
data: {
error: error.message,
},
});
}
return Lbry.collection_create(params).then(success, failure);
});
};
}
export function doCollectionPublishUpdate(
options: {
bid?: string,
blocking?: true,
title?: string,
thumbnail_url?: string,
description?: string,
claim_id: string,
tags?: Array<Tag>,
languages?: Array<string>,
claims?: Array<string>,
channel_id?: string,
},
isBackgroundUpdate?: boolean
) {
return (dispatch: Dispatch, getState: GetState): Promise<any> => {
// TODO: implement one click update
const updateParams: {
bid?: string,
blocking?: true,
title?: string,
thumbnail_url?: string,
channel_id?: string,
description?: string,
claim_id: string,
tags?: Array<string>,
languages?: Array<string>,
claims?: Array<string>,
clear_claims: boolean,
replace?: boolean,
} = isBackgroundUpdate
? {
blocking: true,
claim_id: options.claim_id,
clear_claims: true,
}
: {
bid: creditsToString(options.bid),
title: options.title,
thumbnail_url: options.thumbnail_url,
description: options.description,
tags: [],
languages: options.languages || [],
locations: [],
blocking: true,
claim_id: options.claim_id,
clear_claims: true,
replace: true,
};
if (isBackgroundUpdate && updateParams.claim_id) {
const state = getState();
updateParams['claims'] = makeSelectClaimIdsForCollectionId(updateParams.claim_id)(state);
} else if (options.claims) {
updateParams['claims'] = options.claims;
}
if (options.tags) {
updateParams['tags'] = options.tags.map(tag => tag.name);
}
if (options.channel_id) {
updateParams['channel_id'] = options.channel_id;
}
return new Promise(resolve => {
dispatch({
type: ACTIONS.COLLECTION_PUBLISH_UPDATE_STARTED,
});
function success(response) {
const collectionClaim = response.outputs[0];
dispatch({
type: ACTIONS.COLLECTION_PUBLISH_UPDATE_COMPLETED,
data: {
collectionClaim,
},
});
dispatch({
type: ACTIONS.COLLECTION_PENDING,
data: { claimId: collectionClaim.claim_id },
});
dispatch({
type: ACTIONS.UPDATE_PENDING_CLAIMS,
data: {
claims: [collectionClaim],
},
});
dispatch(doCheckPendingClaims());
return resolve(collectionClaim);
}
function failure(error) {
dispatch({
type: ACTIONS.COLLECTION_PUBLISH_UPDATE_FAILED,
data: {
error: error.message,
},
});
}
return Lbry.collection_update(updateParams).then(success, failure);
});
};
}
export function doCheckPublishNameAvailability(name: string) {
return (dispatch: Dispatch) => {
dispatch({
type: ACTIONS.CHECK_PUBLISH_NAME_STARTED,
});
return Lbry.claim_list({ name: name }).then(result => {
dispatch({
type: ACTIONS.CHECK_PUBLISH_NAME_COMPLETED,
});
if (result.items.length) {
dispatch({
type: ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED,
data: {
result,
resolve: false,
},
});
}
return !(result && result.items && result.items.length);
});
};
}
export function doClearRepostError() { export function doClearRepostError() {
return { return {
type: ACTIONS.CLEAR_REPOST_ERROR, type: ACTIONS.CLEAR_REPOST_ERROR,
}; };
} }
export function doPurchaseList(page: number = 1, pageSize: number = PAGE_SIZE) {
return (dispatch: Dispatch) => {
dispatch({
type: ACTIONS.PURCHASE_LIST_STARTED,
});
const success = (result: PurchaseListResponse) => {
return dispatch({
type: ACTIONS.PURCHASE_LIST_COMPLETED,
data: {
result,
},
});
};
const failure = error => {
dispatch({
type: ACTIONS.PURCHASE_LIST_FAILED,
data: {
error: error.message,
},
});
};
Lbry.purchase_list({
page: page,
page_size: pageSize,
resolve: true,
}).then(success, failure);
};
}
export const doCheckPendingClaims = (onChannelConfirmed: Function) => (
dispatch: Dispatch,
getState: GetState
) => {
if (onChannelConfirmed) {
onChannelConfirmCallback = onChannelConfirmed;
}
clearInterval(checkPendingInterval);
const checkTxoList = () => {
const state = getState();
const pendingById = Object.assign({}, selectPendingClaimsById(state));
const pendingTxos = (Object.values(pendingById): any).map(p => p.txid);
// use collections
const pendingCollections = selectPendingCollections(state);
if (pendingTxos.length) {
Lbry.txo_list({ txid: pendingTxos })
.then(result => {
const txos = result.items;
const idsToConfirm = [];
txos.forEach(txo => {
if (txo.claim_id && txo.confirmations > 0) {
idsToConfirm.push(txo.claim_id);
delete pendingById[txo.claim_id];
}
});
return { idsToConfirm, pendingById };
})
.then(results => {
const { idsToConfirm, pendingById } = results;
if (idsToConfirm.length) {
return Lbry.claim_list({ claim_id: idsToConfirm, resolve: true }).then(results => {
const claims = results.items;
const collectionIds = claims
.filter(c => c.value_type === 'collection')
.map(c => c.claim_id);
dispatch({
type: ACTIONS.UPDATE_CONFIRMED_CLAIMS,
data: {
claims: claims,
pending: pendingById,
},
});
if (collectionIds.length) {
dispatch(
doFetchItemsInCollections({
collectionIds,
})
);
}
const channelClaims = claims.filter(claim => claim.value_type === 'channel');
if (channelClaims.length && onChannelConfirmCallback) {
channelClaims.forEach(claim => onChannelConfirmCallback(claim));
}
if (Object.keys(pendingById).length === 0) {
clearInterval(checkPendingInterval);
}
});
}
});
} else {
clearInterval(checkPendingInterval);
}
};
// do something with onConfirmed (typically get blocklist for channel)
checkPendingInterval = setInterval(() => {
checkTxoList();
}, 30000);
};
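A minimal usage sketch for the collection publish flow above (not part of the diff): it assumes a thunk-enabled store and that doCollectionPublish / doCheckPendingClaims are re-exported from the package root.

// Hypothetical wiring, for illustration only.
import { doCollectionPublish, doCheckPendingClaims } from 'lbry-redux';

function publishPlaylist(dispatch, localId) {
  // On success the action moves the local collection to pending and starts
  // doCheckPendingClaims(), which polls txo_list/claim_list every 30 seconds
  // until the new claim has at least one confirmation.
  return dispatch(
    doCollectionPublish(
      {
        name: 'my-playlist',
        bid: '0.01',
        blocking: true,
        claims: ['claimIdA', 'claimIdB'], // ordered claim ids to include
      },
      localId // uuid of the unpublished local collection being promoted
    )
  );
}

// The poller can also be started directly, with an optional callback that
// fires whenever a pending channel claim confirms:
// dispatch(doCheckPendingClaims(channelClaim => console.log(channelClaim.claim_id)));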

View file

@ -1,495 +0,0 @@
// @flow
import * as ACTIONS from 'constants/action_types';
import { v4 as uuid } from 'uuid';
import Lbry from 'lbry';
import { doClaimSearch, doAbandonClaim } from 'redux/actions/claims';
import { makeSelectClaimForClaimId } from 'redux/selectors/claims';
import {
makeSelectCollectionForId,
// makeSelectPublishedCollectionForId, // for "save" or "copy" action
makeSelectMyPublishedCollectionForId,
makeSelectPublishedCollectionForId,
makeSelectUnpublishedCollectionForId,
makeSelectEditedCollectionForId,
} from 'redux/selectors/collections';
import * as COLS from 'constants/collections';
const getTimestamp = () => {
return Math.floor(Date.now() / 1000);
};
const FETCH_BATCH_SIZE = 50;
export const doLocalCollectionCreate = (
name: string,
collectionItems: Array<string>,
type: string,
sourceId: string
) => (dispatch: Dispatch) => {
return dispatch({
type: ACTIONS.COLLECTION_NEW,
data: {
entry: {
id: uuid(), // start with a uuid, this becomes a claimId after publish
name: name,
updatedAt: getTimestamp(),
items: collectionItems || [],
sourceId: sourceId,
type: type,
},
},
});
};
export const doCollectionDelete = (id: string, colKey: ?string = undefined) => (
dispatch: Dispatch,
getState: GetState
) => {
const state = getState();
const claim = makeSelectClaimForClaimId(id)(state);
const collectionDelete = () =>
dispatch({
type: ACTIONS.COLLECTION_DELETE,
data: {
id: id,
collectionKey: colKey,
},
});
if (claim && !colKey) {
// could support "abandon, but keep" later
const { txid, nout } = claim;
return dispatch(doAbandonClaim(txid, nout, collectionDelete));
}
return collectionDelete();
};
// Given a collection, save its collectionId to be resolved and displayed in Library
// export const doCollectionSave = (
// id: string,
// ) => (dispatch: Dispatch) => {
// return dispatch({
// type: ACTIONS.COLLECTION_SAVE,
// data: {
// id: id,
// },
// });
// };
// Given a collection and name, copy it to a local private collection with a name
// export const doCollectionCopy = (
// id: string,
// ) => (dispatch: Dispatch) => {
// return dispatch({
// type: ACTIONS.COLLECTION_COPY,
// data: {
// id: id,
// },
// });
// };
export const doFetchItemsInCollections = (
resolveItemsOptions: {
collectionIds: Array<string>,
pageSize?: number,
},
resolveStartedCallback?: () => void
) => async(dispatch: Dispatch, getState: GetState) => {
/*
1) make sure all the collection claims are loaded into claims reducer, search/resolve if necessary.
2) get the item claims for each
3) format and make sure they're in the order as in the claim
4) Build the collection objects and update collections reducer
5) Update redux claims reducer
*/
let state = getState();
const { collectionIds, pageSize } = resolveItemsOptions;
dispatch({
type: ACTIONS.COLLECTION_ITEMS_RESOLVE_STARTED,
data: { ids: collectionIds },
});
if (resolveStartedCallback) resolveStartedCallback();
const collectionIdsToSearch = collectionIds.filter(claimId => !state.claims.byId[claimId]);
if (collectionIdsToSearch.length) {
await dispatch(doClaimSearch({ claim_ids: collectionIdsToSearch, page: 1, page_size: 9999 }));
}
const stateAfterClaimSearch = getState();
async function fetchItemsForCollectionClaim(claim: CollectionClaim, pageSize?: number) {
const totalItems = claim.value.claims && claim.value.claims.length;
const claimId = claim.claim_id;
const itemOrder = claim.value.claims;
const sortResults = (items: Array<Claim>, claimList) => {
const newItems: Array<Claim> = [];
claimList.forEach(id => {
const index = items.findIndex(i => i.claim_id === id);
if (index >= 0) {
newItems.push(items[index]);
}
});
/*
This will return newItems[] of length less than total_items below
if one or more of the claims has been abandoned. That's ok for now.
*/
return newItems;
};
const mergeBatches = (
arrayOfResults: Array<{ items: Array<Claim>, total_items: number }>,
claimList: Array<string>
) => {
const mergedResults: { items: Array<Claim>, total_items: number } = {
items: [],
total_items: 0,
};
arrayOfResults.forEach(result => {
mergedResults.items = mergedResults.items.concat(result.items);
mergedResults.total_items = result.total_items;
});
mergedResults.items = sortResults(mergedResults.items, claimList);
return mergedResults;
};
try {
const batchSize = pageSize || FETCH_BATCH_SIZE;
const batches: Array<Promise<any>> = [];
for (let i = 0; i < Math.ceil(totalItems / batchSize); i++) {
batches[i] = Lbry.claim_search({
claim_ids: claim.value.claims,
page: i + 1,
page_size: batchSize,
no_totals: true,
});
}
const itemsInBatches = await Promise.all(batches);
const result = mergeBatches(itemsInBatches, itemOrder);
// $FlowFixMe
const itemsById: { claimId: string, items?: ?Array<GenericClaim> } = { claimId: claimId };
if (result.items) {
itemsById.items = result.items;
} else {
itemsById.items = null;
}
return itemsById;
} catch (e) {
return {
claimId: claimId,
items: null,
};
}
}
function formatForClaimActions(resultClaimsByUri) {
const formattedClaims = {};
Object.entries(resultClaimsByUri).forEach(([uri, uriResolveInfo]) => {
// Flow has terrible Object.entries support
// https://github.com/facebook/flow/issues/2221
if (uriResolveInfo) {
let result = {};
if (uriResolveInfo.value_type === 'channel') {
result.channel = uriResolveInfo;
// $FlowFixMe
result.claimsInChannel = uriResolveInfo.meta.claims_in_channel;
// ALSO SKIP COLLECTIONS
} else if (uriResolveInfo.value_type === 'collection') {
result.collection = uriResolveInfo;
} else {
result.stream = uriResolveInfo;
if (uriResolveInfo.signing_channel) {
result.channel = uriResolveInfo.signing_channel;
result.claimsInChannel =
(uriResolveInfo.signing_channel.meta &&
uriResolveInfo.signing_channel.meta.claims_in_channel) ||
0;
}
}
// $FlowFixMe
formattedClaims[uri] = result;
}
});
return formattedClaims;
}
const invalidCollectionIds = [];
const promisedCollectionItemFetches = [];
collectionIds.forEach(collectionId => {
const claim = makeSelectClaimForClaimId(collectionId)(stateAfterClaimSearch);
if (!claim) {
invalidCollectionIds.push(collectionId);
} else {
promisedCollectionItemFetches.push(fetchItemsForCollectionClaim(claim, pageSize));
}
});
// $FlowFixMe
const collectionItemsById: Array<{
claimId: string,
items: ?Array<GenericClaim>,
}> = await Promise.all(promisedCollectionItemFetches);
const newCollectionObjectsById = {};
const resolvedItemsByUrl = {};
collectionItemsById.forEach(entry => {
// $FlowFixMe
const collectionItems: Array<any> = entry.items;
const collectionId = entry.claimId;
if (collectionItems) {
const claim = makeSelectClaimForClaimId(collectionId)(stateAfterClaimSearch);
const editedCollection = makeSelectEditedCollectionForId(collectionId)(stateAfterClaimSearch);
const { name, timestamp, value } = claim || {};
const { title } = value;
const valueTypes = new Set();
const streamTypes = new Set();
let newItems = [];
let isPlaylist;
if (collectionItems) {
collectionItems.forEach(collectionItem => {
newItems.push(collectionItem.permanent_url);
valueTypes.add(collectionItem.value_type);
if (collectionItem.value.stream_type) {
streamTypes.add(collectionItem.value.stream_type);
}
resolvedItemsByUrl[collectionItem.canonical_url] = collectionItem;
});
isPlaylist =
valueTypes.size === 1 &&
valueTypes.has('stream') &&
((streamTypes.size === 1 && (streamTypes.has('audio') || streamTypes.has('video'))) ||
(streamTypes.size === 2 && (streamTypes.has('audio') && streamTypes.has('video'))));
}
newCollectionObjectsById[collectionId] = {
items: newItems,
id: collectionId,
name: title || name,
itemCount: claim.value.claims.length,
type: isPlaylist ? 'playlist' : 'collection',
updatedAt: timestamp,
};
if (editedCollection && timestamp > editedCollection['updatedAt']) {
dispatch({
type: ACTIONS.COLLECTION_DELETE,
data: {
id: collectionId,
collectionKey: 'edited',
},
});
}
} else {
invalidCollectionIds.push(collectionId);
}
});
const formattedClaimsByUri = formatForClaimActions(collectionItemsById);
dispatch({
type: ACTIONS.RESOLVE_URIS_COMPLETED,
data: { resolveInfo: formattedClaimsByUri },
});
dispatch({
type: ACTIONS.COLLECTION_ITEMS_RESOLVE_COMPLETED,
data: {
resolvedCollections: newCollectionObjectsById,
failedCollectionIds: invalidCollectionIds,
},
});
};
export const doFetchItemsInCollection = (
options: { collectionId: string, pageSize?: number },
cb?: () => void
) => {
const { collectionId, pageSize } = options;
const newOptions: { collectionIds: Array<string>, pageSize?: number } = {
collectionIds: [collectionId],
};
if (pageSize) newOptions.pageSize = pageSize;
return doFetchItemsInCollections(newOptions, cb);
};
export const doCollectionEdit = (collectionId: string, params: CollectionEditParams) => async(
dispatch: Dispatch,
getState: GetState
) => {
const state = getState();
const collection: Collection = makeSelectCollectionForId(collectionId)(state);
const editedCollection: Collection = makeSelectEditedCollectionForId(collectionId)(state);
const unpublishedCollection: Collection = makeSelectUnpublishedCollectionForId(collectionId)(
state
);
const publishedCollection: Collection = makeSelectPublishedCollectionForId(collectionId)(state); // needs to be published only
const generateCollectionItemsFromSearchResult = results => {
return (
Object.values(results)
// $FlowFixMe
.reduce(
(
acc,
cur: {
stream: ?StreamClaim,
channel: ?ChannelClaim,
claimsInChannel: ?number,
collection: ?CollectionClaim,
}
) => {
let url;
if (cur.stream) {
url = cur.stream.permanent_url;
} else if (cur.channel) {
url = cur.channel.permanent_url;
} else if (cur.collection) {
url = cur.collection.permanent_url;
} else {
return acc;
}
acc.push(url);
return acc;
},
[]
)
);
};
if (!collection) {
return dispatch({
type: ACTIONS.COLLECTION_ERROR,
data: {
message: 'collection does not exist',
},
});
}
let currentItems = collection.items ? collection.items.concat() : [];
const { claims: passedClaims, order, claimIds, replace, remove, type } = params;
const collectionType = type || collection.type;
let newItems: Array<?string> = currentItems;
if (passedClaims) {
if (remove) {
const passedUrls = passedClaims.map(claim => claim.permanent_url);
// $FlowFixMe // need this?
newItems = currentItems.filter((item: string) => !passedUrls.includes(item));
} else {
passedClaims.forEach(claim => newItems.push(claim.permanent_url));
}
}
if (claimIds) {
const batches = [];
if (claimIds.length > 50) {
for (let i = 0; i < Math.ceil(claimIds.length / 50); i++) {
batches[i] = claimIds.slice(i * 50, (i + 1) * 50);
}
} else {
batches[0] = claimIds;
}
const resultArray = await Promise.all(
batches.map(batch => {
let options = { claim_ids: batch, page: 1, page_size: 50 };
return dispatch(doClaimSearch(options));
})
);
const searchResults = Object.assign({}, ...resultArray);
if (replace) {
newItems = generateCollectionItemsFromSearchResult(searchResults);
} else {
newItems = currentItems.concat(generateCollectionItemsFromSearchResult(searchResults));
}
}
if (order) {
const [movedItem] = currentItems.splice(order.from, 1);
currentItems.splice(order.to, 0, movedItem);
}
// console.log('p&e', publishedCollection.items, newItems, publishedCollection.items.join(','), newItems.join(','))
if (editedCollection) {
// delete edited if newItems are the same as publishedItems
if (publishedCollection.items.join(',') === newItems.join(',')) {
dispatch({
type: ACTIONS.COLLECTION_DELETE,
data: {
id: collectionId,
collectionKey: 'edited',
},
});
} else {
dispatch({
type: ACTIONS.COLLECTION_EDIT,
data: {
id: collectionId,
collectionKey: 'edited',
collection: {
items: newItems,
id: collectionId,
name: params.name || collection.name,
updatedAt: getTimestamp(),
type: collectionType,
},
},
});
}
} else if (publishedCollection) {
dispatch({
type: ACTIONS.COLLECTION_EDIT,
data: {
id: collectionId,
collectionKey: 'edited',
collection: {
items: newItems,
id: collectionId,
name: params.name || collection.name,
updatedAt: getTimestamp(),
type: collectionType,
},
},
});
} else if (COLS.BUILTIN_LISTS.includes(collectionId)) {
dispatch({
type: ACTIONS.COLLECTION_EDIT,
data: {
id: collectionId,
collectionKey: 'builtin',
collection: {
items: newItems,
id: collectionId,
name: params.name || collection.name,
updatedAt: getTimestamp(),
type: collectionType,
},
},
});
} else if (unpublishedCollection) {
dispatch({
type: ACTIONS.COLLECTION_EDIT,
data: {
id: collectionId,
collectionKey: 'unpublished',
collection: {
items: newItems,
id: collectionId,
name: params.name || collection.name,
updatedAt: getTimestamp(),
type: collectionType,
},
},
});
}
return true;
};
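A hedged sketch of how the collection actions above might be driven from application code; the store wiring and the package-root import are assumptions, not shown in this diff.

import { doLocalCollectionCreate, doCollectionEdit, doFetchItemsInCollection } from 'lbry-redux';

function buildAndResolvePlaylist(dispatch) {
  // create a purely local (unpublished) playlist; its id is a uuid until published
  dispatch(doLocalCollectionCreate('watch later', [], 'playlist'));

  // append two claims to an existing collection by claim id (claim_search is batched by 50)
  dispatch(doCollectionEdit('someCollectionId', { claimIds: ['idA', 'idB'] }));

  // resolve a published collection's items, FETCH_BATCH_SIZE (50) per claim_search call
  dispatch(
    doFetchItemsInCollection({ collectionId: 'someCollectionId' }, () => {
      console.log('resolve started');
    })
  );
}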

View file

@ -0,0 +1,225 @@
// @flow
import * as ACTIONS from 'constants/action_types';
import Lbry from 'lbry';
import { selectClaimsByUri, selectMyChannelClaims } from 'redux/selectors/claims';
import { doToast } from 'redux/actions/notifications';
export function doCommentList(uri: string, page: number = 1, pageSize: number = 99999) {
return (dispatch: Dispatch, getState: GetState) => {
const state = getState();
const claim = selectClaimsByUri(state)[uri];
const claimId = claim ? claim.claim_id : null;
dispatch({
type: ACTIONS.COMMENT_LIST_STARTED,
});
Lbry.comment_list({
claim_id: claimId,
page,
page_size: pageSize,
})
.then((result: CommentListResponse) => {
const { items: comments } = result;
dispatch({
type: ACTIONS.COMMENT_LIST_COMPLETED,
data: {
comments,
claimId: claimId,
uri: uri,
},
});
})
.catch(error => {
console.log(error);
dispatch({
type: ACTIONS.COMMENT_LIST_FAILED,
data: error,
});
});
};
}
export function doCommentCreate(
comment: string = '',
claim_id: string = '',
channel: string,
parent_id?: string
) {
return (dispatch: Dispatch, getState: GetState) => {
const state = getState();
dispatch({
type: ACTIONS.COMMENT_CREATE_STARTED,
});
const myChannels = selectMyChannelClaims(state);
const namedChannelClaim =
myChannels && myChannels.find(myChannel => myChannel.name === channel);
const channel_id = namedChannelClaim ? namedChannelClaim.claim_id : null; // guard so the null check below also covers a missing channel
if (channel_id == null) {
dispatch({
type: ACTIONS.COMMENT_CREATE_FAILED,
data: {},
});
dispatch(
doToast({
message: 'Channel cannot be anonymous, please select a channel and try again.',
isError: true,
})
);
return;
}
return Lbry.comment_create({
comment: comment,
claim_id: claim_id,
channel_id: channel_id,
parent_id: parent_id,
})
.then((result: CommentCreateResponse) => {
dispatch({
type: ACTIONS.COMMENT_CREATE_COMPLETED,
data: {
comment: result,
claimId: claim_id,
},
});
})
.catch(error => {
dispatch({
type: ACTIONS.COMMENT_CREATE_FAILED,
data: error,
});
dispatch(
doToast({
message: 'Unable to create comment, please try again later.',
isError: true,
})
);
});
};
}
export function doCommentHide(comment_id: string) {
return (dispatch: Dispatch) => {
dispatch({
type: ACTIONS.COMMENT_HIDE_STARTED,
});
return Lbry.comment_hide({
comment_ids: [comment_id],
})
.then((result: CommentHideResponse) => {
dispatch({
type: ACTIONS.COMMENT_HIDE_COMPLETED,
data: result,
});
})
.catch(error => {
dispatch({
type: ACTIONS.COMMENT_HIDE_FAILED,
data: error,
});
dispatch(
doToast({
message: 'Unable to hide this comment, please try again later.',
isError: true,
})
);
});
};
}
export function doCommentAbandon(comment_id: string) {
return (dispatch: Dispatch) => {
dispatch({
type: ACTIONS.COMMENT_ABANDON_STARTED,
});
return Lbry.comment_abandon({
comment_id: comment_id,
})
.then((result: CommentAbandonResponse) => {
// Comment may not be deleted if the signing channel can't be signed.
// This will happen if the channel was recently created or abandoned.
if (result.abandoned) {
dispatch({
type: ACTIONS.COMMENT_ABANDON_COMPLETED,
data: {
comment_id: comment_id,
},
});
} else {
dispatch({
type: ACTIONS.COMMENT_ABANDON_FAILED,
});
dispatch(
doToast({
message: 'Your channel is still being set up, try again in a few moments.',
isError: true,
})
);
}
})
.catch(error => {
dispatch({
type: ACTIONS.COMMENT_ABANDON_FAILED,
data: error,
});
dispatch(
doToast({
message: 'Unable to delete this comment, please try again later.',
isError: true,
})
);
});
};
}
export function doCommentUpdate(comment_id: string, comment: string) {
// if they provided an empty string, they must have wanted to abandon
if (comment === '') {
return doCommentAbandon(comment_id);
} else {
return (dispatch: Dispatch) => {
dispatch({
type: ACTIONS.COMMENT_UPDATE_STARTED,
});
return Lbry.comment_update({
comment_id: comment_id,
comment: comment,
})
.then((result: CommentUpdateResponse) => {
if (result != null) {
dispatch({
type: ACTIONS.COMMENT_UPDATE_COMPLETED,
data: {
comment: result,
},
});
} else {
// a null result means the update failed
dispatch({
type: ACTIONS.COMMENT_UPDATE_FAILED,
});
dispatch(
doToast({
message: 'Your channel is still being set up, try again in a few moments.',
isError: true,
})
);
}
})
.catch(error => {
dispatch({
type: ACTIONS.COMMENT_UPDATE_FAILED,
data: error,
});
dispatch(
doToast({
message: 'Unable to edit this comment, please try again later.',
isError: true,
})
);
});
};
}
}
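For illustration, a small helper (not in the repo) composing the comment actions above; it assumes dispatch comes from a thunk-enabled store and that the creators are exported from the package root.

import { doCommentCreate, doCommentList, doCommentAbandon } from 'lbry-redux';

async function postAndRefresh(dispatch, uri, claimId, channelName, text) {
  // doCommentCreate requires a named channel; anonymous comments are rejected above
  await dispatch(doCommentCreate(text, claimId, channelName));
  // re-fetch the first page of comments so the new one shows up
  dispatch(doCommentList(uri, 1, 50));
}

// Note that doCommentUpdate('abc123', '') falls through to doCommentAbandon,
// so an explicit delete is simply: dispatch(doCommentAbandon('abc123'));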

View file

@ -3,11 +3,8 @@ import * as ACTIONS from 'constants/action_types';
import Lbry from 'lbry'; import Lbry from 'lbry';
import { doToast } from 'redux/actions/notifications'; import { doToast } from 'redux/actions/notifications';
import { selectBalance } from 'redux/selectors/wallet'; import { selectBalance } from 'redux/selectors/wallet';
import { import { makeSelectFileInfoForUri, selectDownloadingByOutpoint } from 'redux/selectors/file_info';
makeSelectFileInfoForUri, import { makeSelectStreamingUrlForUri } from 'redux/selectors/file';
selectDownloadingByOutpoint,
makeSelectStreamingUrlForUri,
} from 'redux/selectors/file_info';
import { makeSelectClaimForUri } from 'redux/selectors/claims'; import { makeSelectClaimForUri } from 'redux/selectors/claims';
type Dispatch = (action: any) => any; type Dispatch = (action: any) => any;
@ -31,6 +28,7 @@ export function doFileGet(uri: string, saveFile: boolean = true, onSuccess?: Get
.then((streamInfo: GetResponse) => { .then((streamInfo: GetResponse) => {
const timeout = const timeout =
streamInfo === null || typeof streamInfo !== 'object' || streamInfo.error === 'Timeout'; streamInfo === null || typeof streamInfo !== 'object' || streamInfo.error === 'Timeout';
if (timeout) { if (timeout) {
dispatch({ dispatch({
type: ACTIONS.FETCH_FILE_INFO_FAILED, type: ACTIONS.FETCH_FILE_INFO_FAILED,
@ -39,17 +37,16 @@ export function doFileGet(uri: string, saveFile: boolean = true, onSuccess?: Get
dispatch(doToast({ message: `File timeout for uri ${uri}`, isError: true })); dispatch(doToast({ message: `File timeout for uri ${uri}`, isError: true }));
} else { } else {
if (streamInfo.purchase_receipt || streamInfo.content_fee) { // purchase was completed successfully
dispatch({ dispatch({
type: ACTIONS.PURCHASE_URI_COMPLETED, type: ACTIONS.PURCHASE_URI_COMPLETED,
data: { uri, purchaseReceipt: streamInfo.purchase_receipt || streamInfo.content_fee }, data: { uri },
}); });
}
dispatch({ dispatch({
type: ACTIONS.FETCH_FILE_INFO_COMPLETED, type: ACTIONS.FETCH_FILE_INFO_COMPLETED,
data: { data: {
fileInfo: streamInfo, fileInfo: streamInfo,
outpoint: outpoint, outpoint: streamInfo.outpoint,
}, },
}); });
@ -58,10 +55,10 @@ export function doFileGet(uri: string, saveFile: boolean = true, onSuccess?: Get
} }
} }
}) })
.catch(error => { .catch(() => {
dispatch({ dispatch({
type: ACTIONS.PURCHASE_URI_FAILED, type: ACTIONS.PURCHASE_URI_FAILED,
data: { uri, error }, data: { uri },
}); });
dispatch({ dispatch({
@ -104,10 +101,7 @@ export function doPurchaseUri(
data: { uri, error: `Already fetching uri: ${uri}` }, data: { uri, error: `Already fetching uri: ${uri}` },
}); });
if (onSuccess) { Promise.resolve();
onSuccess(fileInfo);
}
return; return;
} }
@ -126,8 +120,9 @@ export function doPurchaseUri(
}; };
} }
export function doClearPurchasedUriSuccess() { export function doDeletePurchasedUri(uri: string) {
return { return {
type: ACTIONS.CLEAR_PURCHASED_URI_SUCCESS, type: ACTIONS.DELETE_PURCHASED_URI,
data: { uri },
}; };
} }

View file

@ -1,6 +1,7 @@
import * as ACTIONS from 'constants/action_types'; import * as ACTIONS from 'constants/action_types';
import Lbry from 'lbry'; import Lbry from 'lbry';
import { selectClaimsByUri } from 'redux/selectors/claims'; import { doFetchClaimListMine } from 'redux/actions/claims';
import { selectClaimsByUri, selectIsFetchingClaimListMine } from 'redux/selectors/claims';
import { selectIsFetchingFileList, selectUrisLoading } from 'redux/selectors/file_info'; import { selectIsFetchingFileList, selectUrisLoading } from 'redux/selectors/file_info';
export function doFetchFileInfo(uri) { export function doFetchFileInfo(uri) {
@ -57,10 +58,13 @@ export function doFileList(page = 1, pageSize = 99999) {
}; };
} }
export function doFetchFileInfos() { export function doFetchFileInfosAndPublishedClaims() {
return (dispatch, getState) => { return (dispatch, getState) => {
const state = getState(); const state = getState();
const isFetchingClaimListMine = selectIsFetchingClaimListMine(state);
const isFetchingFileInfo = selectIsFetchingFileList(state); const isFetchingFileInfo = selectIsFetchingFileList(state);
if (!isFetchingClaimListMine) dispatch(doFetchClaimListMine());
if (!isFetchingFileInfo) dispatch(doFileList()); if (!isFetchingFileInfo) dispatch(doFileList());
}; };
} }

View file

@ -1,6 +1,6 @@
// @flow // @flow
import * as ACTIONS from 'constants/action_types'; import * as ACTIONS from 'constants/action_types';
import { v4 as uuid } from 'uuid'; import uuid from 'uuid/v4';
export function doToast(params: ToastParams) { export function doToast(params: ToastParams) {
if (!params) { if (!params) {

View file

@ -4,15 +4,14 @@ import { SPEECH_STATUS, SPEECH_PUBLISH } from 'constants/speech_urls';
import * as ACTIONS from 'constants/action_types'; import * as ACTIONS from 'constants/action_types';
import * as THUMBNAIL_STATUSES from 'constants/thumbnail_upload_statuses'; import * as THUMBNAIL_STATUSES from 'constants/thumbnail_upload_statuses';
import Lbry from 'lbry'; import Lbry from 'lbry';
import LbryFirst from 'lbry-first';
import { batchActions } from 'util/batch-actions'; import { batchActions } from 'util/batch-actions';
import { creditsToString } from 'util/format-credits'; import { creditsToString } from 'util/format-credits';
import { doError } from 'redux/actions/notifications'; import { doError } from 'redux/actions/notifications';
import { isClaimNsfw } from 'util/claim'; import { isClaimNsfw } from 'util/claim';
import { import {
selectMyChannelClaims, selectMyChannelClaims,
selectPendingById,
selectMyClaimsWithoutChannels, selectMyClaimsWithoutChannels,
selectReflectingById,
} from 'redux/selectors/claims'; } from 'redux/selectors/claims';
import { selectPublishFormValues, selectMyClaimForUri } from 'redux/selectors/publish'; import { selectPublishFormValues, selectMyClaimForUri } from 'redux/selectors/publish';
@ -21,7 +20,6 @@ export const doResetThumbnailStatus = () => (dispatch: Dispatch) => {
type: ACTIONS.UPDATE_PUBLISH_FORM, type: ACTIONS.UPDATE_PUBLISH_FORM,
data: { data: {
thumbnailPath: '', thumbnailPath: '',
thumbnailError: undefined,
}, },
}); });
@ -69,10 +67,8 @@ export const doUploadThumbnail = (
thumbnailBlob?: File, thumbnailBlob?: File,
fsAdapter?: any, fsAdapter?: any,
fs?: any, fs?: any,
path?: any, path?: any
cb?: (string) => void
) => (dispatch: Dispatch) => { ) => (dispatch: Dispatch) => {
const downMessage = __('Thumbnail upload service may be down, try again later.');
let thumbnail, fileExt, fileName, fileType; let thumbnail, fileExt, fileName, fileType;
const makeid = () => { const makeid = () => {
@ -98,45 +94,6 @@ export const doUploadThumbnail = (
); );
}; };
dispatch({
type: ACTIONS.UPDATE_PUBLISH_FORM,
data: {
thumbnailError: undefined,
},
});
const doUpload = data => {
return fetch(SPEECH_PUBLISH, {
method: 'POST',
body: data,
})
.then(res => res.text())
.then(text => (text.length ? JSON.parse(text) : {}))
.then(json => {
if (!json.success) return uploadError(json.message || downMessage);
if (cb) {
cb(json.data.serveUrl);
}
return dispatch({
type: ACTIONS.UPDATE_PUBLISH_FORM,
data: {
uploadThumbnailStatus: THUMBNAIL_STATUSES.COMPLETE,
thumbnail: json.data.serveUrl,
},
});
})
.catch(err => {
let message = err.message;
// This sucks but ¯\_(ツ)_/¯
if (message === 'Failed to fetch') {
message = downMessage;
}
uploadError(message);
});
};
dispatch({ dispatch({
type: ACTIONS.UPDATE_PUBLISH_FORM, type: ACTIONS.UPDATE_PUBLISH_FORM,
data: { uploadThumbnailStatus: THUMBNAIL_STATUSES.IN_PROGRESS }, data: { uploadThumbnailStatus: THUMBNAIL_STATUSES.IN_PROGRESS },
@ -153,7 +110,24 @@ export const doUploadThumbnail = (
data.append('name', name); data.append('name', name);
// $FlowFixMe // $FlowFixMe
data.append('file', { uri: 'file://' + filePath, type: fileType, name: fileName }); data.append('file', { uri: 'file://' + filePath, type: fileType, name: fileName });
return doUpload(data);
return fetch(SPEECH_PUBLISH, {
method: 'POST',
body: data,
})
.then(response => response.json())
.then(json =>
json.success
? dispatch({
type: ACTIONS.UPDATE_PUBLISH_FORM,
data: {
uploadThumbnailStatus: THUMBNAIL_STATUSES.COMPLETE,
thumbnail: `${json.data.url}.${fileExt}`,
},
})
: uploadError(json.message)
)
.catch(err => uploadError(err.message));
}); });
} else { } else {
if (filePath && fs && path) { if (filePath && fs && path) {
@ -176,7 +150,24 @@ export const doUploadThumbnail = (
data.append('name', name); data.append('name', name);
// $FlowFixMe // $FlowFixMe
data.append('file', file); data.append('file', file);
return doUpload(data);
return fetch(SPEECH_PUBLISH, {
method: 'POST',
body: data,
})
.then(response => response.json())
.then(json =>
json.success
? dispatch({
type: ACTIONS.UPDATE_PUBLISH_FORM,
data: {
uploadThumbnailStatus: THUMBNAIL_STATUSES.COMPLETE,
thumbnail: `${json.data.url}${fileExt}`,
},
})
: uploadError(json.message)
)
.catch(err => uploadError(err.message));
} }
}; };
@ -195,7 +186,6 @@ export const doPrepareEdit = (claim: StreamClaim, uri: string, fileInfo: FileLis
currency: 'LBC', currency: 'LBC',
}, },
languages, languages,
release_time,
license, license,
license_url: licenseUrl, license_url: licenseUrl,
thumbnail, thumbnail,
@ -211,8 +201,6 @@ export const doPrepareEdit = (claim: StreamClaim, uri: string, fileInfo: FileLis
description, description,
fee, fee,
languages, languages,
releaseTime: release_time,
releaseTimeEdited: undefined,
thumbnail: thumbnail ? thumbnail.url : null, thumbnail: thumbnail ? thumbnail.url : null,
title, title,
uri, uri,
@ -244,13 +232,11 @@ export const doPrepareEdit = (claim: StreamClaim, uri: string, fileInfo: FileLis
dispatch({ type: ACTIONS.DO_PREPARE_EDIT, data: publishData }); dispatch({ type: ACTIONS.DO_PREPARE_EDIT, data: publishData });
}; };
export const doPublish = (success: Function, fail: Function, preview: Function) => ( export const doPublish = (success: Function, fail: Function) => (
dispatch: Dispatch, dispatch: Dispatch,
getState: () => {} getState: () => {}
) => { ) => {
if (!preview) { dispatch({ type: ACTIONS.PUBLISH_START });
dispatch({ type: ACTIONS.PUBLISH_START });
}
const state = getState(); const state = getState();
const myClaimForUri = selectMyClaimForUri(state); const myClaimForUri = selectMyClaimForUri(state);
@ -266,10 +252,8 @@ export const doPublish = (success: Function, fail: Function, preview: Function)
filePath, filePath,
description, description,
language, language,
releaseTimeEdited,
license, license,
licenseUrl, licenseUrl,
useLBRYUploader,
licenseType, licenseType,
otherLicenseDescription, otherLicenseDescription,
thumbnail, thumbnail,
@ -281,10 +265,7 @@ export const doPublish = (success: Function, fail: Function, preview: Function)
tags, tags,
locations, locations,
optimize, optimize,
isLivestreamPublish,
remoteFileUrl,
} = publishData; } = publishData;
// Handle scenario where we have a claim that has the same name as a channel we are publishing with. // Handle scenario where we have a claim that has the same name as a channel we are publishing with.
const myClaimForUriEditing = myClaimForUri && myClaimForUri.name === name ? myClaimForUri : null; const myClaimForUriEditing = myClaimForUri && myClaimForUri.name === name ? myClaimForUri : null;
@ -310,6 +291,7 @@ export const doPublish = (success: Function, fail: Function, preview: Function)
description?: string, description?: string,
channel_id?: string, channel_id?: string,
file_path?: string, file_path?: string,
license_url?: string, license_url?: string,
license?: string, license?: string,
thumbnail_url?: string, thumbnail_url?: string,
@ -321,8 +303,6 @@ export const doPublish = (success: Function, fail: Function, preview: Function)
locations?: Array<any>, locations?: Array<any>,
blocking: boolean, blocking: boolean,
optimize_file?: boolean, optimize_file?: boolean,
preview?: boolean,
remote_url?: string,
} = { } = {
name, name,
title, title,
@ -333,14 +313,10 @@ export const doPublish = (success: Function, fail: Function, preview: Function)
tags: tags && tags.map(tag => tag.name), tags: tags && tags.map(tag => tag.name),
thumbnail_url: thumbnail, thumbnail_url: thumbnail,
blocking: true, blocking: true,
preview: false,
}; };
// Temporary solution to keep the same publish flow with the new tags api // Temporary solution to keep the same publish flow with the new tags api
// Eventually we will allow users to enter their own tags on publish // Eventually we will allow users to enter their own tags on publish
// `nsfw` will probably be removed // `nsfw` will probably be removed
if (remoteFileUrl) {
publishPayload.remote_url = remoteFileUrl;
}
if (publishingLicense) { if (publishingLicense) {
publishPayload.license = publishingLicense; publishPayload.license = publishingLicense;
@ -354,14 +330,8 @@ export const doPublish = (success: Function, fail: Function, preview: Function)
publishPayload.thumbnail_url = thumbnail; publishPayload.thumbnail_url = thumbnail;
} }
if (useLBRYUploader) {
publishPayload.tags.push('lbry-first');
}
// Set release time to current date. On edits, keep original release/transaction time as release_time
if (releaseTimeEdited) { if (myClaimForUriEditing && myClaimForUriEditing.value.release_time) {
publishPayload.release_time = releaseTimeEdited;
} else if (myClaimForUriEditing && myClaimForUriEditing.value.release_time) {
publishPayload.release_time = Number(myClaimForUri.value.release_time); publishPayload.release_time = Number(myClaimForUri.value.release_time);
} else if (myClaimForUriEditing && myClaimForUriEditing.timestamp) { } else if (myClaimForUriEditing && myClaimForUriEditing.timestamp) {
publishPayload.release_time = Number(myClaimForUriEditing.timestamp); publishPayload.release_time = Number(myClaimForUriEditing.timestamp);
@ -388,107 +358,53 @@ export const doPublish = (success: Function, fail: Function, preview: Function)
// Only pass file on new uploads, not metadata only edits. // Only pass file on new uploads, not metadata only edits.
// The sdk will figure it out // The sdk will figure it out
if (filePath && !isLivestreamPublish) publishPayload.file_path = filePath; if (filePath) publishPayload.file_path = filePath;
if (preview) { return Lbry.publish(publishPayload).then(success, fail);
publishPayload.preview = true;
publishPayload.optimize_file = false;
return Lbry.publish(publishPayload).then((previewResponse: PublishResponse) => {
return preview(previewResponse);
}, fail);
}
return Lbry.publish(publishPayload).then((response: PublishResponse) => {
if (!useLBRYUploader) {
return success(response);
}
// $FlowFixMe
publishPayload.permanent_url = response.outputs[0].permanent_url;
return LbryFirst.upload(publishPayload)
.then(() => {
// Return original publish response so app treats it like a normal publish
return success(response);
})
.catch(error => {
return success(response, error);
});
}, fail);
}; };
// Calls file_list until any reflecting files are done // Calls claim_list_mine until any pending publishes are confirmed
export const doCheckReflectingFiles = () => (dispatch: Dispatch, getState: GetState) => { export const doCheckPendingPublishes = (onConfirmed: Function) => (
dispatch: Dispatch,
getState: GetState
) => {
const state = getState(); const state = getState();
const { checkingReflector } = state.claims; const pendingById = selectPendingById(state);
let reflectorCheckInterval;
const checkFileList = async () => { if (!Object.keys(pendingById).length) {
const state = getState(); return;
const reflectingById = selectReflectingById(state);
const ids = Object.keys(reflectingById);
const newReflectingById = {};
const promises = [];
// TODO: just use file_list({claim_id: Array<claimId>})
if (Object.keys(reflectingById).length) {
ids.forEach(claimId => {
promises.push(Lbry.file_list({ claim_id: claimId }));
});
Promise.all(promises)
.then(results => {
results.forEach(res => {
if (res.items[0]) {
const fileListItem = res.items[0];
const fileClaimId = fileListItem.claim_id;
const {
is_fully_reflected: done,
uploading_to_reflector: uploading,
reflector_progress: progress,
} = fileListItem;
if (uploading) {
newReflectingById[fileClaimId] = {
fileListItem: fileListItem,
progress,
stalled: !done && !uploading,
};
}
}
});
})
.then(() => {
dispatch({
type: ACTIONS.UPDATE_FILES_REFLECTING,
data: newReflectingById,
});
if (!Object.keys(newReflectingById).length) {
dispatch({
type: ACTIONS.TOGGLE_CHECKING_REFLECTING,
data: false,
});
clearInterval(reflectorCheckInterval);
}
});
} else {
dispatch({
type: ACTIONS.TOGGLE_CHECKING_REFLECTING,
data: false,
});
clearInterval(reflectorCheckInterval);
}
};
// do it once...
checkFileList();
// then start the interval if it's not already started
if (!checkingReflector) {
dispatch({
type: ACTIONS.TOGGLE_CHECKING_REFLECTING,
data: true,
});
reflectorCheckInterval = setInterval(() => {
checkFileList();
}, 5000);
} }
let publishCheckInterval;
const checkFileList = () => {
Lbry.stream_list({ page: 1, page_size: 10 }).then(result => {
const claims = result.items;
claims.forEach(claim => {
// If it's confirmed, check if it was pending previously
if (claim.confirmations > 0 && pendingById[claim.claim_id]) {
delete pendingById[claim.claim_id];
if (onConfirmed) {
onConfirmed(claim);
}
}
});
dispatch({
type: ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED,
data: {
claims,
},
});
if (!Object.keys(pendingById).length) {
clearInterval(publishCheckInterval);
}
});
};
publishCheckInterval = setInterval(() => {
checkFileList();
}, 30000);
}; };
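A sketch of kicking off the pending-publish poller shown in the right-hand column; the thunk store, package-root import, and callback body are illustrative assumptions.

import { doPublish, doCheckPendingPublishes } from 'lbry-redux';

function publishAndWatch(dispatch) {
  const onSuccess = response => {
    // response.outputs[0] is the new or updated claim
    dispatch(
      doCheckPendingPublishes(claim => {
        // fires once the claim has confirmations > 0 (checked every 30 seconds)
        console.log('confirmed:', claim.claim_id);
      })
    );
  };
  const onFail = error => console.error(error.message);
  dispatch(doPublish(onSuccess, onFail));
}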

278
src/redux/actions/search.js Normal file
View file

@ -0,0 +1,278 @@
// @flow
import * as ACTIONS from 'constants/action_types';
import { buildURI } from 'lbryURI';
import { doResolveUri } from 'redux/actions/claims';
import {
makeSelectSearchUris,
makeSelectResolvedSearchResults,
selectSuggestions,
makeSelectQueryWithOptions,
selectSearchValue,
} from 'redux/selectors/search';
import { batchActions } from 'util/batch-actions';
import debounce from 'util/debounce';
import handleFetchResponse from 'util/handle-fetch';
const DEBOUNCED_SEARCH_SUGGESTION_MS = 300;
type Dispatch = (action: any) => any;
type GetState = () => { search: SearchState };
type SearchOptions = {
size?: number,
from?: number,
related_to?: string,
nsfw?: boolean,
isBackgroundSearch?: boolean,
resolveResults?: boolean,
};
// We can't use env vars because they aren't passed into node_modules
let CONNECTION_STRING = 'https://lighthouse.lbry.com/';
export const setSearchApi = (endpoint: string) => {
CONNECTION_STRING = endpoint.replace(/\/*$/, '/'); // exactly one slash at the end;
};
export const getSearchSuggestions = (value: string) => (dispatch: Dispatch, getState: GetState) => {
const query = value.trim();
// strip out any basic stuff for more accurate search results
let searchValue = query.replace(/lbry:\/\//g, '').replace(/-/g, ' ');
if (searchValue.includes('#')) {
// This should probably be more robust, but I think it's fine for now
// Remove everything after # to get rid of the claim id
searchValue = searchValue.substring(0, searchValue.indexOf('#'));
}
const suggestions = selectSuggestions(getState());
if (suggestions[searchValue]) {
return;
}
fetch(`${CONNECTION_STRING}autocomplete?s=${searchValue}`)
.then(handleFetchResponse)
.then(apiSuggestions => {
dispatch({
type: ACTIONS.UPDATE_SEARCH_SUGGESTIONS,
data: {
query: searchValue,
suggestions: apiSuggestions,
},
});
})
.catch(() => {
// If the fetch fails, do nothing
// Basic search suggestions are already populated at this point
});
};
const throttledSearchSuggestions = debounce((dispatch, query) => {
dispatch(getSearchSuggestions(query));
}, DEBOUNCED_SEARCH_SUGGESTION_MS);
export const doUpdateSearchQuery = (query: string, shouldSkipSuggestions: ?boolean) => (
dispatch: Dispatch
) => {
dispatch({
type: ACTIONS.UPDATE_SEARCH_QUERY,
data: { query },
});
// Don't fetch new suggestions if the user just added a space
if (!query.endsWith(' ') || !shouldSkipSuggestions) {
throttledSearchSuggestions(dispatch, query);
}
};
export const doSearch = (rawQuery: string, searchOptions: SearchOptions) => (
dispatch: Dispatch,
getState: GetState
) => {
const query = rawQuery.replace(/^lbry:\/\//i, '').replace(/\//, ' ');
const resolveResults = searchOptions && searchOptions.resolveResults;
const isBackgroundSearch = (searchOptions && searchOptions.isBackgroundSearch) || false;
if (!query) {
dispatch({
type: ACTIONS.SEARCH_FAIL,
});
return;
}
const state = getState();
let queryWithOptions = makeSelectQueryWithOptions(query, searchOptions)(state);
// If we have already searched for something, we don't need to do anything
const urisForQuery = makeSelectSearchUris(queryWithOptions)(state);
if (urisForQuery && !!urisForQuery.length) {
return;
}
dispatch({
type: ACTIONS.SEARCH_START,
});
// If the user is on the file page with a pre-populated uri and they select
// the search option without typing anything, searchQuery will be empty
// We need to populate it so the input is filled on the search page
// isBackgroundSearch means the search is happening in the background, don't update the search query
if (!state.search.searchQuery && !isBackgroundSearch) {
dispatch(doUpdateSearchQuery(query));
}
fetch(`${CONNECTION_STRING}search?${queryWithOptions}`)
.then(handleFetchResponse)
.then((data: Array<{ name: string, claimId: string }>) => {
const uris = [];
const actions = [];
data.forEach(result => {
if (result) {
const { name, claimId } = result;
const urlObj: LbryUrlObj = {};
if (name.startsWith('@')) {
urlObj.channelName = name;
urlObj.channelClaimId = claimId;
} else {
urlObj.streamName = name;
urlObj.streamClaimId = claimId;
}
const url = buildURI(urlObj);
if (resolveResults) {
actions.push(doResolveUri(url));
}
uris.push(url);
}
});
actions.push({
type: ACTIONS.SEARCH_SUCCESS,
data: {
query: queryWithOptions,
uris,
},
});
dispatch(batchActions(...actions));
})
.catch(e => {
dispatch({
type: ACTIONS.SEARCH_FAIL,
});
});
};
export const doResolvedSearch = (
rawQuery: string,
size: ?number, // only pass in if you don't want to use the users setting (ex: related content)
from: ?number,
isBackgroundSearch: boolean = false,
options: {
related_to?: string,
} = {},
nsfw: boolean
) => (dispatch: Dispatch, getState: GetState) => {
const query = rawQuery.replace(/^lbry:\/\//i, '').replace(/\//, ' ');
if (!query) {
dispatch({
type: ACTIONS.RESOLVED_SEARCH_FAIL,
});
return;
}
const optionsWithFrom: SearchOptions = {
...(size ? { size } : {}),
...(from ? { from } : {}),
isBackgroundSearch,
...options,
};
const optionsWithoutFrom: SearchOptions = {
...(size ? { size } : {}),
isBackgroundSearch,
...options,
};
const state = getState();
let queryWithOptions = makeSelectQueryWithOptions(query, optionsWithFrom)(state);
// make from null so that we can maintain a reference to the same query for multiple pages and simply append the found results
let queryWithoutFrom = makeSelectQueryWithOptions(query, optionsWithoutFrom)(state);
// If we have already searched for something, we don't need to do anything
// TODO: Tweak this check for multiple page results
/* const resultsForQuery = makeSelectResolvedSearchResults(queryWithOptions)(state);
if (resultsForQuery && resultsForQuery.length && resultsForQuery.length > (from * size)) {
return;
} */
dispatch({
type: ACTIONS.RESOLVED_SEARCH_START,
});
if (!state.search.searchQuery && !isBackgroundSearch) {
dispatch(doUpdateSearchQuery(query));
}
const fetchUrl = nsfw
? `${CONNECTION_STRING}search?resolve=true&${queryWithOptions}`
: `${CONNECTION_STRING}search?resolve=true&nsfw=false&${queryWithOptions}`;
fetch(fetchUrl)
.then(handleFetchResponse)
.then((data: Array<ResolvedSearchResult>) => {
const results = [];
data.forEach(result => {
if (result) {
results.push(result);
}
});
dispatch({
type: ACTIONS.RESOLVED_SEARCH_SUCCESS,
data: {
query: queryWithoutFrom,
results,
pageSize: size,
append: parseInt(from, 10) > parseInt(size, 10) - 1,
},
});
})
.catch(e => {
dispatch({
type: ACTIONS.RESOLVED_SEARCH_FAIL,
});
});
};
export const doFocusSearchInput = () => (dispatch: Dispatch) =>
dispatch({
type: ACTIONS.SEARCH_FOCUS,
});
export const doBlurSearchInput = () => (dispatch: Dispatch) =>
dispatch({
type: ACTIONS.SEARCH_BLUR,
});
export const doUpdateSearchOptions = (
newOptions: SearchOptions,
additionalOptions: SearchOptions
) => (dispatch: Dispatch, getState: GetState) => {
const state = getState();
const searchValue = selectSearchValue(state);
dispatch({
type: ACTIONS.UPDATE_SEARCH_OPTIONS,
data: newOptions,
});
if (searchValue) {
// After updating, perform a search with the new options
dispatch(doSearch(searchValue, additionalOptions));
}
};

View file

@ -6,17 +6,11 @@ type SharedData = {
version: '0.1', version: '0.1',
value: { value: {
subscriptions?: Array<string>, subscriptions?: Array<string>,
following?: Array<{ uri: string, notificationsDisabled: boolean }>,
tags?: Array<string>, tags?: Array<string>,
blocked?: Array<string>, blocked?: Array<string>,
coin_swap_codes?: Array<string>,
settings?: any, settings?: any,
app_welcome_version?: number, app_welcome_version?: number,
sharing_3P?: boolean, sharing_3P?: boolean,
unpublishedCollections: CollectionGroup,
editedCollections: CollectionGroup,
builtinCollections: CollectionGroup,
savedCollections: Array<string>,
}, },
}; };
@ -24,32 +18,20 @@ function extractUserState(rawObj: SharedData) {
if (rawObj && rawObj.version === '0.1' && rawObj.value) { if (rawObj && rawObj.version === '0.1' && rawObj.value) {
const { const {
subscriptions, subscriptions,
following,
tags, tags,
blocked, blocked,
coin_swap_codes,
settings, settings,
app_welcome_version, app_welcome_version,
sharing_3P, sharing_3P,
unpublishedCollections,
editedCollections,
builtinCollections,
savedCollections,
} = rawObj.value; } = rawObj.value;
return { return {
...(subscriptions ? { subscriptions } : {}), ...(subscriptions ? { subscriptions } : {}),
...(following ? { following } : {}),
...(tags ? { tags } : {}), ...(tags ? { tags } : {}),
...(blocked ? { blocked } : {}), ...(blocked ? { blocked } : {}),
...(coin_swap_codes ? { coin_swap_codes } : {}),
...(settings ? { settings } : {}), ...(settings ? { settings } : {}),
...(app_welcome_version ? { app_welcome_version } : {}), ...(app_welcome_version ? { app_welcome_version } : {}),
...(sharing_3P ? { sharing_3P } : {}), ...(sharing_3P ? { sharing_3P } : {}),
...(unpublishedCollections ? { unpublishedCollections } : {}),
...(editedCollections ? { editedCollections } : {}),
...(builtinCollections ? { builtinCollections } : {}),
...(savedCollections ? { savedCollections } : {}),
}; };
} }
@ -60,33 +42,21 @@ export function doPopulateSharedUserState(sharedSettings: any) {
return (dispatch: Dispatch) => { return (dispatch: Dispatch) => {
const { const {
subscriptions, subscriptions,
following,
tags, tags,
blocked, blocked,
coin_swap_codes,
settings, settings,
app_welcome_version, app_welcome_version,
sharing_3P, sharing_3P,
unpublishedCollections,
editedCollections,
builtinCollections,
savedCollections,
} = extractUserState(sharedSettings); } = extractUserState(sharedSettings);
dispatch({ dispatch({
type: ACTIONS.USER_STATE_POPULATE, type: ACTIONS.USER_STATE_POPULATE,
data: { data: {
subscriptions, subscriptions,
following,
tags, tags,
blocked, blocked,
coinSwapCodes: coin_swap_codes,
settings, settings,
welcomeVersion: app_welcome_version, welcomeVersion: app_welcome_version,
allowAnalytics: sharing_3P, allowAnalytics: sharing_3P,
unpublishedCollections,
editedCollections,
builtinCollections,
savedCollections,
}, },
}); });
}; };
@ -99,61 +69,45 @@ export function doPreferenceSet(
success: Function, success: Function,
fail: Function fail: Function
) { ) {
return (dispatch: Dispatch) => { const preference = {
const preference = { type: typeof value,
type: typeof value, version,
version, value,
value,
};
const options = {
key,
value: JSON.stringify(preference),
};
Lbry.preference_set(options)
.then(() => {
if (success) {
success(preference);
}
})
.catch(err => {
dispatch({
type: ACTIONS.SYNC_FATAL_ERROR,
error: err,
});
if (fail) {
fail();
}
});
}; };
const options = {
key,
value: JSON.stringify(preference),
};
Lbry.preference_set(options)
.then(() => {
success(preference);
})
.catch(() => {
if (fail) {
fail();
}
});
} }
export function doPreferenceGet(key: string, success: Function, fail?: Function) { export function doPreferenceGet(key: string, success: Function, fail?: Function) {
return (dispatch: Dispatch) => { const options = {
const options = { key,
key,
};
return Lbry.preference_get(options)
.then(result => {
if (result) {
const preference = result[key];
return success(preference);
}
return success(null);
})
.catch(err => {
dispatch({
type: ACTIONS.SYNC_FATAL_ERROR,
error: err,
});
if (fail) {
fail(err);
}
});
}; };
Lbry.preference_get(options)
.then(result => {
if (result) {
const preference = result[key];
return success(preference);
}
return success(null);
})
.catch(err => {
if (fail) {
fail(err);
}
});
} }
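An illustrative shape for the shared settings consumed above; the key name and values are hypothetical, and calling doPreferenceSet directly matches the non-thunk version on the right-hand side.

import { doPopulateSharedUserState, doPreferenceSet } from 'lbry-redux';

// matches the SharedData type above: version '0.1' plus a value object
const sharedSettings = {
  version: '0.1',
  value: {
    subscriptions: ['lbry://@lbry#3f'],
    tags: ['science'],
    blocked: [],
    sharing_3P: false,
  },
};

function restoreAndSave(dispatch) {
  // hydrate local redux state from wallet-synced preferences
  dispatch(doPopulateSharedUserState(sharedSettings));
  // persist a keyed preference back through the SDK ('shared' is a hypothetical key)
  doPreferenceSet('shared', sharedSettings, '0.1', pref => console.log('saved', pref));
}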

24
src/redux/actions/tags.js Normal file
View file

@ -0,0 +1,24 @@
// @flow
import * as ACTIONS from 'constants/action_types';
import Lbry from 'lbry';
export const doToggleTagFollow = (name: string) => ({
type: ACTIONS.TOGGLE_TAG_FOLLOW,
data: {
name,
},
});
export const doAddTag = (name: string) => ({
type: ACTIONS.TAG_ADD,
data: {
name,
},
});
export const doDeleteTag = (name: string) => ({
type: ACTIONS.TAG_DELETE,
data: {
name,
},
});

View file

@ -1,21 +1,12 @@
import * as ACTIONS from 'constants/action_types'; import * as ACTIONS from 'constants/action_types';
import Lbry from 'lbry'; import Lbry from 'lbry';
import { doToast } from 'redux/actions/notifications'; import { doToast } from 'redux/actions/notifications';
import { import { selectBalance, selectPendingSupportTransactions, selectTxoPageParams } from 'redux/selectors/wallet';
selectBalance,
selectPendingSupportTransactions,
selectTxoPageParams,
selectPendingOtherTransactions,
selectPendingConsolidateTxid,
selectPendingMassClaimTxid,
} from 'redux/selectors/wallet';
import { creditsToString } from 'util/format-credits'; import { creditsToString } from 'util/format-credits';
import { selectMyClaimsRaw, selectClaimsById } from 'redux/selectors/claims'; import { selectMyClaimsRaw } from 'redux/selectors/claims';
import { doFetchChannelListMine, doFetchClaimListMine, doClaimSearch } from 'redux/actions/claims'; import { doFetchChannelListMine, doFetchClaimListMine } from 'redux/actions/claims';
const FIFTEEN_SECONDS = 15000;
let walletBalancePromise = null; let walletBalancePromise = null;
export function doUpdateBalance() { export function doUpdateBalance() {
return (dispatch, getState) => { return (dispatch, getState) => {
const { const {
@ -57,92 +48,56 @@ export function doUpdateBalance() {
export function doBalanceSubscribe() { export function doBalanceSubscribe() {
return dispatch => { return dispatch => {
dispatch(doUpdateBalance()); dispatch(doUpdateBalance());
setInterval(() => dispatch(doUpdateBalance()), 10000); setInterval(() => dispatch(doUpdateBalance()), 5000);
}; };
} }
export function doFetchTransactions(page = 1, pageSize = 999999) { export function doFetchTransactions(page = 1, pageSize = 99999) {
return dispatch => { return dispatch => {
dispatch(doFetchSupports());
dispatch({ dispatch({
type: ACTIONS.FETCH_TRANSACTIONS_STARTED, type: ACTIONS.FETCH_TRANSACTIONS_STARTED,
}); });
Lbry.transaction_list({ page, page_size: pageSize }).then(result => { Lbry.utxo_release()
dispatch({ .then(() => Lbry.transaction_list({ page, page_size: pageSize }))
type: ACTIONS.FETCH_TRANSACTIONS_COMPLETED, .then(result => {
data: { dispatch({
transactions: result.items, type: ACTIONS.FETCH_TRANSACTIONS_COMPLETED,
}, data: {
transactions: result.items,
},
});
}); });
});
}; };
} }
export function doFetchTxoPage() { export function doFetchTxoPage() {
return (dispatch, getState) => { return (dispatch, getState) => {
const fetchId = Math.random()
.toString(36)
.substr(2, 9);
dispatch({ dispatch({
type: ACTIONS.FETCH_TXO_PAGE_STARTED, type: ACTIONS.FETCH_TXO_PAGE_STARTED,
data: fetchId,
}); });
const state = getState(); const state = getState();
const queryParams = selectTxoPageParams(state); const queryParams = selectTxoPageParams(state);
Lbry.txo_list(queryParams) Lbry.txo_list(queryParams)
.then(res => {
const items = res.items || [];
const claimsById = selectClaimsById(state);
const channelIds = items.reduce((acc, cur) => {
if (
cur.type === 'support' &&
cur.signing_channel &&
!claimsById[cur.signing_channel.channel_id]
) {
acc.push(cur.signing_channel.channel_id);
}
return acc;
}, []);
if (channelIds.length) {
const searchParams = {
page_size: 9999,
page: 1,
no_totals: true,
claim_ids: channelIds,
};
// make sure redux has these channels resolved
dispatch(doClaimSearch(searchParams));
}
return res;
})
.then(res => { .then(res => {
dispatch({ dispatch({
type: ACTIONS.FETCH_TXO_PAGE_COMPLETED, type: ACTIONS.FETCH_TXO_PAGE_COMPLETED,
data: { data: res,
result: res,
fetchId: fetchId,
},
}); });
}) })
.catch(e => { .catch(e => {
dispatch({ dispatch({
type: ACTIONS.FETCH_TXO_PAGE_COMPLETED, type: ACTIONS.FETCH_TXO_PAGE_COMPLETED,
data: { data: e.message,
error: e.message,
fetchId: fetchId,
},
}); });
}); });
}; };
} }
export function doUpdateTxoPageParams(params) { export function doUpdateTxoPageParams(params: TxoListParams) {
return dispatch => { return dispatch => {
dispatch({ dispatch({
type: ACTIONS.UPDATE_TXO_FETCH_PARAMS, type: ACTIONS.UPDATE_TXO_FETCH_PARAMS,
@ -170,74 +125,6 @@ export function doFetchSupports(page = 1, pageSize = 99999) {
}; };
} }
export function doFetchUtxoCounts() {
return async dispatch => {
dispatch({
type: ACTIONS.FETCH_UTXO_COUNT_STARTED,
});
let resultSets = await Promise.all([
Lbry.txo_list({ type: 'other', is_not_spent: true, page: 1, page_size: 1 }),
Lbry.txo_list({ type: 'support', is_not_spent: true, page: 1, page_size: 1 }),
]);
const counts = {};
const paymentCount = resultSets[0]['total_items'];
const supportCount = resultSets[1]['total_items'];
counts['other'] = typeof paymentCount === 'number' ? paymentCount : 0;
counts['support'] = typeof supportCount === 'number' ? supportCount : 0;
dispatch({
type: ACTIONS.FETCH_UTXO_COUNT_COMPLETED,
data: counts,
debug: { resultSets },
});
};
}
export function doUtxoConsolidate() {
return async dispatch => {
dispatch({
type: ACTIONS.DO_UTXO_CONSOLIDATE_STARTED,
});
const results = await Lbry.txo_spend({ type: 'other' });
const result = results[0];
dispatch({
type: ACTIONS.PENDING_CONSOLIDATED_TXOS_UPDATED,
data: { txids: [result.txid] },
});
dispatch({
type: ACTIONS.DO_UTXO_CONSOLIDATE_COMPLETED,
data: { txid: result.txid },
});
dispatch(doCheckPendingTxs());
};
}
export function doTipClaimMass() {
return async dispatch => {
dispatch({
type: ACTIONS.TIP_CLAIM_MASS_STARTED,
});
const results = await Lbry.txo_spend({ type: 'support', is_not_my_input: true });
const result = results[0];
dispatch({
type: ACTIONS.PENDING_CONSOLIDATED_TXOS_UPDATED,
data: { txids: [result.txid] },
});
dispatch({
type: ACTIONS.TIP_CLAIM_MASS_COMPLETED,
data: { txid: result.txid },
});
dispatch(doCheckPendingTxs());
};
}
export function doGetNewAddress() { export function doGetNewAddress() {
return dispatch => { return dispatch => {
dispatch({ dispatch({
@ -277,8 +164,8 @@ export function doSendDraftTransaction(address, amount) {
if (balance - amount <= 0) { if (balance - amount <= 0) {
dispatch( dispatch(
doToast({ doToast({
title: __('Insufficient credits'), title: 'Insufficient credits',
message: __('Insufficient credits'), message: 'Insufficient credits',
}) })
); );
return; return;
@ -295,8 +182,8 @@ export function doSendDraftTransaction(address, amount) {
}); });
dispatch( dispatch(
doToast({ doToast({
message: __('You sent %amount% LBRY Credits', { amount: amount }), message: `You sent ${amount} LBC`,
linkText: __('History'), linkText: 'History',
linkTarget: '/wallet', linkTarget: '/wallet',
}) })
); );
@ -307,7 +194,7 @@ export function doSendDraftTransaction(address, amount) {
}); });
dispatch( dispatch(
doToast({ doToast({
message: __('Transaction failed'), message: 'Transaction failed',
isError: true, isError: true,
}) })
); );
@ -321,7 +208,7 @@ export function doSendDraftTransaction(address, amount) {
}); });
dispatch( dispatch(
doToast({ doToast({
message: __('Transaction failed'), message: 'Transaction failed',
isError: true, isError: true,
}) })
); );
@ -348,16 +235,16 @@ export function doSetDraftTransactionAddress(address) {
}; };
} }
export function doSendTip(params, isSupport, successCallback, errorCallback, shouldNotify = true) { export function doSendTip(amount, claimId, isSupport, successCallback, errorCallback) {
return (dispatch, getState) => { return (dispatch, getState) => {
const state = getState(); const state = getState();
const balance = selectBalance(state); const balance = selectBalance(state);
const myClaims = selectMyClaimsRaw(state); const myClaims = selectMyClaimsRaw(state);
const shouldSupport = const shouldSupport =
isSupport || (myClaims ? myClaims.find(claim => claim.claim_id === params.claim_id) : false); isSupport || (myClaims ? myClaims.find(claim => claim.claim_id === claimId) : false);
if (balance - params.amount <= 0) { if (balance - amount <= 0) {
dispatch( dispatch(
doToast({ doToast({
message: __('Insufficient credits'), message: __('Insufficient credits'),
@ -367,25 +254,23 @@ export function doSendTip(params, isSupport, successCallback, errorCallback, sho
return; return;
} }
const success = response => { const success = () => {
if (shouldNotify) { dispatch(
dispatch( doToast({
doToast({ message: shouldSupport
message: shouldSupport ? __('You deposited %amount% LBC as a support!', { amount })
? __('You deposited %amount% LBRY Credits as a support!', { amount: params.amount }) : __('You sent %amount% LBC as a tip, Mahalo!', { amount }),
: __('You sent %amount% LBRY Credits as a tip, Mahalo!', { amount: params.amount }), linkText: __('History'),
linkText: __('History'), linkTarget: __('/wallet'),
linkTarget: '/wallet', })
}) );
);
}
dispatch({ dispatch({
type: ACTIONS.SUPPORT_TRANSACTION_COMPLETED, type: ACTIONS.SUPPORT_TRANSACTION_COMPLETED,
}); });
if (successCallback) { if (successCallback) {
successCallback(response); successCallback();
} }
}; };
@ -414,10 +299,10 @@ export function doSendTip(params, isSupport, successCallback, errorCallback, sho
}); });
Lbry.support_create({ Lbry.support_create({
...params, claim_id: claimId,
amount: creditsToString(amount),
tip: !shouldSupport, tip: !shouldSupport,
blocking: true, blocking: true,
amount: creditsToString(params.amount),
}).then(success, error); }).then(success, error);
}; };
} }
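The newer signature above takes a params object that is spread straight into support_create, so callers pass claim_id and amount together. A usage sketch, not from the change (the claim id and amount are placeholders):

// dispatch(
//   doSendTip(
//     { claim_id: 'abc123...', amount: 1.0 }, // spread into Lbry.support_create
//     false,                                  // isSupport: false means a tip
//     response => console.log('sent', response.txid),
//     error => console.log('failed', error),
//     true                                    // shouldNotify: show the toast
//   )
// );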
@ -494,8 +379,7 @@ export function doWalletLock() {
}; };
} }
// Collect all tips for a claim export function doSupportAbandonForClaim(claimId, claimType, keep, preview) {
export function doSupportAbandonForClaim(claimId, claimType, keep, preview) {
return dispatch => { return dispatch => {
if (preview) { if (preview) {
dispatch({ dispatch({
@ -507,26 +391,27 @@ export function doSupportAbandonForClaim(claimId, claimType, keep, preview) {
}); });
} }
const params = { claim_id: claimId }; const params = {claim_id: claimId};
if (preview) params['preview'] = true; if (preview) params['preview'] = true;
if (keep) params['keep'] = keep; if (keep) params['keep'] = keep;
return Lbry.support_abandon(params) return (
.then(res => { Lbry.support_abandon(params)
if (!preview) { .then((res) => {
if (!preview) {
dispatch({
type: ACTIONS.ABANDON_CLAIM_SUPPORT_COMPLETED,
data: { claimId, txid: res.txid, effective: res.outputs[0].amount, type: claimType}, // add to pendingSupportTransactions,
});
dispatch(doCheckPendingTxs());
}
return res;
})
.catch(e => {
dispatch({ dispatch({
type: ACTIONS.ABANDON_CLAIM_SUPPORT_COMPLETED, type: ACTIONS.ABANDON_CLAIM_SUPPORT_FAILED,
data: { claimId, txid: res.txid, effective: res.outputs[0].amount, type: claimType }, data: e.message,
}); });
dispatch(doCheckPendingTxs()); }));
}
return res;
})
.catch(e => {
dispatch({
type: ACTIONS.ABANDON_CLAIM_SUPPORT_FAILED,
data: e.message,
});
});
}; };
} }
@ -535,30 +420,15 @@ export function doWalletReconnect() {
dispatch({ dispatch({
type: ACTIONS.WALLET_RESTART, type: ACTIONS.WALLET_RESTART,
}); });
let failed = false;
// this basically returns null when it's done. :(
// might be good to dispatch ACTIONS.WALLET_RESTARTED
const walletTimeout = setTimeout(() => { Lbry.wallet_reconnect().then(() =>
failed = true;
dispatch({ dispatch({
type: ACTIONS.WALLET_RESTART_COMPLETED, type: ACTIONS.WALLET_RESTART_COMPLETED,
}); })
dispatch( );
doToast({
message: __(
'Your servers were not available. Check your url and port, or switch back to defaults.'
),
isError: true,
})
);
}, FIFTEEN_SECONDS);
Lbry.wallet_reconnect().then(() => {
clearTimeout(walletTimeout);
if (!failed) dispatch({ type: ACTIONS.WALLET_RESTART_COMPLETED });
});
}; };
} }
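The newer reconnect flow above races wallet_reconnect against a fifteen-second timer so the restart can never hang silently: whichever side finishes first dispatches WALLET_RESTART_COMPLETED, and only the timeout path shows the error toast. A generic sketch of that race, not part of the diff (withTimeout is a hypothetical helper; 15000 ms mirrors FIFTEEN_SECONDS):

// Hypothetical helper: resolve with the call's value, or with `fallback` once `ms` elapse.
function withTimeout(promise, ms, fallback) {
  let timer;
  const timeout = new Promise(resolve => {
    timer = setTimeout(() => resolve(fallback), ms);
  });
  return Promise.race([promise, timeout]).finally(() => clearTimeout(timer));
}

// Usage sketch:
// withTimeout(Lbry.wallet_reconnect(), 15000, 'timed-out').then(result => {
//   dispatch({ type: ACTIONS.WALLET_RESTART_COMPLETED });
//   if (result === 'timed-out') {
//     dispatch(doToast({ message: 'Your servers were not available.', isError: true }));
//   }
// });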
export function doWalletDecrypt() { export function doWalletDecrypt() {
return dispatch => { return dispatch => {
dispatch({ dispatch({
@ -598,6 +468,7 @@ export function doWalletStatus() {
}; };
} }
export function doSetTransactionListFilter(filterOption) { export function doSetTransactionListFilter(filterOption) {
return { return {
type: ACTIONS.SET_TRANSACTION_LIST_FILTER, type: ACTIONS.SET_TRANSACTION_LIST_FILTER,
@ -618,53 +489,39 @@ export function doUpdateBlockHeight() {
} }
// Calls transaction_show on txes until any pending txes are confirmed
export const doCheckPendingTxs = () => (dispatch, getState) => { export const doCheckPendingTxs = () => (
dispatch,
getState
) => {
const state = getState(); const state = getState();
const pendingTxsById = selectPendingSupportTransactions(state); // {} const pendingTxsById = selectPendingSupportTransactions(state); // {}
const pendingOtherTxes = selectPendingOtherTransactions(state); if (!Object.keys(pendingTxsById).length) {
if (!Object.keys(pendingTxsById).length && !pendingOtherTxes.length) {
return; return;
} }
let txCheckInterval; let txCheckInterval;
const checkTxList = () => { const checkTxList = () => {
const state = getState(); const state = getState();
const pendingSupportTxs = selectPendingSupportTransactions(state); // {} const pendingTxs = selectPendingSupportTransactions(state); // {}
const pendingConsolidateTxes = selectPendingOtherTransactions(state);
const pendingConsTxid = selectPendingConsolidateTxid(state);
const pendingMassCLaimTxid = selectPendingMassClaimTxid(state);
const promises = []; const promises = [];
const newPendingTxes = {}; const newPendingTxes = {};
const noLongerPendingConsolidate = [];
const types = new Set([]); const types = new Set([]);
// { claimId: {txid: 123, amount 12.3}, } let changed = false;
const entries = Object.entries(pendingSupportTxs); Object.entries(pendingTxs).forEach(([claim, data]) => {
entries.forEach(([claim, data]) => { promises.push(Lbry.transaction_show({txid: data.txid}));
promises.push(Lbry.transaction_show({ txid: data.txid }));
types.add(data.type); types.add(data.type);
}); });
if (pendingConsolidateTxes.length) {
pendingConsolidateTxes.forEach(txid => promises.push(Lbry.transaction_show({ txid })));
}
Promise.all(promises).then(txShows => { Promise.all(promises).then(txShows => {
let changed = false;
txShows.forEach(result => { txShows.forEach(result => {
if (pendingConsolidateTxes.includes(result.txid)) { if (result.height <= 0) {
if (result.height > 0) { const entries = Object.entries(pendingTxs);
noLongerPendingConsolidate.push(result.txid); const match = entries.find((entry) => entry[1].txid === result.txid);
} newPendingTxes[match[0]] = match[1];
} else { } else {
if (result.height <= 0) { changed = true;
const match = entries.find(entry => entry[1].txid === result.txid);
newPendingTxes[match[0]] = match[1];
} else {
changed = true;
}
} }
}); });
}).then(() => {
if (changed) { if (changed) {
dispatch({ dispatch({
type: ACTIONS.PENDING_SUPPORTS_UPDATED, type: ACTIONS.PENDING_SUPPORTS_UPDATED,
@ -677,31 +534,12 @@ export const doCheckPendingTxs = () => (dispatch, getState) => {
dispatch(doFetchClaimListMine()); dispatch(doFetchClaimListMine());
} }
} }
if (noLongerPendingConsolidate.length) { if (Object.keys(newPendingTxes).length === 0) clearInterval(txCheckInterval);
if (noLongerPendingConsolidate.includes(pendingConsTxid)) {
dispatch(
doToast({
message: __('Your wallet is finished consolidating'),
})
);
}
if (noLongerPendingConsolidate.includes(pendingMassCLaimTxid)) {
dispatch(
doToast({
message: __('Your tips have been collected'),
})
);
}
dispatch({
type: ACTIONS.PENDING_CONSOLIDATED_TXOS_UPDATED,
data: { txids: noLongerPendingConsolidate, remove: true },
});
}
if (!Object.keys(pendingTxsById).length && !pendingOtherTxes.length) {
clearInterval(txCheckInterval);
}
}); });
if (!Object.keys(pendingTxsById).length) {
clearInterval(txCheckInterval);
}
}; };
txCheckInterval = setInterval(() => { txCheckInterval = setInterval(() => {
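The interval set up above calls transaction_show for every pending txid and treats a positive height as confirmed, clearing itself once nothing is left pending. The confirmation test in isolation, as a stand-alone sketch (pollTxConfirmation and the 30-second interval are illustrative, not from the diff):

// Illustrative poller: resolve once a txid reaches a positive block height.
function pollTxConfirmation(txid, intervalMs = 30000) {
  return new Promise(resolve => {
    const timer = setInterval(() => {
      Lbry.transaction_show({ txid }).then(result => {
        // height <= 0 means the transaction is still waiting to be mined.
        if (result.height > 0) {
          clearInterval(timer);
          resolve(result);
        }
      });
    }, intervalMs);
  });
}

// Usage sketch:
// pollTxConfirmation(pendingTxid).then(() => dispatch(doFetchClaimListMine()));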
View file
@ -2,64 +2,50 @@
import isEqual from 'util/deep-equal'; import isEqual from 'util/deep-equal';
import { doPreferenceSet } from 'redux/actions/sync'; import { doPreferenceSet } from 'redux/actions/sync';
const RUN_PREFERENCES_DELAY_MS = 2000; const SHARED_PREFERENCE_KEY = 'shared';
const SHARED_PREFERENCE_VERSION = '0.1'; const SHARED_PREFERENCE_VERSION = '0.1';
let oldShared = {}; let oldShared = {};
let timeout;
export const buildSharedStateMiddleware = ( export const buildSharedStateMiddleware = (
actions: Array<string>, actions: Array<string>,
sharedStateFilters: {}, sharedStateFilters: {},
sharedStateCb?: any => void sharedStateCb?: any => void
) => ({ ) => ({ getState, dispatch }: { getState: () => {}, dispatch: any => void }) => (
getState, next: ({}) => void
dispatch, ) => (action: { type: string, data: any }) => {
}: {
getState: () => { user: any, settings: any },
dispatch: any => void,
}) => (next: ({}) => void) => (action: { type: string, data: any }) => {
const currentState = getState(); const currentState = getState();
// We don't care if sync is disabled here, we always want to back up preferences to the wallet
if (!actions.includes(action.type) || typeof action === 'function') { if (!actions.includes(action.type)) {
return next(action); return next(action);
} }
clearTimeout(timeout);
const actionResult = next(action); const actionResult = next(action);
// Call `getState` after calling `next` to ensure the state has updated in response to the action
function runPreferences() { const nextState = getState();
const nextState: { user: any, settings: any } = getState(); const shared = {};
const syncEnabled =
nextState.settings &&
nextState.settings.clientSettings &&
nextState.settings.clientSettings.enable_sync;
const hasVerifiedEmail =
nextState.user && nextState.user.user && nextState.user.user.has_verified_email;
const preferenceKey = syncEnabled && hasVerifiedEmail ? 'shared' : 'local';
const shared = {};
Object.keys(sharedStateFilters).forEach(key => { Object.keys(sharedStateFilters).forEach(key => {
const filter = sharedStateFilters[key]; const filter = sharedStateFilters[key];
const { source, property, transform } = filter; const { source, property, transform } = filter;
let value = nextState[source][property]; let value = nextState[source][property];
if (transform) { if (transform) {
value = transform(value); value = transform(value);
}
shared[key] = value;
});
if (!isEqual(oldShared, shared)) {
// only update if the preference changed from last call in the same session
oldShared = shared;
dispatch(doPreferenceSet(preferenceKey, shared, SHARED_PREFERENCE_VERSION));
} }
if (sharedStateCb) { shared[key] = value;
// Pass dispatch to the callback to consumers can dispatch actions in response to preference set });
sharedStateCb({ dispatch, getState });
} if (!isEqual(oldShared, shared)) {
clearTimeout(timeout); // only update if the preference changed from last call in the same session
return actionResult; oldShared = shared;
doPreferenceSet(SHARED_PREFERENCE_KEY, shared, SHARED_PREFERENCE_VERSION);
} }
timeout = setTimeout(runPreferences, RUN_PREFERENCES_DELAY_MS);
if (sharedStateCb) {
// Pass dispatch to the callback to consumers can dispatch actions in response to preference set
sharedStateCb({ dispatch, getState });
}
return actionResult;
}; };
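Both versions of the middleware above reduce the store to a small filtered snapshot and only persist it when the snapshot differs from the previous one; the newer variant additionally debounces the work behind a two-second timer and picks the preference key based on sync status. A minimal sketch of the change-detection core, assuming only the isEqual deep-compare imported above (buildSnapshot and maybePersist are hypothetical names):

// Build the filtered snapshot the middleware compares between actions.
function buildSnapshot(state, filters) {
  const snapshot = {};
  Object.keys(filters).forEach(key => {
    const { source, property, transform } = filters[key];
    let value = state[source][property];
    if (transform) value = transform(value);
    snapshot[key] = value;
  });
  return snapshot;
}

let lastSnapshot = {};

// Persist only when something actually changed since the last call in this session.
function maybePersist(state, filters, persist) {
  const snapshot = buildSnapshot(state, filters);
  if (!isEqual(lastSnapshot, snapshot)) {
    lastSnapshot = snapshot;
    persist(snapshot);
  }
}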
View file
@ -9,35 +9,24 @@
// - Sean
import * as ACTIONS from 'constants/action_types'; import * as ACTIONS from 'constants/action_types';
import mergeClaim from 'util/merge-claim'; import { buildURI, parseURI } from 'lbryURI';
type State = { type State = {
createChannelError: ?string, createChannelError: ?string,
createCollectionError: ?string,
channelClaimCounts: { [string]: number }, channelClaimCounts: { [string]: number },
claimsByUri: { [string]: string }, claimsByUri: { [string]: string },
byId: { [string]: Claim }, byId: { [string]: Claim },
pendingById: { [string]: Claim }, // keep pending claims
resolvingUris: Array<string>, resolvingUris: Array<string>,
reflectingById: { [string]: ReflectingUpdate }, pendingById: { [string]: Claim },
myClaims: ?Array<string>, myClaims: ?Array<string>,
myChannelClaims: ?Array<string>, myChannelClaims: ?Array<string>,
myCollectionClaims: ?Array<string>,
abandoningById: { [string]: boolean }, abandoningById: { [string]: boolean },
fetchingChannelClaims: { [string]: number }, fetchingChannelClaims: { [string]: number },
fetchingMyChannels: boolean, fetchingMyChannels: boolean,
fetchingMyCollections: boolean,
fetchingClaimSearchByQuery: { [string]: boolean }, fetchingClaimSearchByQuery: { [string]: boolean },
purchaseUriSuccess: boolean,
myPurchases: ?Array<string>,
myPurchasesPageNumber: ?number,
myPurchasesPageTotalResults: ?number,
fetchingMyPurchases: boolean,
fetchingMyPurchasesError: ?string,
claimSearchByQuery: { [string]: Array<string> }, claimSearchByQuery: { [string]: Array<string> },
claimSearchByQueryLastPageReached: { [string]: Array<boolean> }, claimSearchByQueryLastPageReached: { [string]: Array<boolean> },
creatingChannel: boolean, creatingChannel: boolean,
creatingCollection: boolean,
paginatedClaimsByChannel: { paginatedClaimsByChannel: {
[string]: { [string]: {
all: Array<string>, all: Array<string>,
@ -46,21 +35,11 @@ type State = {
[number]: Array<string>, [number]: Array<string>,
}, },
}, },
updateChannelError: ?string, updateChannelError: string,
updateCollectionError: ?string,
updatingChannel: boolean, updatingChannel: boolean,
updatingCollection: boolean,
pendingChannelImport: string | boolean, pendingChannelImport: string | boolean,
repostLoading: boolean, repostLoading: boolean,
repostError: ?string, repostError: ?string,
fetchingClaimListMinePageError: ?string,
myClaimsPageResults: Array<string>,
myClaimsPageNumber: ?number,
myClaimsPageTotalResults: ?number,
isFetchingClaimListMine: boolean,
isCheckingNameForPublish: boolean,
checkingPending: boolean,
checkingReflecting: boolean,
}; };
const reducers = {}; const reducers = {};
@ -71,67 +50,51 @@ const defaultState = {
channelClaimCounts: {}, channelClaimCounts: {},
fetchingChannelClaims: {}, fetchingChannelClaims: {},
resolvingUris: [], resolvingUris: [],
// This should not be a Set
// Storing sets in reducers can cause issues
myChannelClaims: undefined, myChannelClaims: undefined,
myCollectionClaims: [],
myClaims: undefined, myClaims: undefined,
myPurchases: undefined,
myPurchasesPageNumber: undefined,
myPurchasesPageTotalResults: undefined,
purchaseUriSuccess: false,
fetchingMyPurchases: false,
fetchingMyPurchasesError: undefined,
fetchingMyChannels: false, fetchingMyChannels: false,
fetchingMyCollections: false,
abandoningById: {}, abandoningById: {},
pendingById: {}, pendingById: {},
reflectingById: {},
claimSearchError: false, claimSearchError: false,
claimSearchByQuery: {}, claimSearchByQuery: {},
claimSearchByQueryLastPageReached: {}, claimSearchByQueryLastPageReached: {},
fetchingClaimSearchByQuery: {}, fetchingClaimSearchByQuery: {},
updateChannelError: '', updateChannelError: '',
updateCollectionError: '',
updatingChannel: false, updatingChannel: false,
creatingChannel: false, creatingChannel: false,
createChannelError: undefined, createChannelError: undefined,
updatingCollection: false,
creatingCollection: false,
createCollectionError: undefined,
pendingChannelImport: false, pendingChannelImport: false,
repostLoading: false, repostLoading: false,
repostError: undefined, repostError: undefined,
fetchingClaimListMinePageError: undefined,
myClaimsPageResults: [],
myClaimsPageNumber: undefined,
myClaimsPageTotalResults: undefined,
isFetchingClaimListMine: false,
isFetchingMyPurchases: false,
isCheckingNameForPublish: false,
checkingPending: false,
checkingReflecting: false,
}; };
function handleClaimAction(state: State, action: any): State { function handleClaimAction(state: State, action: any): State {
const { resolveInfo }: ClaimActionResolveInfo = action.data; const {
resolveInfo,
}: {
[string]: {
stream: ?StreamClaim,
channel: ?ChannelClaim,
claimsInChannel: ?number,
},
} = action.data;
const byUri = Object.assign({}, state.claimsByUri); const byUri = Object.assign({}, state.claimsByUri);
const byId = Object.assign({}, state.byId); const byId = Object.assign({}, state.byId);
const channelClaimCounts = Object.assign({}, state.channelClaimCounts); const channelClaimCounts = Object.assign({}, state.channelClaimCounts);
const pendingById = state.pendingById;
let newResolvingUrls = new Set(state.resolvingUris); let newResolvingUrls = new Set(state.resolvingUris);
let myClaimIds = new Set(state.myClaims);
Object.entries(resolveInfo).forEach(([url: string, resolveResponse: ResolveResponse]) => { Object.entries(resolveInfo).forEach(([url: string, resolveResponse: ResolveResponse]) => {
// $FlowFixMe // $FlowFixMe
const { claimsInChannel, stream, channel: channelFromResolve, collection } = resolveResponse; const { claimsInChannel, stream, channel } = resolveResponse;
const channel = channelFromResolve || (stream && stream.signing_channel); if (claimsInChannel) {
channelClaimCounts[url] = claimsInChannel;
channelClaimCounts[channel.canonical_url] = claimsInChannel;
}
if (stream) { if (stream) {
if (pendingById[stream.claim_id]) { byId[stream.claim_id] = stream;
byId[stream.claim_id] = mergeClaim(stream, byId[stream.claim_id]);
} else {
byId[stream.claim_id] = stream;
}
byUri[url] = stream.claim_id; byUri[url] = stream.claim_id;
// If url isn't a canonical_url, make sure that is added too
@ -141,53 +104,23 @@ function handleClaimAction(state: State, action: any): State {
byUri[stream.permanent_url] = stream.claim_id; byUri[stream.permanent_url] = stream.claim_id;
newResolvingUrls.delete(stream.canonical_url); newResolvingUrls.delete(stream.canonical_url);
newResolvingUrls.delete(stream.permanent_url); newResolvingUrls.delete(stream.permanent_url);
if (stream.is_my_output) {
myClaimIds.add(stream.claim_id);
}
} }
if (channel && channel.claim_id) { if (channel) {
if (!stream) { if (!stream) {
byUri[url] = channel.claim_id; byUri[url] = channel.claim_id;
} }
if (claimsInChannel) { byId[channel.claim_id] = channel;
channelClaimCounts[url] = claimsInChannel; // Also add the permanent_url here until lighthouse returns canonical_url for search results
channelClaimCounts[channel.canonical_url] = claimsInChannel;
}
if (pendingById[channel.claim_id]) {
byId[channel.claim_id] = mergeClaim(channel, byId[channel.claim_id]);
} else {
byId[channel.claim_id] = channel;
}
byUri[channel.permanent_url] = channel.claim_id; byUri[channel.permanent_url] = channel.claim_id;
byUri[channel.canonical_url] = channel.claim_id; byUri[channel.canonical_url] = channel.claim_id;
newResolvingUrls.delete(channel.canonical_url); newResolvingUrls.delete(channel.canonical_url);
newResolvingUrls.delete(channel.permanent_url); newResolvingUrls.delete(channel.permanent_url);
} }
if (collection) {
if (pendingById[collection.claim_id]) {
byId[collection.claim_id] = mergeClaim(collection, byId[collection.claim_id]);
} else {
byId[collection.claim_id] = collection;
}
byUri[url] = collection.claim_id;
byUri[collection.canonical_url] = collection.claim_id;
byUri[collection.permanent_url] = collection.claim_id;
newResolvingUrls.delete(collection.canonical_url);
newResolvingUrls.delete(collection.permanent_url);
if (collection.is_my_output) {
myClaimIds.add(collection.claim_id);
}
}
newResolvingUrls.delete(url); newResolvingUrls.delete(url);
if (!stream && !channel && !collection && !pendingById[byUri[url]]) { if (!stream && !channel) {
byUri[url] = null; byUri[url] = null;
} }
}); });
@ -197,7 +130,6 @@ function handleClaimAction(state: State, action: any): State {
claimsByUri: byUri, claimsByUri: byUri,
channelClaimCounts, channelClaimCounts,
resolvingUris: Array.from(newResolvingUrls), resolvingUris: Array.from(newResolvingUrls),
myClaims: Array.from(myClaimIds),
}); });
} }
@ -230,46 +162,46 @@ reducers[ACTIONS.FETCH_CLAIM_LIST_MINE_STARTED] = (state: State): State =>
}); });
reducers[ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED] = (state: State, action: any): State => { reducers[ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED] = (state: State, action: any): State => {
const { result }: { result: ClaimListResponse } = action.data; const { claims }: { claims: Array<Claim> } = action.data;
const claims = result.items;
const page = result.page;
const totalItems = result.total_items;
const byId = Object.assign({}, state.byId); const byId = Object.assign({}, state.byId);
const byUri = Object.assign({}, state.claimsByUri); const byUri = Object.assign({}, state.claimsByUri);
const pendingById = Object.assign({}, state.pendingById); const pendingById: { [string]: Claim } = Object.assign({}, state.pendingById);
let myClaimIds = new Set(state.myClaims); let myClaimIds = new Set(state.myClaims);
let urlsForCurrentPage = [];
claims.forEach((claim: Claim) => { claims.forEach((claim: Claim) => {
const { permanent_url: permanentUri, claim_id: claimId, canonical_url: canonicalUri } = claim; const uri = buildURI({ streamName: claim.name, streamClaimId: claim.claim_id });
const { claim_id: claimId } = claim;
if (claim.type && claim.type.match(/claim|update/)) { if (claim.type && claim.type.match(/claim|update/)) {
urlsForCurrentPage.push(permanentUri);
if (claim.confirmations < 1) { if (claim.confirmations < 1) {
pendingById[claimId] = claim; pendingById[claimId] = claim;
if (byId[claimId]) { delete byId[claimId];
byId[claimId] = mergeClaim(claim, byId[claimId]); delete byUri[claimId];
} else {
byId[claimId] = claim;
}
} else { } else {
byId[claimId] = claim; byId[claimId] = claim;
byUri[uri] = claimId;
} }
byUri[permanentUri] = claimId;
byUri[canonicalUri] = claimId;
myClaimIds.add(claimId); myClaimIds.add(claimId);
if (pendingById[claimId] && claim.confirmations > 0) {
delete pendingById[claimId];
}
} }
}); });
// Remove old pending publishes
Object.values(pendingById)
// $FlowFixMe
.filter(pendingClaim => byId[pendingClaim.claim_id])
.forEach(pendingClaim => {
// $FlowFixMe
delete pendingById[pendingClaim.claim_id];
});
return Object.assign({}, state, { return Object.assign({}, state, {
isFetchingClaimListMine: false, isFetchingClaimListMine: false,
myClaims: Array.from(myClaimIds), myClaims: Array.from(myClaimIds),
byId, byId,
pendingById,
claimsByUri: byUri, claimsByUri: byUri,
myClaimsPageResults: urlsForCurrentPage, pendingById,
myClaimsPageNumber: page,
myClaimsPageTotalResults: totalItems,
}); });
}; };
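The key change in the reducer above is how unconfirmed claims are stored: anything with confirmations < 1 stays in pendingById and is merged into byId with mergeClaim instead of overwriting what resolve already gave us, while confirmed claims drop any stale pending entry. That bookkeeping in isolation, as an illustrative sketch (mergeClaim is the util/merge-claim helper imported above):

// Illustrative upsert mirroring the pending/confirmed handling above.
function upsertMyClaim(byId, pendingById, claim) {
  const id = claim.claim_id;
  if (claim.confirmations < 1) {
    // Unconfirmed: keep it pending and merge with any previously resolved copy.
    pendingById[id] = claim;
    byId[id] = byId[id] ? mergeClaim(claim, byId[id]) : claim;
  } else {
    // Confirmed: store it and clear any stale pending entry.
    byId[id] = claim;
    delete pendingById[id];
  }
}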
@ -278,8 +210,9 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_STARTED] = (state: State): State =>
reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): State => { reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): State => {
const { claims }: { claims: Array<ChannelClaim> } = action.data; const { claims }: { claims: Array<ChannelClaim> } = action.data;
const myClaims = state.myClaims || [];
let myClaimIds = new Set(state.myClaims); let myClaimIds = new Set(state.myClaims);
const pendingById = Object.assign({}, state.pendingById); const pendingById = Object.assign(state.pendingById);
let myChannelClaims; let myChannelClaims;
const byId = Object.assign({}, state.byId); const byId = Object.assign({}, state.byId);
const byUri = Object.assign({}, state.claimsByUri); const byUri = Object.assign({}, state.claimsByUri);
@ -287,18 +220,13 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): St
if (!claims.length) { if (!claims.length) {
// $FlowFixMe // $FlowFixMe
myChannelClaims = null; myChannelClaims = [];
} else { } else {
myChannelClaims = new Set(state.myChannelClaims); myChannelClaims = new Set(state.myChannelClaims);
claims.forEach(claim => { claims.forEach(claim => {
const { meta } = claim; const { meta } = claim;
const { claims_in_channel: claimsInChannel } = claim.meta; const { claims_in_channel: claimsInChannel } = claim.meta;
const { const { canonical_url: canonicalUrl, permanent_url: permanentUrl, claim_id: claimId } = claim;
canonical_url: canonicalUrl,
permanent_url: permanentUrl,
claim_id: claimId,
confirmations,
} = claim;
byUri[canonicalUrl] = claimId; byUri[canonicalUrl] = claimId;
byUri[permanentUrl] = claimId; byUri[permanentUrl] = claimId;
@ -307,97 +235,31 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): St
// $FlowFixMe // $FlowFixMe
myChannelClaims.add(claimId); myChannelClaims.add(claimId);
if (confirmations < 1) { if (!byId[claimId]) {
pendingById[claimId] = claim;
if (byId[claimId]) {
byId[claimId] = mergeClaim(claim, byId[claimId]);
} else {
byId[claimId] = claim;
}
} else {
byId[claimId] = claim; byId[claimId] = claim;
} }
myClaimIds.add(claimId); myClaimIds.add(claimId);
if (pendingById[claimId] && claim.confirmations > 0) {
delete pendingById[claimId];
}
if (pendingById[claimId] && claim.confirmations > 0) {
delete pendingById[claimId];
}
}); });
} }
return Object.assign({}, state, { return Object.assign({}, state, {
byId, byId,
pendingById,
claimsByUri: byUri, claimsByUri: byUri,
channelClaimCounts, channelClaimCounts,
fetchingMyChannels: false, fetchingMyChannels: false,
myChannelClaims: myChannelClaims ? Array.from(myChannelClaims) : null, myChannelClaims: Array.from(myChannelClaims),
myClaims: myClaimIds ? Array.from(myClaimIds) : null, myClaims: Array.from(myClaimIds),
}); });
}; };
reducers[ACTIONS.FETCH_CHANNEL_LIST_FAILED] = (state: State, action: any): State => {
return Object.assign({}, state, {
fetchingMyChannels: false,
});
};
reducers[ACTIONS.FETCH_COLLECTION_LIST_STARTED] = (state: State): State => ({
...state,
fetchingMyCollections: true,
});
reducers[ACTIONS.FETCH_COLLECTION_LIST_COMPLETED] = (state: State, action: any): State => {
const { claims }: { claims: Array<CollectionClaim> } = action.data;
const myClaims = state.myClaims || [];
let myClaimIds = new Set(myClaims);
const pendingById = Object.assign({}, state.pendingById);
let myCollectionClaimsSet = new Set([]);
const byId = Object.assign({}, state.byId);
const byUri = Object.assign({}, state.claimsByUri);
if (claims.length) {
myCollectionClaimsSet = new Set(state.myCollectionClaims);
claims.forEach(claim => {
const { meta } = claim;
const {
canonical_url: canonicalUrl,
permanent_url: permanentUrl,
claim_id: claimId,
confirmations,
} = claim;
byUri[canonicalUrl] = claimId;
byUri[permanentUrl] = claimId;
// $FlowFixMe
myCollectionClaimsSet.add(claimId);
// we don't want to overwrite a pending result with a resolve
if (confirmations < 1) {
pendingById[claimId] = claim;
if (byId[claimId]) {
byId[claimId] = mergeClaim(claim, byId[claimId]);
} else {
byId[claimId] = claim;
}
} else {
byId[claimId] = claim;
}
myClaimIds.add(claimId);
});
}
return {
...state,
byId,
pendingById,
claimsByUri: byUri,
fetchingMyCollections: false,
myCollectionClaims: Array.from(myCollectionClaimsSet),
myClaims: myClaimIds ? Array.from(myClaimIds) : null,
};
};
reducers[ACTIONS.FETCH_COLLECTION_LIST_FAILED] = (state: State): State => {
return { ...state, fetchingMyCollections: false };
};
reducers[ACTIONS.FETCH_CHANNEL_CLAIMS_STARTED] = (state: State, action: any): State => { reducers[ACTIONS.FETCH_CHANNEL_CLAIMS_STARTED] = (state: State, action: any): State => {
const { uri, page } = action.data; const { uri, page } = action.data;
const fetchingChannelClaims = Object.assign({}, state.fetchingChannelClaims); const fetchingChannelClaims = Object.assign({}, state.fetchingChannelClaims);
@ -476,77 +338,12 @@ reducers[ACTIONS.ABANDON_CLAIM_STARTED] = (state: State, action: any): State =>
}); });
}; };
reducers[ACTIONS.UPDATE_PENDING_CLAIMS] = (state: State, action: any): State => {
const { claims: pendingClaims }: { claims: Array<Claim> } = action.data;
const byId = Object.assign({}, state.byId);
const pendingById = Object.assign({}, state.pendingById);
const byUri = Object.assign({}, state.claimsByUri);
let myClaimIds = new Set(state.myClaims);
const myChannelClaims = new Set(state.myChannelClaims);
// $FlowFixMe
pendingClaims.forEach((claim: Claim) => {
let newClaim;
const { permanent_url: uri, claim_id: claimId, type, value_type: valueType } = claim;
pendingById[claimId] = claim; // make sure we don't need to merge?
const oldClaim = byId[claimId];
if (oldClaim && oldClaim.canonical_url) {
newClaim = mergeClaim(oldClaim, claim);
} else {
newClaim = claim;
}
if (valueType === 'channel') {
myChannelClaims.add(claimId);
}
if (type && type.match(/claim|update/)) {
byId[claimId] = newClaim;
byUri[uri] = claimId;
}
myClaimIds.add(claimId);
});
return Object.assign({}, state, {
myClaims: Array.from(myClaimIds),
byId,
pendingById,
myChannelClaims: Array.from(myChannelClaims),
claimsByUri: byUri,
});
};
reducers[ACTIONS.UPDATE_CONFIRMED_CLAIMS] = (state: State, action: any): State => {
const {
claims: confirmedClaims,
pending: pendingClaims,
}: { claims: Array<Claim>, pending: { [string]: Claim } } = action.data;
const byId = Object.assign({}, state.byId);
const byUri = Object.assign({}, state.claimsByUri);
//
confirmedClaims.forEach((claim: GenericClaim) => {
const { claim_id: claimId, type } = claim;
let newClaim = claim;
const oldClaim = byId[claimId];
if (oldClaim && oldClaim.canonical_url) {
newClaim = mergeClaim(oldClaim, claim);
}
if (type && type.match(/claim|update|channel/)) {
byId[claimId] = newClaim;
}
});
return Object.assign({}, state, {
pendingById: pendingClaims,
byId,
claimsByUri: byUri,
});
};
reducers[ACTIONS.ABANDON_CLAIM_SUCCEEDED] = (state: State, action: any): State => { reducers[ACTIONS.ABANDON_CLAIM_SUCCEEDED] = (state: State, action: any): State => {
const { claimId }: { claimId: string } = action.data; const { claimId }: { claimId: string } = action.data;
const byId = Object.assign({}, state.byId); const byId = Object.assign({}, state.byId);
const newMyClaims = state.myClaims ? state.myClaims.slice() : []; const newMyClaims = state.myClaims ? state.myClaims.slice() : [];
const newMyChannelClaims = state.myChannelClaims ? state.myChannelClaims.slice() : []; const newMyChannelClaims = state.myChannelClaims ? state.myChannelClaims.slice() : [];
const claimsByUri = Object.assign({}, state.claimsByUri); const claimsByUri = Object.assign({}, state.claimsByUri);
const newMyCollectionClaims = state.myCollectionClaims ? state.myCollectionClaims.slice() : [];
Object.keys(claimsByUri).forEach(uri => { Object.keys(claimsByUri).forEach(uri => {
if (claimsByUri[uri] === claimId) { if (claimsByUri[uri] === claimId) {
@ -555,25 +352,17 @@ reducers[ACTIONS.ABANDON_CLAIM_SUCCEEDED] = (state: State, action: any): State =
}); });
const myClaims = newMyClaims.filter(i => i !== claimId); const myClaims = newMyClaims.filter(i => i !== claimId);
const myChannelClaims = newMyChannelClaims.filter(i => i !== claimId); const myChannelClaims = newMyChannelClaims.filter(i => i !== claimId);
const myCollectionClaims = newMyCollectionClaims.filter(i => i !== claimId);
delete byId[claimId]; delete byId[claimId];
return Object.assign({}, state, { return Object.assign({}, state, {
myClaims, myClaims,
myChannelClaims, myChannelClaims,
myCollectionClaims,
byId, byId,
claimsByUri, claimsByUri,
}); });
}; };
reducers[ACTIONS.CLEAR_CHANNEL_ERRORS] = (state: State): State => ({
...state,
createChannelError: null,
updateChannelError: null,
});
reducers[ACTIONS.CREATE_CHANNEL_STARTED] = (state: State): State => ({ reducers[ACTIONS.CREATE_CHANNEL_STARTED] = (state: State): State => ({
...state, ...state,
creatingChannel: true, creatingChannel: true,
@ -581,7 +370,19 @@ reducers[ACTIONS.CREATE_CHANNEL_STARTED] = (state: State): State => ({
}); });
reducers[ACTIONS.CREATE_CHANNEL_COMPLETED] = (state: State, action: any): State => { reducers[ACTIONS.CREATE_CHANNEL_COMPLETED] = (state: State, action: any): State => {
const channelClaim: ChannelClaim = action.data.channelClaim;
const byId = Object.assign({}, state.byId);
const pendingById = Object.assign({}, state.pendingById);
const myChannelClaims = new Set(state.myChannelClaims);
byId[channelClaim.claim_id] = channelClaim;
pendingById[channelClaim.claim_id] = channelClaim;
myChannelClaims.add(channelClaim.claim_id);
return Object.assign({}, state, { return Object.assign({}, state, {
byId,
pendingById,
myChannelClaims: Array.from(myChannelClaims),
creatingChannel: false, creatingChannel: false,
}); });
}; };
@ -601,7 +402,13 @@ reducers[ACTIONS.UPDATE_CHANNEL_STARTED] = (state: State, action: any): State =>
}; };
reducers[ACTIONS.UPDATE_CHANNEL_COMPLETED] = (state: State, action: any): State => { reducers[ACTIONS.UPDATE_CHANNEL_COMPLETED] = (state: State, action: any): State => {
const channelClaim: ChannelClaim = action.data.channelClaim;
const byId = Object.assign({}, state.byId);
byId[channelClaim.claim_id] = channelClaim;
return Object.assign({}, state, { return Object.assign({}, state, {
byId,
updateChannelError: '', updateChannelError: '',
updatingChannel: false, updatingChannel: false,
}); });
@ -614,61 +421,6 @@ reducers[ACTIONS.UPDATE_CHANNEL_FAILED] = (state: State, action: any): State =>
}); });
}; };
reducers[ACTIONS.CLEAR_COLLECTION_ERRORS] = (state: State): State => ({
...state,
createCollectionError: null,
updateCollectionError: null,
});
reducers[ACTIONS.COLLECTION_PUBLISH_STARTED] = (state: State): State => ({
...state,
creatingCollection: true,
createCollectionError: null,
});
reducers[ACTIONS.COLLECTION_PUBLISH_COMPLETED] = (state: State, action: any): State => {
const myCollections = state.myCollectionClaims || [];
const myClaims = state.myClaims || [];
const { claimId } = action.data;
let myClaimIds = new Set(myClaims);
let myCollectionClaimsSet = new Set(myCollections);
myClaimIds.add(claimId);
myCollectionClaimsSet.add(claimId);
return Object.assign({}, state, {
creatingCollection: false,
myClaims: Array.from(myClaimIds),
myCollectionClaims: Array.from(myCollectionClaimsSet),
});
};
reducers[ACTIONS.COLLECTION_PUBLISH_FAILED] = (state: State, action: any): State => {
return Object.assign({}, state, {
creatingCollection: false,
createCollectionError: action.data.error,
});
};
reducers[ACTIONS.COLLECTION_PUBLISH_UPDATE_STARTED] = (state: State, action: any): State => {
return Object.assign({}, state, {
updateCollectionError: '',
updatingCollection: true,
});
};
reducers[ACTIONS.COLLECTION_PUBLISH_UPDATE_COMPLETED] = (state: State, action: any): State => {
return Object.assign({}, state, {
updateCollectionError: '',
updatingCollection: false,
});
};
reducers[ACTIONS.COLLECTION_PUBLISH_UPDATE_FAILED] = (state: State, action: any): State => {
return Object.assign({}, state, {
updateCollectionError: action.data.error,
updatingCollection: false,
});
};
reducers[ACTIONS.IMPORT_CHANNEL_STARTED] = (state: State): State => reducers[ACTIONS.IMPORT_CHANNEL_STARTED] = (state: State): State =>
Object.assign({}, state, { pendingChannelImports: true }); Object.assign({}, state, { pendingChannelImports: true });
@ -720,23 +472,13 @@ reducers[ACTIONS.CLAIM_SEARCH_FAILED] = (state: State, action: any): State => {
const { query } = action.data; const { query } = action.data;
const claimSearchByQuery = Object.assign({}, state.claimSearchByQuery); const claimSearchByQuery = Object.assign({}, state.claimSearchByQuery);
const fetchingClaimSearchByQuery = Object.assign({}, state.fetchingClaimSearchByQuery); const fetchingClaimSearchByQuery = Object.assign({}, state.fetchingClaimSearchByQuery);
const claimSearchByQueryLastPageReached = Object.assign(
{},
state.claimSearchByQueryLastPageReached
);
delete fetchingClaimSearchByQuery[query]; delete fetchingClaimSearchByQuery[query];
claimSearchByQuery[query] = null;
if (claimSearchByQuery[query] && claimSearchByQuery[query].length !== 0) {
claimSearchByQueryLastPageReached[query] = true;
} else {
claimSearchByQuery[query] = null;
}
return Object.assign({}, state, { return Object.assign({}, state, {
fetchingClaimSearchByQuery, fetchingClaimSearchByQuery,
claimSearchByQuery, claimSearchByQuery,
claimSearchByQueryLastPageReached,
}); });
}; };
@ -780,133 +522,6 @@ reducers[ACTIONS.CLEAR_REPOST_ERROR] = (state: State): State => {
repostError: null, repostError: null,
}; };
}; };
reducers[ACTIONS.ADD_FILES_REFLECTING] = (state: State, action): State => {
const pendingClaim = action.data;
const { reflectingById } = state;
const claimId = pendingClaim && pendingClaim.claim_id;
reflectingById[claimId] = { fileListItem: pendingClaim, progress: 0, stalled: false };
return Object.assign({}, state, {
...state,
reflectingById: reflectingById,
});
};
reducers[ACTIONS.UPDATE_FILES_REFLECTING] = (state: State, action): State => {
const newReflectingById = action.data;
return Object.assign({}, state, {
...state,
reflectingById: newReflectingById,
});
};
reducers[ACTIONS.TOGGLE_CHECKING_REFLECTING] = (state: State, action): State => {
const checkingReflecting = action.data;
return Object.assign({}, state, {
...state,
checkingReflecting,
});
};
reducers[ACTIONS.TOGGLE_CHECKING_PENDING] = (state: State, action): State => {
const checking = action.data;
return Object.assign({}, state, {
...state,
checkingPending: checking,
});
};
reducers[ACTIONS.PURCHASE_LIST_STARTED] = (state: State): State => {
return {
...state,
fetchingMyPurchases: true,
fetchingMyPurchasesError: null,
};
};
reducers[ACTIONS.PURCHASE_LIST_COMPLETED] = (state: State, action: any): State => {
const { result }: { result: PurchaseListResponse, resolve: boolean } = action.data;
const page = result.page;
const totalItems = result.total_items;
let byId = Object.assign({}, state.byId);
let byUri = Object.assign({}, state.claimsByUri);
let urlsForCurrentPage = [];
result.items.forEach(item => {
if (!item.claim) {
// Abandoned claim
return;
}
const { claim, ...purchaseInfo } = item;
claim.purchase_receipt = purchaseInfo;
const claimId = claim.claim_id;
const uri = claim.canonical_url;
byId[claimId] = claim;
byUri[uri] = claimId;
urlsForCurrentPage.push(uri);
});
return Object.assign({}, state, {
byId,
claimsByUri: byUri,
myPurchases: urlsForCurrentPage,
myPurchasesPageNumber: page,
myPurchasesPageTotalResults: totalItems,
fetchingMyPurchases: false,
});
};
reducers[ACTIONS.PURCHASE_LIST_FAILED] = (state: State, action: any): State => {
const { error } = action.data;
return {
...state,
fetchingMyPurchases: false,
fetchingMyPurchasesError: error,
};
};
reducers[ACTIONS.PURCHASE_URI_COMPLETED] = (state: State, action: any): State => {
const { uri, purchaseReceipt } = action.data;
let byId = Object.assign({}, state.byId);
let byUri = Object.assign({}, state.claimsByUri);
let myPurchases = state.myPurchases ? state.myPurchases.slice() : [];
let urlsForCurrentPage = [];
const claimId = byUri[uri];
if (claimId) {
let claim = byId[claimId];
claim.purchase_receipt = purchaseReceipt;
}
myPurchases.push(uri);
return {
...state,
byId,
myPurchases,
purchaseUriSuccess: true,
};
};
reducers[ACTIONS.PURCHASE_URI_FAILED] = (state: State): State => {
return {
...state,
purchaseUriSuccess: false,
};
};
reducers[ACTIONS.CLEAR_PURCHASED_URI_SUCCESS] = (state: State): State => {
return {
...state,
purchaseUriSuccess: false,
};
};
export function claimsReducer(state: State = defaultState, action: any) { export function claimsReducer(state: State = defaultState, action: any) {
const handler = reducers[action.type]; const handler = reducers[action.type];
View file
@ -1,239 +0,0 @@
// @flow
import { handleActions } from 'util/redux-utils';
import * as ACTIONS from 'constants/action_types';
import * as COLS from 'constants/collections';
const getTimestamp = () => {
return Math.floor(Date.now() / 1000);
};
const defaultState: CollectionState = {
builtin: {
watchlater: {
items: [],
id: COLS.WATCH_LATER_ID,
name: 'Watch Later',
updatedAt: getTimestamp(),
type: COLS.COL_TYPE_PLAYLIST,
},
favorites: {
items: [],
id: COLS.FAVORITES_ID,
name: 'Favorites',
type: COLS.COL_TYPE_PLAYLIST,
updatedAt: getTimestamp(),
},
},
resolved: {},
unpublished: {}, // sync
edited: {},
pending: {},
saved: [],
isResolvingCollectionById: {},
error: null,
};
const collectionsReducer = handleActions(
{
[ACTIONS.COLLECTION_NEW]: (state, action) => {
const { entry: params } = action.data; // { id:, items: Array<string>}
// entry
const newListTemplate = {
id: params.id,
name: params.name,
items: [],
updatedAt: getTimestamp(),
type: params.type,
};
const newList = Object.assign({}, newListTemplate, { ...params });
const { unpublished: lists } = state;
const newLists = Object.assign({}, lists, { [params.id]: newList });
return {
...state,
unpublished: newLists,
};
},
[ACTIONS.COLLECTION_DELETE]: (state, action) => {
const { id, collectionKey } = action.data;
const { edited: editList, unpublished: unpublishedList, pending: pendingList } = state;
const newEditList = Object.assign({}, editList);
const newUnpublishedList = Object.assign({}, unpublishedList);
const newPendingList = Object.assign({}, pendingList);
if (collectionKey && state[collectionKey] && state[collectionKey][id]) {
const newList = Object.assign({}, state[collectionKey]);
delete newList[id];
return {
...state,
[collectionKey]: newList,
};
} else {
if (newEditList[id]) {
delete newEditList[id];
} else if (newUnpublishedList[id]) {
delete newUnpublishedList[id];
} else if (newPendingList[id]) {
delete newPendingList[id];
}
}
return {
...state,
edited: newEditList,
unpublished: newUnpublishedList,
pending: newPendingList,
};
},
[ACTIONS.COLLECTION_PENDING]: (state, action) => {
const { localId, claimId } = action.data;
const {
resolved: resolvedList,
edited: editList,
unpublished: unpublishedList,
pending: pendingList,
} = state;
const newEditList = Object.assign({}, editList);
const newResolvedList = Object.assign({}, resolvedList);
const newUnpublishedList = Object.assign({}, unpublishedList);
const newPendingList = Object.assign({}, pendingList);
if (localId) {
// new publish
newPendingList[claimId] = Object.assign({}, newUnpublishedList[localId] || {});
delete newUnpublishedList[localId];
} else {
// edit update
newPendingList[claimId] = Object.assign(
{},
newEditList[claimId] || newResolvedList[claimId]
);
delete newEditList[claimId];
}
return {
...state,
edited: newEditList,
unpublished: newUnpublishedList,
pending: newPendingList,
};
},
[ACTIONS.COLLECTION_EDIT]: (state, action) => {
const { id, collectionKey, collection } = action.data;
if (COLS.BUILTIN_LISTS.includes(id)) {
const { builtin: lists } = state;
return {
...state,
[collectionKey]: { ...lists, [id]: collection },
};
}
if (collectionKey === 'edited') {
const { edited: lists } = state;
return {
...state,
edited: { ...lists, [id]: collection },
};
}
const { unpublished: lists } = state;
return {
...state,
unpublished: { ...lists, [id]: collection },
};
},
[ACTIONS.COLLECTION_ERROR]: (state, action) => {
return Object.assign({}, state, {
error: action.data.message,
});
},
[ACTIONS.COLLECTION_ITEMS_RESOLVE_STARTED]: (state, action) => {
const { ids } = action.data;
const { isResolvingCollectionById } = state;
const newResolving = Object.assign({}, isResolvingCollectionById);
ids.forEach(id => {
newResolving[id] = true;
});
return Object.assign({}, state, {
...state,
error: '',
isResolvingCollectionById: newResolving,
});
},
[ACTIONS.USER_STATE_POPULATE]: (state, action) => {
const {
builtinCollections,
savedCollections,
unpublishedCollections,
editedCollections,
} = action.data;
return {
...state,
edited: editedCollections || state.edited,
unpublished: unpublishedCollections || state.unpublished,
builtin: builtinCollections || state.builtin,
saved: savedCollections || state.saved,
};
},
[ACTIONS.COLLECTION_ITEMS_RESOLVE_COMPLETED]: (state, action) => {
const { resolvedCollections, failedCollectionIds } = action.data;
const { pending, edited, isResolvingCollectionById, resolved } = state;
const newPending = Object.assign({}, pending);
const newEdited = Object.assign({}, edited);
const newResolved = Object.assign({}, resolved, resolvedCollections);
const resolvedIds = Object.keys(resolvedCollections);
const newResolving = Object.assign({}, isResolvingCollectionById);
if (resolvedCollections && Object.keys(resolvedCollections).length) {
resolvedIds.forEach(resolvedId => {
if (newEdited[resolvedId]) {
if (newEdited[resolvedId]['updatedAt'] < resolvedCollections[resolvedId]['updatedAt']) {
delete newEdited[resolvedId];
}
}
delete newResolving[resolvedId];
if (newPending[resolvedId]) {
delete newPending[resolvedId];
}
});
}
if (failedCollectionIds && Object.keys(failedCollectionIds).length) {
failedCollectionIds.forEach(failedId => {
delete newResolving[failedId];
});
}
return Object.assign({}, state, {
...state,
pending: newPending,
resolved: newResolved,
edited: newEdited,
isResolvingCollectionById: newResolving,
});
},
[ACTIONS.COLLECTION_ITEMS_RESOLVE_FAILED]: (state, action) => {
const { ids } = action.data;
const { isResolvingCollectionById } = state;
const newResolving = Object.assign({}, isResolvingCollectionById);
ids.forEach(id => {
delete newResolving[id];
});
return Object.assign({}, state, {
...state,
isResolvingCollectionById: newResolving,
error: action.data.message,
});
},
},
defaultState
);
export { collectionsReducer };
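For reference, the COLLECTION_NEW handler above expects a single entry object carrying id, name, type and items. A dispatch sketch (the id and claim URL are placeholders, not from the diff; COLS.COL_TYPE_PLAYLIST comes from constants/collections):

// dispatch({
//   type: ACTIONS.COLLECTION_NEW,
//   data: {
//     entry: {
//       id: 'local-playlist-1',             // placeholder local id
//       name: 'My playlist',
//       type: COLS.COL_TYPE_PLAYLIST,
//       items: ['lbry://example#claimid'],  // placeholder claim URL
//     },
//   },
// });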
View file
@ -0,0 +1,153 @@
// @flow
import * as ACTIONS from 'constants/action_types';
import { handleActions } from 'util/redux-utils';
const defaultState: CommentsState = {
commentById: {}, // commentId -> Comment
byId: {}, // ClaimID -> list of comments
commentsByUri: {}, // URI -> claimId
isLoading: false,
myComments: undefined,
};
export const commentReducer = handleActions(
{
[ACTIONS.COMMENT_CREATE_STARTED]: (state: CommentsState, action: any): CommentsState => ({
...state,
isLoading: true,
}),
[ACTIONS.COMMENT_CREATE_FAILED]: (state: CommentsState, action: any) => ({
...state,
isLoading: false,
}),
[ACTIONS.COMMENT_CREATE_COMPLETED]: (state: CommentsState, action: any): CommentsState => {
const { comment, claimId }: { comment: Comment, claimId: string } = action.data;
const commentById = Object.assign({}, state.commentById);
const byId = Object.assign({}, state.byId);
const comments = byId[claimId];
const newCommentIds = comments.slice();
// add the comment by its ID
commentById[comment.comment_id] = comment;
// push the comment_id to the top of ID list
newCommentIds.unshift(comment.comment_id);
byId[claimId] = newCommentIds;
return {
...state,
commentById,
byId,
isLoading: false,
};
},
[ACTIONS.COMMENT_LIST_STARTED]: state => ({ ...state, isLoading: true }),
[ACTIONS.COMMENT_LIST_COMPLETED]: (state: CommentsState, action: any) => {
const { comments, claimId, uri } = action.data;
const commentById = Object.assign({}, state.commentById);
const byId = Object.assign({}, state.byId);
const commentsByUri = Object.assign({}, state.commentsByUri);
if (comments) {
// we use an Array to preserve order of listing
// in reality this doesn't matter and we can just
// sort comments by their timestamp
const commentIds = Array(comments.length);
// map the comment_ids to the new comments
for (let i = 0; i < comments.length; i++) {
commentIds[i] = comments[i].comment_id;
commentById[commentIds[i]] = comments[i];
}
byId[claimId] = commentIds;
commentsByUri[uri] = claimId;
}
return {
...state,
byId,
commentById,
commentsByUri,
isLoading: false,
};
},
[ACTIONS.COMMENT_LIST_FAILED]: (state: CommentsState, action: any) => ({
...state,
isLoading: false,
}),
[ACTIONS.COMMENT_ABANDON_STARTED]: (state: CommentsState, action: any) => ({
...state,
isLoading: true,
}),
[ACTIONS.COMMENT_ABANDON_COMPLETED]: (state: CommentsState, action: any) => {
const { comment_id } = action.data;
const commentById = Object.assign({}, state.commentById);
const byId = Object.assign({}, state.byId);
// to remove the comment and its references
const claimId = commentById[comment_id].claim_id;
for (let i = 0; i < byId[claimId].length; i++) {
if (byId[claimId][i] === comment_id) {
byId[claimId].splice(i, 1);
break;
}
}
delete commentById[comment_id];
return {
...state,
commentById,
byId,
isLoading: false,
};
},
// do nothing
[ACTIONS.COMMENT_ABANDON_FAILED]: (state: CommentsState, action: any) => ({
...state,
isLoading: false,
}),
// do nothing
[ACTIONS.COMMENT_UPDATE_STARTED]: (state: CommentsState, action: any) => ({
...state,
isLoading: true,
}),
// replace existing comment with comment returned here under its comment_id
[ACTIONS.COMMENT_UPDATE_COMPLETED]: (state: CommentsState, action: any) => {
const { comment } = action.data;
const commentById = Object.assign({}, state.commentById);
commentById[comment.comment_id] = comment;
return {
...state,
commentById,
isLoading: false,
};
},
// nothing can be done here
[ACTIONS.COMMENT_UPDATE_FAILED]: (state: CommentsState, action: any) => ({
...state,
isLoading: false,
}),
// nothing can really be done here
[ACTIONS.COMMENT_HIDE_STARTED]: (state: CommentsState, action: any) => ({
...state,
isLoading: true,
}),
[ACTIONS.COMMENT_HIDE_COMPLETED]: (state: CommentsState, action: any) => ({
...state, // todo: add HiddenComments state & create selectors
isLoading: false,
}),
// nothing can be done here
[ACTIONS.COMMENT_HIDE_FAILED]: (state: CommentsState, action: any) => ({
...state,
isLoading: false,
}),
},
defaultState
);
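The comment reducer above maintains three indexes: commentById (comment_id to Comment), byId (claim_id to an ordered list of comment ids) and commentsByUri (uri to claim_id). A hypothetical selector showing how they compose (the state.comments mount point is an assumption):

const selectCommentsForUri = (state, uri) => {
  const { commentsByUri, byId, commentById } = state.comments;
  const claimId = commentsByUri[uri];
  const commentIds = (claimId && byId[claimId]) || [];
  // Newest-first order is preserved because COMMENT_CREATE_COMPLETED unshifts new ids.
  return commentIds.map(id => commentById[id]).filter(Boolean);
};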
View file
@ -0,0 +1,89 @@
// @flow
import * as ACTIONS from 'constants/action_types';
const reducers = {};
const defaultState = {
failedPurchaseUris: [],
purchasedUris: [],
purchaseUriErrorMessage: '',
};
reducers[ACTIONS.PURCHASE_URI_STARTED] = (
state: FileState,
action: PurchaseUriStarted
): FileState => {
const { uri } = action.data;
const newFailedPurchaseUris = state.failedPurchaseUris.slice();
if (newFailedPurchaseUris.includes(uri)) {
newFailedPurchaseUris.splice(newFailedPurchaseUris.indexOf(uri), 1);
}
return {
...state,
failedPurchaseUris: newFailedPurchaseUris,
purchaseUriErrorMessage: '',
};
};
reducers[ACTIONS.PURCHASE_URI_COMPLETED] = (
state: FileState,
action: PurchaseUriCompleted
): FileState => {
const { uri } = action.data;
const newPurchasedUris = state.purchasedUris.slice();
const newFailedPurchaseUris = state.failedPurchaseUris.slice();
if (!newPurchasedUris.includes(uri)) {
newPurchasedUris.push(uri);
}
if (newFailedPurchaseUris.includes(uri)) {
newFailedPurchaseUris.splice(newFailedPurchaseUris.indexOf(uri), 1);
}
return {
...state,
failedPurchaseUris: newFailedPurchaseUris,
purchasedUris: newPurchasedUris,
purchaseUriErrorMessage: '',
};
};
reducers[ACTIONS.PURCHASE_URI_FAILED] = (
state: FileState,
action: PurchaseUriFailed
): FileState => {
const { uri, error } = action.data;
const newFailedPurchaseUris = state.failedPurchaseUris.slice();
if (!newFailedPurchaseUris.includes(uri)) {
newFailedPurchaseUris.push(uri);
}
return {
...state,
failedPurchaseUris: newFailedPurchaseUris,
purchaseUriErrorMessage: error,
};
};
reducers[ACTIONS.DELETE_PURCHASED_URI] = (
state: FileState,
action: DeletePurchasedUri
): FileState => {
const { uri } = action.data;
const newPurchasedUris = state.purchasedUris.slice();
if (newPurchasedUris.includes(uri)) {
newPurchasedUris.splice(newPurchasedUris.indexOf(uri), 1);
}
return {
...state,
purchasedUris: newPurchasedUris,
};
};
export function fileReducer(state: FileState = defaultState, action: any) {
const handler = reducers[action.type];
if (handler) return handler(state, action);
return state;
}
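The purchase-URI handlers above all rely on the same add-once / remove-if-present handling of copied arrays. The two moves as tiny immutable helpers (illustrative names, not from the diff):

const addOnce = (list, uri) => (list.includes(uri) ? list : [...list, uri]);
const removeIfPresent = (list, uri) => list.filter(item => item !== uri);

// addOnce(['a'], 'a') -> ['a']    removeIfPresent(['a', 'b'], 'a') -> ['b']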
View file
@ -7,9 +7,7 @@ import { CHANNEL_ANONYMOUS } from 'constants/claim';
type PublishState = { type PublishState = {
editingURI: ?string, editingURI: ?string,
fileText: ?string,
filePath: ?string, filePath: ?string,
remoteFileUrl: ?string,
contentIsFree: boolean, contentIsFree: boolean,
fileDur: number, fileDur: number,
fileSize: number, fileSize: number,
@ -22,11 +20,8 @@ type PublishState = {
thumbnail_url: string, thumbnail_url: string,
thumbnailPath: string, thumbnailPath: string,
uploadThumbnailStatus: string, uploadThumbnailStatus: string,
thumbnailError: ?boolean,
description: string, description: string,
language: string, language: string,
releaseTime: ?number,
releaseTimeEdited: ?number,
channel: string, channel: string,
channelId: ?string, channelId: ?string,
name: string, name: string,
@ -37,17 +32,14 @@ type PublishState = {
licenseUrl: string, licenseUrl: string,
tags: Array<string>, tags: Array<string>,
optimize: boolean, optimize: boolean,
useLBRYUploader: boolean,
}; };
const defaultState: PublishState = { const defaultState: PublishState = {
editingURI: undefined, editingURI: undefined,
fileText: '',
filePath: undefined, filePath: undefined,
fileDur: 0, fileDur: 0,
fileSize: 0, fileSize: 0,
fileVid: false, fileVid: false,
remoteFileUrl: undefined,
contentIsFree: true, contentIsFree: true,
fee: { fee: {
amount: 1, amount: 1,
@ -57,17 +49,14 @@ const defaultState: PublishState = {
thumbnail_url: '', thumbnail_url: '',
thumbnailPath: '', thumbnailPath: '',
uploadThumbnailStatus: THUMBNAIL_STATUSES.API_DOWN, uploadThumbnailStatus: THUMBNAIL_STATUSES.API_DOWN,
thumbnailError: undefined,
description: '', description: '',
language: '', language: '',
releaseTime: undefined,
releaseTimeEdited: undefined,
nsfw: false, nsfw: false,
channel: CHANNEL_ANONYMOUS, channel: CHANNEL_ANONYMOUS,
channelId: '', channelId: '',
name: '', name: '',
nameError: undefined, nameError: undefined,
bid: 0.01, bid: 0.1,
bidError: undefined, bidError: undefined,
licenseType: 'None', licenseType: 'None',
otherLicenseDescription: 'All rights reserved', otherLicenseDescription: 'All rights reserved',
@ -77,7 +66,6 @@ const defaultState: PublishState = {
publishSuccess: false, publishSuccess: false,
publishError: undefined, publishError: undefined,
optimize: false, optimize: false,
useLBRYUploader: false,
}; };
export const publishReducer = handleActions( export const publishReducer = handleActions(
@ -91,11 +79,8 @@ export const publishReducer = handleActions(
}, },
[ACTIONS.CLEAR_PUBLISH]: (state: PublishState): PublishState => ({ [ACTIONS.CLEAR_PUBLISH]: (state: PublishState): PublishState => ({
...defaultState, ...defaultState,
uri: undefined,
channel: state.channel,
bid: state.bid, bid: state.bid,
optimize: state.optimize, optimize: state.optimize,
language: state.language,
}), }),
[ACTIONS.PUBLISH_START]: (state: PublishState): PublishState => ({ [ACTIONS.PUBLISH_START]: (state: PublishState): PublishState => ({
...state, ...state,
View file
@ -0,0 +1,137 @@
// @flow
import * as ACTIONS from 'constants/action_types';
import { handleActions } from 'util/redux-utils';
import { SEARCH_OPTIONS } from 'constants/search';
const defaultState = {
isActive: false, // does the user have any typed text in the search input
focused: false, // is the search input focused
searchQuery: '', // needs to be an empty string for input focusing
options: {
[SEARCH_OPTIONS.RESULT_COUNT]: 30,
[SEARCH_OPTIONS.CLAIM_TYPE]: SEARCH_OPTIONS.INCLUDE_FILES_AND_CHANNELS,
[SEARCH_OPTIONS.MEDIA_AUDIO]: true,
[SEARCH_OPTIONS.MEDIA_VIDEO]: true,
[SEARCH_OPTIONS.MEDIA_TEXT]: true,
[SEARCH_OPTIONS.MEDIA_IMAGE]: true,
[SEARCH_OPTIONS.MEDIA_APPLICATION]: true,
},
suggestions: {},
urisByQuery: {},
resolvedResultsByQuery: {},
resolvedResultsByQueryLastPageReached: {},
};
export const searchReducer = handleActions(
{
[ACTIONS.SEARCH_START]: (state: SearchState): SearchState => ({
...state,
searching: true,
}),
[ACTIONS.SEARCH_SUCCESS]: (state: SearchState, action: SearchSuccess): SearchState => {
const { query, uris } = action.data;
return {
...state,
searching: false,
urisByQuery: Object.assign({}, state.urisByQuery, { [query]: uris }),
};
},
[ACTIONS.SEARCH_FAIL]: (state: SearchState): SearchState => ({
...state,
searching: false,
}),
[ACTIONS.RESOLVED_SEARCH_START]: (state: SearchState): SearchState => ({
...state,
searching: true,
}),
[ACTIONS.RESOLVED_SEARCH_SUCCESS]: (
state: SearchState,
action: ResolvedSearchSuccess
): SearchState => {
const resolvedResultsByQuery = Object.assign({}, state.resolvedResultsByQuery);
const resolvedResultsByQueryLastPageReached = Object.assign(
{},
state.resolvedResultsByQueryLastPageReached
);
const { append, query, results, pageSize } = action.data;
if (append) {
// todo: check for duplicates when concatenating?
resolvedResultsByQuery[query] =
resolvedResultsByQuery[query] && resolvedResultsByQuery[query].length
? resolvedResultsByQuery[query].concat(results)
: results;
} else {
resolvedResultsByQuery[query] = results;
}
// if the returned page is shorter than the page size, we've reached the last page
resolvedResultsByQueryLastPageReached[query] = results.length < pageSize;
return {
...state,
searching: false,
resolvedResultsByQuery,
resolvedResultsByQueryLastPageReached,
};
},
[ACTIONS.RESOLVED_SEARCH_FAIL]: (state: SearchState): SearchState => ({
...state,
searching: false,
}),
[ACTIONS.UPDATE_SEARCH_QUERY]: (
state: SearchState,
action: UpdateSearchQuery
): SearchState => ({
...state,
searchQuery: action.data.query,
isActive: true,
}),
[ACTIONS.UPDATE_SEARCH_SUGGESTIONS]: (
state: SearchState,
action: UpdateSearchSuggestions
): SearchState => ({
...state,
suggestions: {
...state.suggestions,
[action.data.query]: action.data.suggestions,
},
}),
// sets isActive to false so the uri will be populated correctly if the
// user is on a file page. The search query will still be present on any
// other page
[ACTIONS.DISMISS_NOTIFICATION]: (state: SearchState): SearchState => ({
...state,
isActive: false,
}),
[ACTIONS.SEARCH_FOCUS]: (state: SearchState): SearchState => ({
...state,
focused: true,
}),
[ACTIONS.SEARCH_BLUR]: (state: SearchState): SearchState => ({
...state,
focused: false,
}),
[ACTIONS.UPDATE_SEARCH_OPTIONS]: (
state: SearchState,
action: UpdateSearchOptions
): SearchState => {
const { options: oldOptions } = state;
const newOptions = action.data;
const options = { ...oldOptions, ...newOptions };
return {
...state,
options,
};
},
},
defaultState
);
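A rough sketch (not part of the diff) of the append/last-page behavior above, assuming handleActions falls back to the default state when given undefined, that ACTIONS.RESOLVED_SEARCH_SUCCESS is the constant the reducer is keyed on, and that the reducer is importable from redux/reducers/search:

import * as ACTIONS from 'constants/action_types';
import { searchReducer } from 'redux/reducers/search';

const fullPage = Array.from({ length: 20 }, (_, i) => ({ claimId: String(i) }));
const first = searchReducer(undefined, {
  type: ACTIONS.RESOLVED_SEARCH_SUCCESS,
  data: { query: 'cats', results: fullPage, pageSize: 20, append: false },
});
// first.resolvedResultsByQueryLastPageReached['cats'] === false (a full page came back)
const second = searchReducer(first, {
  type: ACTIONS.RESOLVED_SEARCH_SUCCESS,
  data: { query: 'cats', results: fullPage.slice(0, 2), pageSize: 20, append: true },
});
// 22 results are now accumulated for 'cats'; 2 < 20, so the query is flagged as last-page reached.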

View file

@ -0,0 +1,86 @@
// @flow
import * as ACTIONS from 'constants/action_types';
import { handleActions } from 'util/redux-utils';
import { DEFAULT_KNOWN_TAGS, DEFAULT_FOLLOWED_TAGS } from 'constants/tags';
function getDefaultKnownTags() {
return DEFAULT_FOLLOWED_TAGS.concat(DEFAULT_KNOWN_TAGS).reduce(
(tagsMap, tag) => ({
...tagsMap,
[tag]: { name: tag },
}),
{}
);
}
const defaultState: TagState = {
followedTags: [],
knownTags: getDefaultKnownTags(),
};
export const tagsReducer = handleActions(
{
[ACTIONS.TOGGLE_TAG_FOLLOW]: (state: TagState, action: TagAction): TagState => {
const { followedTags } = state;
const { name } = action.data;
let newFollowedTags = followedTags.slice();
if (newFollowedTags.includes(name)) {
newFollowedTags = newFollowedTags.filter(tag => tag !== name);
} else {
newFollowedTags.push(name);
}
return {
...state,
followedTags: newFollowedTags,
};
},
[ACTIONS.TAG_ADD]: (state: TagState, action: TagAction) => {
const { knownTags } = state;
const { name } = action.data;
let newKnownTags = { ...knownTags };
newKnownTags[name] = { name };
return {
...state,
knownTags: newKnownTags,
};
},
[ACTIONS.TAG_DELETE]: (state: TagState, action: TagAction) => {
const { knownTags, followedTags } = state;
const { name } = action.data;
let newKnownTags = { ...knownTags };
delete newKnownTags[name];
const newFollowedTags = followedTags.filter(tag => tag !== name);
return {
...state,
knownTags: newKnownTags,
followedTags: newFollowedTags,
};
},
[ACTIONS.USER_STATE_POPULATE]: (
state: TagState,
action: { data: { tags: ?Array<string> } }
) => {
const { tags } = action.data;
if (Array.isArray(tags)) {
return {
...state,
followedTags: tags,
};
}
return {
...state,
};
},
},
defaultState
);
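For illustration only, the reduce in getDefaultKnownTags just turns the two default tag lists into a map keyed by tag name (the tags here are invented):

const followed = ['art'];
const known = ['gaming', 'music'];
const knownTags = followed.concat(known).reduce(
  (tagsMap, tag) => ({ ...tagsMap, [tag]: { name: tag } }),
  {}
);
// knownTags === { art: { name: 'art' }, gaming: { name: 'gaming' }, music: { name: 'music' } }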

View file

@ -45,17 +45,10 @@ type WalletState = {
walletLockResult: ?boolean, walletLockResult: ?boolean,
walletReconnecting: boolean, walletReconnecting: boolean,
txoFetchParams: {}, txoFetchParams: {},
utxoCounts: {},
txoPage: any, txoPage: any,
fetchId: string,
fetchingTxos: boolean, fetchingTxos: boolean,
fetchingTxosError?: string, fetchingTxosError?: string,
consolidatingUtxos: boolean,
pendingConsolidateTxid?: string,
massClaimingTips: boolean,
pendingMassClaimTxid?: string,
pendingSupportTransactions: {}, // { claimId: {txid: 123, amount 12.3}, } pendingSupportTransactions: {}, // { claimId: {txid: 123, amount 12.3}, }
pendingTxos: Array<string>,
abandonClaimSupportError?: string, abandonClaimSupportError?: string,
}; };
@ -92,20 +85,10 @@ const defaultState = {
transactionListFilter: 'all', transactionListFilter: 'all',
walletReconnecting: false, walletReconnecting: false,
txoFetchParams: {}, txoFetchParams: {},
utxoCounts: {},
fetchingUtxoCounts: false,
fetchingUtxoError: undefined,
consolidatingUtxos: false,
pendingConsolidateTxid: null,
massClaimingTips: false,
pendingMassClaimTxid: null,
txoPage: {}, txoPage: {},
fetchId: '',
fetchingTxos: false, fetchingTxos: false,
fetchingTxosError: undefined, fetchingTxosError: undefined,
pendingSupportTransactions: {}, pendingSupportTransactions: {},
pendingTxos: [],
abandonClaimSupportError: undefined, abandonClaimSupportError: undefined,
}; };
@ -131,26 +114,18 @@ export const walletReducer = handleActions(
}; };
}, },
[ACTIONS.FETCH_TXO_PAGE_STARTED]: (state: WalletState, action) => { [ACTIONS.FETCH_TXO_PAGE_STARTED]: (state: WalletState) => {
return { return {
...state, ...state,
fetchId: action.data,
fetchingTxos: true, fetchingTxos: true,
fetchingTxosError: undefined, fetchingTxosError: undefined,
}; };
}, },
[ACTIONS.FETCH_TXO_PAGE_COMPLETED]: (state: WalletState, action) => { [ACTIONS.FETCH_TXO_PAGE_COMPLETED]: (state: WalletState, action) => {
if (state.fetchId !== action.data.fetchId) {
// Leave 'state' and 'fetchingTxos' alone. The latter ensures the
// spinner keeps spinning for the latest transaction.
return { ...state };
}
return { return {
...state, ...state,
txoPage: action.data.result, txoPage: action.data,
fetchId: '',
fetchingTxos: false, fetchingTxos: false,
}; };
}, },
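A hedged sketch of why the fetchId comparison above exists (action constants follow this file; the initial state value is assumed): only the completion whose id matches the most recent FETCH_TXO_PAGE_STARTED may update the page, so a slow, stale response cannot clobber newer results.

let state = walletReducer(defaultState, { type: ACTIONS.FETCH_TXO_PAGE_STARTED, data: 'fetch-1' });
state = walletReducer(state, { type: ACTIONS.FETCH_TXO_PAGE_STARTED, data: 'fetch-2' });
// A late completion for the first request is ignored because state.fetchId is now 'fetch-2':
state = walletReducer(state, {
  type: ACTIONS.FETCH_TXO_PAGE_COMPLETED,
  data: { fetchId: 'fetch-1', result: { items: [] } },
});
// state.txoPage is unchanged and fetchingTxos stays true until the 'fetch-2' completion lands.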
@ -159,104 +134,10 @@ export const walletReducer = handleActions(
return { return {
...state, ...state,
txoPage: {}, txoPage: {},
fetchId: '',
fetchingTxos: false, fetchingTxos: false,
fetchingTxosError: action.data, fetchingTxosError: action.data,
}; };
}, },
[ACTIONS.FETCH_UTXO_COUNT_STARTED]: (state: WalletState) => {
return {
...state,
fetchingUtxoCounts: true,
fetchingUtxoError: undefined,
};
},
[ACTIONS.FETCH_UTXO_COUNT_COMPLETED]: (state: WalletState, action) => {
return {
...state,
utxoCounts: action.data,
fetchingUtxoCounts: false,
};
},
[ACTIONS.FETCH_UTXO_COUNT_FAILED]: (state: WalletState, action) => {
return {
...state,
utxoCounts: {},
fetchingUtxoCounts: false,
fetchingUtxoError: action.data,
};
},
[ACTIONS.DO_UTXO_CONSOLIDATE_STARTED]: (state: WalletState) => {
return {
...state,
consolidatingUtxos: true,
};
},
[ACTIONS.DO_UTXO_CONSOLIDATE_COMPLETED]: (state: WalletState, action) => {
const { txid } = action.data;
return {
...state,
consolidatingUtxos: false,
pendingConsolidateTxid: txid,
};
},
[ACTIONS.DO_UTXO_CONSOLIDATE_FAILED]: (state: WalletState, action) => {
return {
...state,
consolidatingUtxos: false,
};
},
[ACTIONS.TIP_CLAIM_MASS_STARTED]: (state: WalletState) => {
return {
...state,
massClaimingTips: true,
};
},
[ACTIONS.TIP_CLAIM_MASS_COMPLETED]: (state: WalletState, action) => {
const { txid } = action.data;
return {
...state,
massClaimingTips: false,
pendingMassClaimTxid: txid,
};
},
[ACTIONS.TIP_CLAIM_MASS_FAILED]: (state: WalletState, action) => {
return {
...state,
massClaimingTips: false,
};
},
[ACTIONS.PENDING_CONSOLIDATED_TXOS_UPDATED]: (state: WalletState, action) => {
const { pendingTxos, pendingMassClaimTxid, pendingConsolidateTxid } = state;
const { txids, remove } = action.data;
if (remove) {
const newTxos = pendingTxos.filter(txo => !txids.includes(txo));
const newPendingMassClaimTxid = txids.includes(pendingMassClaimTxid)
? undefined
: pendingMassClaimTxid;
const newPendingConsolidateTxid = txids.includes(pendingConsolidateTxid)
? undefined
: pendingConsolidateTxid;
return {
...state,
pendingTxos: newTxos,
pendingMassClaimTxid: newPendingMassClaimTxid,
pendingConsolidateTxid: newPendingConsolidateTxid,
};
} else {
const newPendingSet = new Set([...pendingTxos, ...txids]);
return { ...state, pendingTxos: Array.from(newPendingSet) };
}
},
[ACTIONS.UPDATE_TXO_FETCH_PARAMS]: (state: WalletState, action) => { [ACTIONS.UPDATE_TXO_FETCH_PARAMS]: (state: WalletState, action) => {
return { return {
@ -305,7 +186,7 @@ export const walletReducer = handleActions(
return { return {
...state, ...state,
supports: byOutpoint, supports: byOutpoint,
abandoningSupportsByOutpoint: currentlyAbandoning, abandoningSupportsById: currentlyAbandoning,
}; };
}, },
@ -324,15 +205,10 @@ export const walletReducer = handleActions(
}, },
[ACTIONS.ABANDON_CLAIM_SUPPORT_COMPLETED]: (state: WalletState, action: any): WalletState => { [ACTIONS.ABANDON_CLAIM_SUPPORT_COMPLETED]: (state: WalletState, action: any): WalletState => {
const { const { claimId, type, txid, effective }: { claimId: string, type: string, txid: string, effective: string } = action.data;
claimId,
type,
txid,
effective,
}: { claimId: string, type: string, txid: string, effective: string } = action.data;
const pendingtxs = Object.assign({}, state.pendingSupportTransactions); const pendingtxs = Object.assign({}, state.pendingSupportTransactions);
pendingtxs[claimId] = { txid, type, effective }; pendingtxs[claimId] = {txid, type, effective};
return { return {
...state, ...state,
@ -349,6 +225,7 @@ export const walletReducer = handleActions(
}, },
[ACTIONS.PENDING_SUPPORTS_UPDATED]: (state: WalletState, action: any): WalletState => { [ACTIONS.PENDING_SUPPORTS_UPDATED]: (state: WalletState, action: any): WalletState => {
return { return {
...state, ...state,
pendingSupportTransactions: action.data, pendingSupportTransactions: action.data,

View file

@ -1,30 +1,21 @@
// @flow // @flow
import { normalizeURI, parseURI } from 'lbryURI'; import { normalizeURI, buildURI, parseURI } from 'lbryURI';
import {
selectResolvedSearchResultsByQuery,
selectSearchUrisByQuery,
} from 'redux/selectors/search';
import { selectSupportsByOutpoint } from 'redux/selectors/wallet'; import { selectSupportsByOutpoint } from 'redux/selectors/wallet';
import { createSelector } from 'reselect'; import { createSelector } from 'reselect';
import { isClaimNsfw, filterClaims } from 'util/claim'; import { isClaimNsfw, createNormalizedClaimSearchKey } from 'util/claim';
import * as CLAIM from 'constants/claim'; import { getSearchQueryString } from 'util/query-params';
import { PAGE_SIZE } from 'constants/claim';
const selectState = state => state.claims || {}; const selectState = state => state.claims || {};
export const selectById = createSelector( export const selectClaimsById = createSelector(
selectState, selectState,
state => state.byId || {} state => state.byId || {}
); );
export const selectPendingClaimsById = createSelector(
selectState,
state => state.pendingById || {}
);
export const selectClaimsById = createSelector(
selectById,
selectPendingClaimsById,
(byId, pendingById) => {
return Object.assign(byId, pendingById); // do I need merged to keep metadata?
}
);
export const selectClaimIdsByUri = createSelector( export const selectClaimIdsByUri = createSelector(
selectState, selectState,
state => state.claimsByUri || {} state => state.claimsByUri || {}
@ -56,9 +47,10 @@ export const selectRepostError = createSelector(
); );
export const selectClaimsByUri = createSelector( export const selectClaimsByUri = createSelector(
selectClaimIdsByUri, selectState,
selectClaimsById, selectClaimsById,
(byUri, byId) => { (state, byId) => {
const byUri = state.claimsByUri || {};
const claims = {}; const claims = {};
Object.keys(byUri).forEach(uri => { Object.keys(byUri).forEach(uri => {
@ -83,91 +75,82 @@ export const selectAllClaimsByChannel = createSelector(
state => state.paginatedClaimsByChannel || {} state => state.paginatedClaimsByChannel || {}
); );
export const selectPendingIds = createSelector( export const selectPendingById = createSelector(
selectState, selectState,
state => Object.keys(state.pendingById) || [] state => state.pendingById || {}
); );
export const selectPendingClaims = createSelector( export const selectPendingClaims = createSelector(
selectPendingClaimsById, selectState,
pendingById => Object.values(pendingById) state => Object.values(state.pendingById || [])
); );
export const makeSelectClaimIsPending = (uri: string) => export const makeSelectClaimIsPending = (uri: string) =>
createSelector( createSelector(
selectClaimIdsByUri, selectPendingById,
selectPendingClaimsById, pendingById => {
(idsByUri, pendingById) => { let claimId;
const claimId = idsByUri[normalizeURI(uri)];
try {
const { isChannel, channelClaimId, streamClaimId } = parseURI(uri);
claimId = isChannel ? channelClaimId : streamClaimId;
} catch (e) {}
if (claimId) { if (claimId) {
return Boolean(pendingById[claimId]); return Boolean(pendingById[claimId]);
} }
return false;
} }
); );
export const makeSelectClaimIdIsPending = (claimId: string) => export const makeSelectPendingByUri = (uri: string) =>
createSelector( createSelector(
selectPendingClaimsById, selectPendingById,
pendingById => { pendingById => {
return Boolean(pendingById[claimId]); const { isChannel, channelClaimId, streamClaimId } = parseURI(uri);
const claimId = isChannel ? channelClaimId : streamClaimId;
return pendingById[claimId];
} }
); );
export const makeSelectClaimIdForUri = (uri: string) =>
createSelector(
selectClaimIdsByUri,
claimIds => claimIds[uri]
);
export const selectReflectingById = createSelector(
selectState,
state => state.reflectingById
);
export const makeSelectClaimForClaimId = (claimId: string) =>
createSelector(
selectClaimsById,
byId => byId[claimId]
);
export const makeSelectClaimForUri = (uri: string, returnRepost: boolean = true) => export const makeSelectClaimForUri = (uri: string, returnRepost: boolean = true) =>
createSelector( createSelector(
selectClaimIdsByUri, selectClaimsByUri,
selectClaimsById, selectPendingById,
(byUri, byId) => { (byUri, pendingById) => {
let validUri; // Check if a claim is pending first
// It won't be in claimsByUri because resolving it will return nothing
let valid;
let channelClaimId; let channelClaimId;
let streamClaimId; let streamClaimId;
let isChannel; let isChannel;
try { try {
({ isChannel, channelClaimId, streamClaimId } = parseURI(uri)); ({ isChannel, channelClaimId, streamClaimId } = parseURI(uri));
validUri = true; valid = true;
} catch (e) {} } catch (e) {}
if (validUri && byUri) { if (valid && byUri) {
const claimId = uri && byUri[normalizeURI(uri)]; const claimId = isChannel ? channelClaimId : streamClaimId;
const claim = byId[claimId]; const pendingClaim = pendingById[claimId];
// Make sure to return the claim as is so apps can check if it's been resolved before (null) or still needs to be resolved (undefined) if (pendingClaim) {
if (claimId === null) { return pendingClaim;
return null;
} else if (claimId === undefined) {
return undefined;
} }
const repostedClaim = claim && claim.reposted_claim; const claim = byUri[normalizeURI(uri)];
if (claim === undefined || claim === null) {
// Make sure to return the claim as is so apps can check if it's been resolved before (null) or still needs to be resolved (undefined)
return claim;
}
const repostedClaim = claim.reposted_claim;
if (repostedClaim && returnRepost) { if (repostedClaim && returnRepost) {
const channelUrl = const channelUrl = claim.signing_channel && claim.signing_channel.canonical_url;
claim.signing_channel &&
(claim.signing_channel.canonical_url || claim.signing_channel.permanent_url);
return { return {
...repostedClaim, ...repostedClaim,
repost_url: normalizeURI(uri), repost_url: uri,
repost_channel_url: channelUrl, repost_channel_url: channelUrl,
repost_bid_amount: claim && claim.meta && claim.meta.effective_amount,
}; };
} else { } else {
return claim; return claim;
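A short sketch (values invented) of the resolve-state convention the comment above relies on: claimsByUri holds undefined for a URI that has never been resolved, null for one that resolved to nothing, and a claim id otherwise, and makeSelectClaimForUri passes that distinction straight through.

// claimsByUri['lbry://never-resolved']  -> undefined  (resolve has not run yet)
// claimsByUri['lbry://resolved-empty']  -> null       (resolve ran; no claim exists)
// claimsByUri['lbry://resolved-claim']  -> 'abc123'   (claim id, then looked up in byId)
const claim = makeSelectClaimForUri('lbry://resolved-empty')(state);
// claim === null, so a caller can render "not found" instead of resolving again.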
@ -201,22 +184,6 @@ export const selectAbandoningIds = createSelector(
state => Object.keys(state.abandoningById || {}) state => Object.keys(state.abandoningById || {})
); );
export const makeSelectAbandoningClaimById = (claimId: string) =>
createSelector(
selectAbandoningIds,
ids => ids.includes(claimId)
);
export const makeSelectIsAbandoningClaimForUri = (uri: string) =>
createSelector(
selectClaimIdsByUri,
selectAbandoningIds,
(claimIdsByUri, abandoningById) => {
const claimId = claimIdsByUri[normalizeURI(uri)];
return abandoningById.indexOf(claimId) >= 0;
}
);
export const selectMyActiveClaims = createSelector( export const selectMyActiveClaims = createSelector(
selectMyClaimsRaw, selectMyClaimsRaw,
selectAbandoningIds, selectAbandoningIds,
@ -245,74 +212,11 @@ export const makeSelectClaimIsMine = (rawUri: string) => {
return false; return false;
} }
return ( return claims && claims[uri] && claims[uri].claim_id && myClaims.has(claims[uri].claim_id);
claims &&
claims[uri] &&
(claims[uri].is_my_output || (claims[uri].claim_id && myClaims.has(claims[uri].claim_id)))
);
} }
); );
}; };
export const selectMyPurchases = createSelector(
selectState,
state => state.myPurchases
);
export const selectPurchaseUriSuccess = createSelector(
selectState,
state => state.purchaseUriSuccess
);
export const selectMyPurchasesCount = createSelector(
selectState,
state => state.myPurchasesPageTotalResults
);
export const selectIsFetchingMyPurchases = createSelector(
selectState,
state => state.fetchingMyPurchases
);
export const selectFetchingMyPurchasesError = createSelector(
selectState,
state => state.fetchingMyPurchasesError
);
export const makeSelectMyPurchasesForPage = (query: ?string, page: number = 1) =>
createSelector(
selectMyPurchases,
selectClaimsByUri,
(myPurchases: Array<string>, claimsByUri: { [string]: Claim }) => {
if (!myPurchases) {
return undefined;
}
if (!query) {
// ensure no duplicates from double purchase bugs
return [...new Set(myPurchases)];
}
const fileInfos = myPurchases.map(uri => claimsByUri[uri]);
const matchingFileInfos = filterClaims(fileInfos, query);
const start = (Number(page) - 1) * Number(CLAIM.PAGE_SIZE);
const end = Number(page) * Number(CLAIM.PAGE_SIZE);
return matchingFileInfos && matchingFileInfos.length
? matchingFileInfos
.slice(start, end)
.map(fileInfo => fileInfo.canonical_url || fileInfo.permanent_url)
: [];
}
);
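As a worked example of the slicing above (the real page size comes from constants/claim; 20 here is made up): for page 3, start is (3 - 1) * 20 = 40 and end is 3 * 20 = 60, so the selector returns matchingFileInfos.slice(40, 60), i.e. items 41 through 60.

const PAGE_SIZE = 20; // hypothetical value, for illustration only
const page = 3;
const start = (Number(page) - 1) * Number(PAGE_SIZE); // 40
const end = Number(page) * Number(PAGE_SIZE);         // 60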
export const makeSelectClaimWasPurchased = (uri: string) =>
createSelector(
makeSelectClaimForUri(uri),
claim => {
return claim && claim.purchase_receipt !== undefined;
}
);
export const selectAllFetchingChannelClaims = createSelector( export const selectAllFetchingChannelClaims = createSelector(
selectState, selectState,
state => state.fetchingChannelClaims || {} state => state.fetchingChannelClaims || {}
@ -338,7 +242,6 @@ export const makeSelectClaimsInChannelForPage = (uri: string, page?: number) =>
} }
); );
// THIS IS LEFT OVER FROM ONE TAB CHANNEL_CONTENT
export const makeSelectTotalClaimsInChannelSearch = (uri: string) => export const makeSelectTotalClaimsInChannelSearch = (uri: string) =>
createSelector( createSelector(
selectClaimsById, selectClaimsById,
@ -349,7 +252,6 @@ export const makeSelectTotalClaimsInChannelSearch = (uri: string) =>
} }
); );
// THIS IS LEFT OVER FROM ONE_TAB CHANNEL CONTENT
export const makeSelectTotalPagesInChannelSearch = (uri: string) => export const makeSelectTotalPagesInChannelSearch = (uri: string) =>
createSelector( createSelector(
selectClaimsById, selectClaimsById,
@ -360,6 +262,21 @@ export const makeSelectTotalPagesInChannelSearch = (uri: string) =>
} }
); );
export const makeSelectClaimsInChannelForCurrentPageState = (uri: string) =>
createSelector(
selectClaimsById,
selectAllClaimsByChannel,
selectCurrentChannelPage,
(byId, allClaims, page) => {
const byChannel = allClaims[uri] || {};
const claimIds = byChannel[page || 1];
if (!claimIds) return claimIds;
return claimIds.map(claimId => byId[claimId]);
}
);
export const makeSelectMetadataForUri = (uri: string) => export const makeSelectMetadataForUri = (uri: string) =>
createSelector( createSelector(
makeSelectClaimForUri(uri), makeSelectClaimForUri(uri),
@ -393,8 +310,8 @@ export const makeSelectDateForUri = (uri: string) =>
(claim.value.release_time (claim.value.release_time
? claim.value.release_time * 1000 ? claim.value.release_time * 1000
: claim.meta && claim.meta.creation_timestamp : claim.meta && claim.meta.creation_timestamp
? claim.meta.creation_timestamp * 1000 ? claim.meta.creation_timestamp * 1000
: null); : null);
if (!timestamp) { if (!timestamp) {
return undefined; return undefined;
} }
@ -411,19 +328,6 @@ export const makeSelectAmountForUri = (uri: string) =>
} }
); );
export const makeSelectEffectiveAmountForUri = (uri: string) =>
createSelector(
makeSelectClaimForUri(uri, false),
claim => {
return (
claim &&
claim.meta &&
typeof claim.meta.effective_amount === 'string' &&
Number(claim.meta.effective_amount)
);
}
);
export const makeSelectContentTypeForUri = (uri: string) => export const makeSelectContentTypeForUri = (uri: string) =>
createSelector( createSelector(
makeSelectClaimForUri(uri), makeSelectClaimForUri(uri),
@ -438,9 +342,7 @@ export const makeSelectThumbnailForUri = (uri: string) =>
makeSelectClaimForUri(uri), makeSelectClaimForUri(uri),
claim => { claim => {
const thumbnail = claim && claim.value && claim.value.thumbnail; const thumbnail = claim && claim.value && claim.value.thumbnail;
return thumbnail && thumbnail.url return thumbnail && thumbnail.url ? thumbnail.url.trim() : undefined;
? thumbnail.url.trim().replace(/^http:\/\//i, 'https://')
: undefined;
} }
); );
@ -449,7 +351,7 @@ export const makeSelectCoverForUri = (uri: string) =>
makeSelectClaimForUri(uri), makeSelectClaimForUri(uri),
claim => { claim => {
const cover = claim && claim.value && claim.value.cover; const cover = claim && claim.value && claim.value.cover;
return cover && cover.url ? cover.url.trim().replace(/^http:\/\//i, 'https://') : undefined; return cover && cover.url ? cover.url.trim() : undefined;
} }
); );
@ -458,33 +360,12 @@ export const selectIsFetchingClaimListMine = createSelector(
state => state.isFetchingClaimListMine state => state.isFetchingClaimListMine
); );
export const selectMyClaimsPage = createSelector(
selectState,
state => state.myClaimsPageResults || []
);
export const selectMyClaimsPageNumber = createSelector(
selectState,
state => (state.claimListMinePage && state.claimListMinePage.items) || [],
state => (state.txoPage && state.txoPage.page) || 1
);
export const selectMyClaimsPageItemCount = createSelector(
selectState,
state => state.myClaimsPageTotalResults || 1
);
export const selectFetchingMyClaimsPageError = createSelector(
selectState,
state => state.fetchingClaimListMinePageError
);
export const selectMyClaims = createSelector( export const selectMyClaims = createSelector(
selectMyActiveClaims, selectMyActiveClaims,
selectClaimsById, selectClaimsById,
selectAbandoningIds, selectAbandoningIds,
(myClaimIds, byId, abandoningIds) => { selectPendingClaims,
(myClaimIds, byId, abandoningIds, pendingClaims) => {
const claims = []; const claims = [];
myClaimIds.forEach(id => { myClaimIds.forEach(id => {
@ -493,16 +374,14 @@ export const selectMyClaims = createSelector(
if (claim && abandoningIds.indexOf(id) === -1) claims.push(claim); if (claim && abandoningIds.indexOf(id) === -1) claims.push(claim);
}); });
return [...claims]; return [...claims, ...pendingClaims];
} }
); );
export const selectMyClaimsWithoutChannels = createSelector( export const selectMyClaimsWithoutChannels = createSelector(
selectMyClaims, selectMyClaims,
myClaims => myClaims =>
myClaims myClaims.filter(claim => !claim.name.match(/^@/)).sort((a, b) => a.timestamp - b.timestamp)
.filter(claim => claim && !claim.name.match(/^@/))
.sort((a, b) => a.timestamp - b.timestamp)
); );
export const selectMyClaimUrisWithoutChannels = createSelector( export const selectMyClaimUrisWithoutChannels = createSelector(
@ -546,11 +425,6 @@ export const selectFetchingMyChannels = createSelector(
state => state.fetchingMyChannels state => state.fetchingMyChannels
); );
export const selectFetchingMyCollections = createSelector(
selectState,
state => state.fetchingMyCollections
);
export const selectMyChannelClaims = createSelector( export const selectMyChannelClaims = createSelector(
selectState, selectState,
selectClaimsById, selectClaimsById,
@ -572,16 +446,6 @@ export const selectMyChannelClaims = createSelector(
} }
); );
export const selectMyChannelUrls = createSelector(
selectMyChannelClaims,
claims => (claims ? claims.map(claim => claim.canonical_url || claim.permanent_url) : undefined)
);
export const selectMyCollectionIds = createSelector(
selectState,
state => state.myCollectionClaims
);
export const selectResolvingUris = createSelector( export const selectResolvingUris = createSelector(
selectState, selectState,
state => state.resolvingUris || [] state => state.resolvingUris || []
@ -608,35 +472,16 @@ export const selectChannelClaimCounts = createSelector(
state => state.channelClaimCounts || {} state => state.channelClaimCounts || {}
); );
export const makeSelectPendingClaimForUri = (uri: string) =>
createSelector(
selectPendingClaimsById,
pendingById => {
let uriStreamName;
let uriChannelName;
try {
({ streamName: uriStreamName, channelName: uriChannelName } = parseURI(uri));
} catch (e) {
return null;
}
const pendingClaims = (Object.values(pendingById): any);
const matchingClaim = pendingClaims.find((claim: GenericClaim) => {
return claim.normalized_name === uriChannelName || claim.normalized_name === uriStreamName;
});
return matchingClaim || null;
}
);
export const makeSelectTotalItemsForChannel = (uri: string) => export const makeSelectTotalItemsForChannel = (uri: string) =>
createSelector( createSelector(
selectChannelClaimCounts, selectChannelClaimCounts,
byUri => byUri && byUri[normalizeURI(uri)] byUri => byUri && byUri[uri]
); );
export const makeSelectTotalPagesForChannel = (uri: string, pageSize: number = 10) => export const makeSelectTotalPagesForChannel = (uri: string, pageSize: number = 10) =>
createSelector( createSelector(
selectChannelClaimCounts, selectChannelClaimCounts,
byUri => byUri && byUri[uri] && Math.ceil(byUri[normalizeURI(uri)] / pageSize) byUri => byUri && byUri[uri] && Math.ceil(byUri[uri] / pageSize)
); );
export const makeSelectNsfwCountFromUris = (uris: Array<string>) => export const makeSelectNsfwCountFromUris = (uris: Array<string>) =>
@ -652,6 +497,27 @@ export const makeSelectNsfwCountFromUris = (uris: Array<string>) =>
}, 0) }, 0)
); );
export const makeSelectNsfwCountForChannel = (uri: string) =>
createSelector(
selectClaimsById,
selectAllClaimsByChannel,
selectCurrentChannelPage,
(byId, allClaims, page) => {
const byChannel = allClaims[uri] || {};
const claimIds = byChannel[page || 1];
if (!claimIds) return 0;
return claimIds.reduce((acc, claimId) => {
const claim = byId[claimId];
if (isClaimNsfw(claim)) {
return acc + 1;
}
return acc;
}, 0);
}
);
export const makeSelectOmittedCountForChannel = (uri: string) => export const makeSelectOmittedCountForChannel = (uri: string) =>
createSelector( createSelector(
makeSelectTotalItemsForChannel(uri), makeSelectTotalItemsForChannel(uri),
@ -679,6 +545,53 @@ export const makeSelectClaimIsNsfw = (uri: string): boolean =>
} }
); );
export const makeSelectRecommendedContentForUri = (uri: string) =>
createSelector(
makeSelectClaimForUri(uri),
selectSearchUrisByQuery,
makeSelectClaimIsNsfw(uri),
(claim, searchUrisByQuery, isMature) => {
const atVanityURI = !uri.includes('#');
let recommendedContent;
if (claim) {
// always grab full URL - this can change once search returns canonical
const currentUri = buildURI({ streamClaimId: claim.claim_id, streamName: claim.name });
const { title } = claim.value;
if (!title) {
return;
}
const options: {
related_to?: string,
nsfw?: boolean,
isBackgroundSearch?: boolean,
} = { related_to: claim.claim_id, isBackgroundSearch: true };
if (!isMature) {
options['nsfw'] = false;
}
const searchQuery = getSearchQueryString(title.replace(/\//, ' '), options);
let searchUris = searchUrisByQuery[searchQuery];
if (searchUris) {
searchUris = searchUris.filter(searchUri => searchUri !== currentUri);
recommendedContent = searchUris;
}
}
return recommendedContent;
}
);
export const makeSelectFirstRecommendedFileForUri = (uri: string) =>
createSelector(
makeSelectRecommendedContentForUri(uri),
recommendedContent => (recommendedContent ? recommendedContent[0] : null)
);
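A hedged usage sketch of the recommendation selectors above (the URI and state are placeholders): the memoized selector simply reads whatever the cached background search produced for the claim's title, minus the claim's own URL.

const selectRecommended = makeSelectRecommendedContentForUri('lbry://some-video#abc123');
const recommendedUris = selectRecommended(state); // undefined until the background search result is cached
const firstRecommended = makeSelectFirstRecommendedFileForUri('lbry://some-video#abc123')(state);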
// Returns the associated channel uri for a given claim uri // Returns the associated channel uri for a given claim uri
// accepts a regular claim uri lbry://something // accepts a regular claim uri lbry://something
// returns the channel uri that created this claim lbry://@channel // returns the channel uri that created this claim lbry://@channel
@ -700,29 +613,6 @@ export const makeSelectChannelForClaimUri = (uri: string, includePrefix: boolean
} }
); );
export const makeSelectChannelPermUrlForClaimUri = (uri: string, includePrefix: boolean = false) =>
createSelector(
makeSelectClaimForUri(uri),
(claim: ?Claim) => {
if (claim && claim.value_type === 'channel') {
return claim.permanent_url;
}
if (!claim || !claim.signing_channel || !claim.is_channel_signature_valid) {
return null;
}
return claim.signing_channel.permanent_url;
}
);
export const makeSelectMyChannelPermUrlForName = (name: string) =>
createSelector(
selectMyChannelClaims,
claims => {
const matchingClaim = claims && claims.find(claim => claim.name === name);
return matchingClaim ? matchingClaim.permanent_url : null;
}
);
export const makeSelectTagsForUri = (uri: string) => export const makeSelectTagsForUri = (uri: string) =>
createSelector( createSelector(
makeSelectMetadataForUri(uri), makeSelectMetadataForUri(uri),
@ -774,7 +664,7 @@ export const makeSelectSupportsForUri = (uri: string) =>
selectSupportsByOutpoint, selectSupportsByOutpoint,
makeSelectClaimForUri(uri), makeSelectClaimForUri(uri),
(byOutpoint, claim: ?StreamClaim) => { (byOutpoint, claim: ?StreamClaim) => {
if (!claim || !claim.is_my_output) { if (!claim || !claim.is_mine) {
return null; return null;
} }
@ -801,22 +691,12 @@ export const selectUpdateChannelError = createSelector(
state => state.updateChannelError state => state.updateChannelError
); );
export const makeSelectReflectingClaimForUri = (uri: string) =>
createSelector(
selectClaimIdsByUri,
selectReflectingById,
(claimIdsByUri, reflectingById) => {
const claimId = claimIdsByUri[normalizeURI(uri)];
return reflectingById[claimId];
}
);
export const makeSelectMyStreamUrlsForPage = (page: number = 1) => export const makeSelectMyStreamUrlsForPage = (page: number = 1) =>
createSelector( createSelector(
selectMyClaimUrisWithoutChannels, selectMyClaimUrisWithoutChannels,
urls => { urls => {
const start = (Number(page) - 1) * Number(CLAIM.PAGE_SIZE); const start = (Number(page) - 1) * Number(PAGE_SIZE);
const end = Number(page) * Number(CLAIM.PAGE_SIZE); const end = Number(page) * Number(PAGE_SIZE);
return urls && urls.length ? urls.slice(start, end) : []; return urls && urls.length ? urls.slice(start, end) : [];
} }
@ -827,96 +707,53 @@ export const selectMyStreamUrlsCount = createSelector(
channels => channels.length channels => channels.length
); );
export const makeSelectTagInClaimOrChannelForUri = (uri: string, tag: string) => export const makeSelectResolvedRecommendedContentForUri = (
uri: string,
size: number,
claimId: string,
claimName: string,
claimTitle: string
) =>
createSelector( createSelector(
makeSelectClaimForUri(uri), makeSelectClaimForUri(uri),
claim => { selectResolvedSearchResultsByQuery,
const claimTags = (claim && claim.value && claim.value.tags) || []; makeSelectClaimIsNsfw(uri),
const channelTags = (claim, resolvedResultsByQuery, isMature) => {
(claim && const atVanityURI = !uri.includes('#');
claim.signing_channel &&
claim.signing_channel.value &&
claim.signing_channel.value.tags) ||
[];
return claimTags.includes(tag) || channelTags.includes(tag);
}
);
export const makeSelectClaimHasSource = (uri: string) => let currentUri;
createSelector( let recommendedContent;
makeSelectClaimForUri(uri), let title;
claim => { if (claim) {
if (!claim) { // always grab full URL - this can change once search returns canonical
return false; currentUri = buildURI({ streamClaimId: claim.claim_id, streamName: claim.name });
title = claim.value ? claim.value.title : null;
} else {
// for cases on mobile where the claim may not have been resolved yet
currentUri = buildURI({ streamClaimId: claimId, streamName: claimName });
title = claimTitle;
} }
return Boolean(claim.value.source); if (!title) {
} return;
);
export const makeSelectClaimIsStreamPlaceholder = (uri: string) =>
createSelector(
makeSelectClaimForUri(uri),
claim => {
if (!claim) {
return false;
} }
return Boolean(claim.value_type === 'stream' && !claim.value.source); const options: {
} related_to?: string,
); nsfw?: boolean,
isBackgroundSearch?: boolean,
} = { related_to: claim ? claim.claim_id : claimId, size, isBackgroundSearch: false };
export const makeSelectTotalStakedAmountForChannelUri = (uri: string) => const searchQuery = getSearchQueryString(title.replace(/\//, ' '), options);
createSelector( let results = resolvedResultsByQuery[searchQuery];
makeSelectClaimForUri(uri), if (results) {
claim => { results = results.filter(
if (!claim || !claim.amount || !claim.meta || !claim.meta.support_amount) { result =>
return 0; buildURI({ streamClaimId: result.claimId, streamName: result.name }) !== currentUri
);
recommendedContent = results;
} }
return parseFloat(claim.amount) + parseFloat(claim.meta.support_amount) || 0; return recommendedContent;
} }
); );
export const makeSelectStakedLevelForChannelUri = (uri: string) =>
createSelector(
makeSelectTotalStakedAmountForChannelUri(uri),
amount => {
let level = 1;
switch (true) {
case amount >= CLAIM.LEVEL_2_STAKED_AMOUNT && amount < CLAIM.LEVEL_3_STAKED_AMOUNT:
level = 2;
break;
case amount >= CLAIM.LEVEL_3_STAKED_AMOUNT && amount < CLAIM.LEVEL_4_STAKED_AMOUNT:
level = 3;
break;
case amount >= CLAIM.LEVEL_4_STAKED_AMOUNT && amount < CLAIM.LEVEL_5_STAKED_AMOUNT:
level = 4;
break;
case amount >= CLAIM.LEVEL_5_STAKED_AMOUNT:
level = 5;
break;
}
return level;
}
);
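A worked example of the two selectors above (the LEVEL_*_STAKED_AMOUNT thresholds are whatever constants/claim defines; the numbers here are invented): a channel with amount '1.0' and support_amount '2.5' has a total stake of 3.5, which the switch then maps onto a 1-5 level.

const channelClaim = { amount: '1.0', meta: { support_amount: '2.5' } };
const totalStake =
  parseFloat(channelClaim.amount) + parseFloat(channelClaim.meta.support_amount) || 0; // 3.5
// If LEVEL_2_STAKED_AMOUNT were 1 and LEVEL_3_STAKED_AMOUNT were 50, this channel would be level 2.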
export const selectUpdatingCollection = createSelector(
selectState,
state => state.updatingCollection
);
export const selectUpdateCollectionError = createSelector(
selectState,
state => state.updateCollectionError
);
export const selectCreatingCollection = createSelector(
selectState,
state => state.creatingCollection
);
export const selectCreateCollectionError = createSelector(
selectState,
state => state.createCollectionError
);

View file

@ -1,311 +0,0 @@
// @flow
import fromEntries from '@ungap/from-entries';
import { createSelector } from 'reselect';
import {
selectMyCollectionIds,
makeSelectClaimForUri,
selectClaimsByUri,
} from 'redux/selectors/claims';
import { parseURI } from 'lbryURI';
const selectState = (state: { collections: CollectionState }) => state.collections;
export const selectSavedCollectionIds = createSelector(
selectState,
collectionState => collectionState.saved
);
export const selectBuiltinCollections = createSelector(
selectState,
state => state.builtin
);
export const selectResolvedCollections = createSelector(
selectState,
state => state.resolved
);
export const selectMyUnpublishedCollections = createSelector(
selectState,
state => state.unpublished
);
export const selectMyEditedCollections = createSelector(
selectState,
state => state.edited
);
export const selectPendingCollections = createSelector(
selectState,
state => state.pending
);
export const makeSelectEditedCollectionForId = (id: string) =>
createSelector(
selectMyEditedCollections,
eLists => eLists[id]
);
export const makeSelectPendingCollectionForId = (id: string) =>
createSelector(
selectPendingCollections,
pending => pending[id]
);
export const makeSelectPublishedCollectionForId = (id: string) =>
createSelector(
selectResolvedCollections,
rLists => rLists[id]
);
export const makeSelectUnpublishedCollectionForId = (id: string) =>
createSelector(
selectMyUnpublishedCollections,
rLists => rLists[id]
);
export const makeSelectCollectionIsMine = (id: string) =>
createSelector(
selectMyCollectionIds,
selectMyUnpublishedCollections,
selectBuiltinCollections,
(publicIds, privateIds, builtinIds) => {
return Boolean(publicIds.includes(id) || privateIds[id] || builtinIds[id]);
}
);
export const selectMyPublishedCollections = createSelector(
selectResolvedCollections,
selectPendingCollections,
selectMyEditedCollections,
selectMyCollectionIds,
(resolved, pending, edited, myIds) => {
// all resolved in myIds, plus those in pending and edited
const myPublishedCollections = fromEntries(
Object.entries(pending).concat(
Object.entries(resolved).filter(
([key, val]) =>
myIds.includes(key) &&
// $FlowFixMe
!pending[key]
)
)
);
// now add in edited:
Object.entries(edited).forEach(([id, item]) => {
myPublishedCollections[id] = item;
});
return myPublishedCollections;
}
);
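Loosely sketched (ids invented), the merge above means a pending edit wins over its resolved counterpart, and edited collections win over both:

// resolved: { listA: {...}, listB: {...} }   myIds: ['listA', 'listB']
// pending:  { listB: {...pendingB} }         edited: { listA: {...editedA} }
// result:   { listB: pendingB, listA: editedA }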
export const selectMyPublishedMixedCollections = createSelector(
selectMyPublishedCollections,
published => {
const myCollections = fromEntries(
// $FlowFixMe
Object.entries(published).filter(([key, collection]) => {
// $FlowFixMe
return collection.type === 'collection';
})
);
return myCollections;
}
);
export const selectMyPublishedPlaylistCollections = createSelector(
selectMyPublishedCollections,
published => {
const myCollections = fromEntries(
// $FlowFixMe
Object.entries(published).filter(([key, collection]) => {
// $FlowFixMe
return collection.type === 'playlist';
})
);
return myCollections;
}
);
export const makeSelectMyPublishedCollectionForId = (id: string) =>
createSelector(
selectMyPublishedCollections,
myPublishedCollections => myPublishedCollections[id]
);
// export const selectSavedCollections = createSelector(
// selectResolvedCollections,
// selectSavedCollectionIds,
// (resolved, myIds) => {
// const mySavedCollections = fromEntries(
// Object.entries(resolved).filter(([key, val]) => myIds.includes(key))
// );
// return mySavedCollections;
// }
// );
export const makeSelectIsResolvingCollectionForId = (id: string) =>
createSelector(
selectState,
state => {
return state.isResolvingCollectionById[id];
}
);
export const makeSelectCollectionForId = (id: string) =>
createSelector(
selectBuiltinCollections,
selectResolvedCollections,
selectMyUnpublishedCollections,
selectMyEditedCollections,
selectPendingCollections,
(bLists, rLists, uLists, eLists, pLists) => {
const collection = bLists[id] || uLists[id] || eLists[id] || pLists[id] || rLists[id];
return collection;
}
);
export const makeSelectClaimUrlInCollection = (url: string) =>
createSelector(
selectBuiltinCollections,
selectMyPublishedCollections,
selectMyUnpublishedCollections,
selectMyEditedCollections,
selectPendingCollections,
(bLists, myRLists, uLists, eLists, pLists) => {
const collections = [bLists, uLists, eLists, myRLists, pLists];
const itemsInCollections = [];
collections.map(list => {
Object.entries(list).forEach(([key, value]) => {
// $FlowFixMe
value.items.map(item => {
itemsInCollections.push(item);
});
});
});
return itemsInCollections.includes(url);
}
);
export const makeSelectCollectionForIdHasClaimUrl = (id: string, url: string) =>
createSelector(
makeSelectCollectionForId(id),
collection => collection && collection.items.includes(url)
);
export const makeSelectUrlsForCollectionId = (id: string) =>
createSelector(
makeSelectCollectionForId(id),
collection => collection && collection.items
);
export const makeSelectClaimIdsForCollectionId = (id: string) =>
createSelector(
makeSelectCollectionForId(id),
collection => {
const items = (collection && collection.items) || [];
const ids = items.map(item => {
const { claimId } = parseURI(item);
return claimId;
});
return ids;
}
);
export const makeSelectIndexForUrlInCollection = (url: string, id: string) =>
createSelector(
state => state.content.shuffleList,
makeSelectUrlsForCollectionId(id),
makeSelectClaimForUri(url),
(shuffleState, urls, claim) => {
const shuffleUrls = shuffleState && shuffleState.collectionId === id && shuffleState.newUrls;
const listUrls = shuffleUrls || urls;
const index = listUrls && listUrls.findIndex(u => u === url);
if (index > -1) {
return index;
} else if (claim) {
const index = listUrls && listUrls.findIndex(u => u === claim.permanent_url);
if (index > -1) return index;
return claim;
}
return null;
}
);
export const makeSelectPreviousUrlForCollectionAndUrl = (id: string, url: string) =>
createSelector(
state => state.content.shuffleList,
state => state.content.loopList,
makeSelectIndexForUrlInCollection(url, id),
makeSelectUrlsForCollectionId(id),
(shuffleState, loopState, index, urls) => {
const loopList = loopState && loopState.collectionId === id && loopState.loop;
const shuffleUrls = shuffleState && shuffleState.collectionId === id && shuffleState.newUrls;
if (index > -1) {
const listUrls = shuffleUrls || urls;
let nextUrl;
if (index === 0 && loopList) {
nextUrl = listUrls[listUrls.length - 1];
} else {
nextUrl = listUrls[index - 1];
}
return nextUrl || null;
} else {
return null;
}
}
);
export const makeSelectNextUrlForCollectionAndUrl = (id: string, url: string) =>
createSelector(
state => state.content.shuffleList,
state => state.content.loopList,
makeSelectIndexForUrlInCollection(url, id),
makeSelectUrlsForCollectionId(id),
(shuffleState, loopState, index, urls) => {
const loopList = loopState && loopState.collectionId === id && loopState.loop;
const shuffleUrls = shuffleState && shuffleState.collectionId === id && shuffleState.newUrls;
if (index > -1) {
const listUrls = shuffleUrls || urls;
// We'll get the next playable url
let remainingUrls = listUrls.slice(index + 1);
if (!remainingUrls.length && loopList) {
remainingUrls = listUrls.slice(0);
}
const nextUrl = remainingUrls && remainingUrls[0];
return nextUrl || null;
} else {
return null;
}
}
);
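A quick illustration of the loop handling above: with looping enabled and no shuffle, stepping past the final item wraps back to the start of the list.

const listUrls = ['lbry://one', 'lbry://two', 'lbry://three'];
const index = 2;                                   // currently on the last item
let remainingUrls = listUrls.slice(index + 1);     // []
if (!remainingUrls.length) {
  remainingUrls = listUrls.slice(0);               // loopList: wrap around to the full list
}
const nextUrl = remainingUrls && remainingUrls[0]; // 'lbry://one'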
export const makeSelectNameForCollectionId = (id: string) =>
createSelector(
makeSelectCollectionForId(id),
collection => {
return (collection && collection.name) || '';
}
);
export const makeSelectCountForCollectionId = (id: string) =>
createSelector(
makeSelectCollectionForId(id),
collection => {
if (collection) {
if (collection.itemCount !== undefined) {
return collection.itemCount;
}
let itemCount = 0;
collection.items.map(item => {
if (item) {
itemCount += 1;
}
});
return itemCount;
}
return null;
}
);

View file

@ -0,0 +1,66 @@
// @flow
import { createSelector } from 'reselect';
const selectState = state => state.comments || {};
export const selectCommentsById = createSelector(
selectState,
state => state.commentById || {}
);
export const selectCommentsByClaimId = createSelector(
selectState,
selectCommentsById,
(state, byId) => {
const byClaimId = state.byId || {};
const comments = {};
// replace every comment_id in the list with the actual comment object
Object.keys(byClaimId).forEach(claimId => {
const commentIds = byClaimId[claimId];
comments[claimId] = Array(commentIds === null ? 0 : commentIds.length);
for (let i = 0; i < commentIds.length; i++) {
comments[claimId][i] = byId[commentIds[i]];
}
});
return comments;
}
);
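A small sketch of the reshaping selectCommentsByClaimId performs, with invented ids: every comment_id listed under a claim is replaced by the full comment object from commentById.

const byId = { c1: { comment_id: 'c1', comment: 'hi' }, c2: { comment_id: 'c2', comment: 'yo' } };
const byClaimId = { claimA: ['c1', 'c2'] };
// After the loop above: comments.claimA === [byId.c1, byId.c2]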
// previously this used a mapping from claimId -> Array<Comments>
/* export const selectCommentsById = createSelector(
selectState,
state => state.byId || {}
); */
export const selectCommentsByUri = createSelector(
selectState,
state => {
const byUri = state.commentsByUri || {};
const comments = {};
Object.keys(byUri).forEach(uri => {
const claimId = byUri[uri];
if (claimId === null) {
comments[uri] = null;
} else {
comments[uri] = claimId;
}
});
return comments;
}
);
export const makeSelectCommentsForUri = (uri: string) =>
createSelector(
selectCommentsByClaimId,
selectCommentsByUri,
(byClaimId, byUri) => {
const claimId = byUri[uri];
return byClaimId && byClaimId[claimId];
}
);
// todo: allow SDK to retrieve user comments through comment_list
// todo: implement selectors for selecting comments owned by user

View file

@ -0,0 +1,36 @@
// @flow
import { createSelector } from 'reselect';
import { makeSelectFileInfoForUri } from 'redux/selectors/file_info';
type State = { file: FileState };
export const selectState = (state: State): FileState => state.file || {};
export const selectPurchaseUriErrorMessage: (state: State) => string = createSelector(
selectState,
state => state.purchaseUriErrorMessage
);
export const selectFailedPurchaseUris: (state: State) => Array<string> = createSelector(
selectState,
state => state.failedPurchaseUris
);
export const selectPurchasedUris: (state: State) => Array<string> = createSelector(
selectState,
state => state.purchasedUris
);
export const selectLastPurchasedUri: (state: State) => string = createSelector(
selectState,
state =>
state.purchasedUris.length > 0 ? state.purchasedUris[state.purchasedUris.length - 1] : null
);
export const makeSelectStreamingUrlForUri = (uri: string) =>
createSelector(
makeSelectFileInfoForUri(uri),
fileInfo => {
return fileInfo && fileInfo.streaming_url;
}
);

View file

@ -212,7 +212,6 @@ function filterFileInfos(fileInfos, query) {
const queryMatchRegExp = new RegExp(query, 'i'); const queryMatchRegExp = new RegExp(query, 'i');
return fileInfos.filter(fileInfo => { return fileInfos.filter(fileInfo => {
const { metadata } = fileInfo; const { metadata } = fileInfo;
return ( return (
(metadata.title && metadata.title.match(queryMatchRegExp)) || (metadata.title && metadata.title.match(queryMatchRegExp)) ||
(fileInfo.channel_name && fileInfo.channel_name.match(queryMatchRegExp)) || (fileInfo.channel_name && fileInfo.channel_name.match(queryMatchRegExp)) ||
@ -251,11 +250,3 @@ export const makeSelectSearchDownloadUrlsCount = query =>
return fileInfos && fileInfos.length ? filterFileInfos(fileInfos, query).length : 0; return fileInfos && fileInfos.length ? filterFileInfos(fileInfos, query).length : 0;
} }
); );
export const makeSelectStreamingUrlForUri = uri =>
createSelector(
makeSelectFileInfoForUri(uri),
fileInfo => {
return fileInfo && fileInfo.streaming_url;
}
);

View file

@ -40,22 +40,17 @@ export const selectIsStillEditing = createSelector(
export const selectPublishFormValues = createSelector( export const selectPublishFormValues = createSelector(
selectState, selectState,
state => state.settings,
selectIsStillEditing, selectIsStillEditing,
(publishState, settingsState, isStillEditing) => { (state, isStillEditing) => {
const { languages, ...formValues } = publishState; const { pendingPublish, language, languages, ...formValues } = state;
const language = languages && languages.length && languages[0];
const { clientSettings } = settingsState;
const { language: languageSet } = clientSettings;
let actualLanguage; let actualLanguage;
// Sets default if editing a claim with a set language // Sets default if editing a claim with a set language
if (!language && isStillEditing && languageSet) { if (!language && isStillEditing && languages && languages[0]) {
actualLanguage = languageSet; actualLanguage = languages[0];
} else { } else {
actualLanguage = language || languageSet || 'en'; actualLanguage = language || 'en';
} }
return { ...formValues, language: actualLanguage }; return { ...formValues, language: actualLanguage };
} }
); );
@ -82,10 +77,10 @@ export const selectMyClaimForUri = createSelector(
return isStillEditing return isStillEditing
? claimsById[editClaimId] ? claimsById[editClaimId]
: myClaims.find(claim => : myClaims.find(claim =>
!contentName !contentName
? claim.name === claimName ? claim.name === claimName
: claim.name === contentName || claim.name === claimName : claim.name === contentName || claim.name === claimName
); );
} }
); );

View file

@ -0,0 +1,187 @@
// @flow
import { SEARCH_TYPES, SEARCH_OPTIONS } from 'constants/search';
import { getSearchQueryString } from 'util/query-params';
import { normalizeURI, parseURI } from 'lbryURI';
import { createSelector } from 'reselect';
type State = { search: SearchState };
export const selectState = (state: State): SearchState => state.search;
export const selectSearchValue: (state: State) => string = createSelector(
selectState,
state => state.searchQuery
);
export const selectSearchOptions: (state: State) => SearchOptions = createSelector(
selectState,
state => state.options
);
export const selectSuggestions: (
state: State
) => { [string]: Array<SearchSuggestion> } = createSelector(
selectState,
state => state.suggestions
);
export const selectIsSearching: (state: State) => boolean = createSelector(
selectState,
state => state.searching
);
export const selectSearchUrisByQuery: (
state: State
) => { [string]: Array<string> } = createSelector(
selectState,
state => state.urisByQuery
);
export const makeSelectSearchUris = (query: string): ((state: State) => Array<string>) =>
// replace statement below is kind of ugly, and repeated in doSearch action
createSelector(
selectSearchUrisByQuery,
byQuery => byQuery[query ? query.replace(/^lbry:\/\//i, '').replace(/\//, ' ') : query]
);
export const selectResolvedSearchResultsByQuery: (
state: State
) => { [string]: Array<ResolvedSearchResult> } = createSelector(
selectState,
state => state.resolvedResultsByQuery
);
export const selectResolvedSearchResultsByQueryLastPageReached: (
state: State
) => { [string]: Array<boolean> } = createSelector(
selectState,
state => state.resolvedResultsByQueryLastPageReached
);
export const makeSelectResolvedSearchResults = (
query: string
): ((state: State) => Array<ResolvedSearchResult>) =>
// replace statement below is kind of ugly, and repeated in doSearch action
createSelector(
selectResolvedSearchResultsByQuery,
byQuery => byQuery[query ? query.replace(/^lbry:\/\//i, '').replace(/\//, ' ') : query]
);
export const makeSelectResolvedSearchResultsLastPageReached = (
query: string
): ((state: State) => boolean) =>
// replace statement below is kind of ugly, and repeated in doSearch action
createSelector(
selectResolvedSearchResultsByQueryLastPageReached,
byQuery => byQuery[query ? query.replace(/^lbry:\/\//i, '').replace(/\//, ' ') : query]
);
export const selectSearchBarFocused: boolean = createSelector(
selectState,
state => state.focused
);
export const selectSearchSuggestions: Array<SearchSuggestion> = createSelector(
selectSearchValue,
selectSuggestions,
(query: string, suggestions: { [string]: Array<string> }) => {
if (!query) {
return [];
}
const queryIsPrefix =
query === 'lbry:' || query === 'lbry:/' || query === 'lbry://' || query === 'lbry://@';
if (queryIsPrefix) {
// If it is a prefix, wait until something else comes to figure out what to do
return [];
} else if (query.startsWith('lbry://')) {
// If it starts with a prefix, don't show any autocomplete results
// They are probably typing/pasting in a lbry uri
return [
{
value: query,
type: query[7] === '@' ? SEARCH_TYPES.CHANNEL : SEARCH_TYPES.FILE,
},
];
}
let searchSuggestions = [];
try {
const uri = normalizeURI(query);
const { channelName, streamName, isChannel } = parseURI(uri);
searchSuggestions.push(
{
value: query,
type: SEARCH_TYPES.SEARCH,
},
{
value: uri,
shorthand: isChannel ? channelName : streamName,
type: isChannel ? SEARCH_TYPES.CHANNEL : SEARCH_TYPES.FILE,
}
);
} catch (e) {
searchSuggestions.push({
value: query,
type: SEARCH_TYPES.SEARCH,
});
}
searchSuggestions.push({
value: query,
type: SEARCH_TYPES.TAG,
});
const apiSuggestions = suggestions[query] || [];
if (apiSuggestions.length) {
searchSuggestions = searchSuggestions.concat(
apiSuggestions
.filter(suggestion => suggestion !== query)
.map(suggestion => {
// determine if it's a channel
try {
const uri = normalizeURI(suggestion);
const { channelName, streamName, isChannel } = parseURI(uri);
return {
value: uri,
shorthand: isChannel ? channelName : streamName,
type: isChannel ? SEARCH_TYPES.CHANNEL : SEARCH_TYPES.FILE,
};
} catch (e) {
// search result includes some character that isn't valid in claim names
return {
value: suggestion,
type: SEARCH_TYPES.SEARCH,
};
}
})
);
}
return searchSuggestions;
}
);
// Creates a query string based on the state in the search reducer
// Can be overridden by passing in custom size/from values for pagination in other areas
type CustomOptions = {
isBackgroundSearch?: boolean,
size?: number,
from?: number,
related_to?: string,
nsfw?: boolean,
};
export const makeSelectQueryWithOptions = (customQuery: ?string, options: CustomOptions) =>
createSelector(
selectSearchValue,
selectSearchOptions,
(query, defaultOptions) => {
const searchOptions = { ...defaultOptions, ...options };
const queryString = getSearchQueryString(customQuery || query, searchOptions);
return queryString;
}
);
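A hedged usage sketch of makeSelectQueryWithOptions, e.g. for requesting the second page of twenty results; the exact string format is whatever getSearchQueryString produces.

const selectQuery = makeSelectQueryWithOptions(null, { size: 20, from: 20, isBackgroundSearch: false });
const queryString = selectQuery(state); // custom options are merged over the user's saved search options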

View file

@ -0,0 +1,47 @@
// @flow
import { createSelector } from 'reselect';
const selectState = (state: { tags: TagState }) => state.tags || {};
export const selectKnownTagsByName = createSelector(
selectState,
(state: TagState): KnownTags => state.knownTags
);
export const selectFollowedTagsList = createSelector(
selectState,
(state: TagState): Array<string> => state.followedTags.filter(tag => typeof tag === 'string')
);
export const selectFollowedTags = createSelector(
selectFollowedTagsList,
(followedTags: Array<string>): Array<Tag> =>
followedTags
.map(tag => ({ name: tag.toLowerCase() }))
.sort((a, b) => a.name.localeCompare(b.name))
);
export const selectUnfollowedTags = createSelector(
selectKnownTagsByName,
selectFollowedTagsList,
(tagsByName: KnownTags, followedTags: Array<string>): Array<Tag> => {
const followedTagsSet = new Set(followedTags);
let tagsToReturn = [];
Object.keys(tagsByName).forEach(key => {
if (!followedTagsSet.has(key)) {
const { name } = tagsByName[key];
tagsToReturn.push({ name: name.toLowerCase() });
}
});
return tagsToReturn;
}
);
export const makeSelectIsFollowingTag = (tag: string) =>
createSelector(
selectFollowedTags,
followedTags => {
return followedTags.some(followedTag => followedTag.name === tag.toLowerCase());
}
);

View file

@ -2,7 +2,6 @@ import { createSelector } from 'reselect';
import * as TRANSACTIONS from 'constants/transaction_types'; import * as TRANSACTIONS from 'constants/transaction_types';
import { PAGE_SIZE, LATEST_PAGE_SIZE } from 'constants/transaction_list'; import { PAGE_SIZE, LATEST_PAGE_SIZE } from 'constants/transaction_list';
import { selectClaimIdsByUri } from 'redux/selectors/claims'; import { selectClaimIdsByUri } from 'redux/selectors/claims';
import parseData from 'util/parse-data';
export const selectState = state => state.wallet || {}; export const selectState = state => state.wallet || {};
export const selectWalletState = selectState; export const selectWalletState = selectState;
@ -27,27 +26,21 @@ export const selectPendingSupportTransactions = createSelector(
state => state.pendingSupportTransactions state => state.pendingSupportTransactions
); );
export const selectPendingOtherTransactions = createSelector(
selectState,
state => state.pendingTxos
);
export const selectAbandonClaimSupportError = createSelector( export const selectAbandonClaimSupportError = createSelector(
selectState, selectState,
state => state.abandonClaimSupportError state => state.abandonClaimSupportError
); );
export const makeSelectPendingAmountByUri = uri => export const makeSelectPendingAmountByUri = (uri) => createSelector(
createSelector( selectClaimIdsByUri,
selectClaimIdsByUri, selectPendingSupportTransactions,
selectPendingSupportTransactions, (claimIdsByUri, pendingSupports) => {
(claimIdsByUri, pendingSupports) => { const uriEntry = Object.entries(claimIdsByUri).find(([u, cid]) => u === uri);
const uriEntry = Object.entries(claimIdsByUri).find(([u, cid]) => u === uri); const claimId = uriEntry && uriEntry[1];
const claimId = uriEntry && uriEntry[1]; const pendingSupport = claimId && pendingSupports[claimId];
const pendingSupport = claimId && pendingSupports[claimId]; return pendingSupport ? pendingSupport.effective : undefined;
return pendingSupport ? pendingSupport.effective : undefined; }
} );
);
export const selectWalletEncryptResult = createSelector( export const selectWalletEncryptResult = createSelector(
selectState, selectState,
@ -268,27 +261,6 @@ export const selectIsFetchingTransactions = createSelector(
state => state.fetchingTransactions state => state.fetchingTransactions
); );
/**
* CSV of 'selectTransactionItems'.
*/
export const selectTransactionsFile = createSelector(
selectTransactionItems,
transactions => {
if (!transactions || transactions.length === 0) {
// No data.
return undefined;
}
const parsed = parseData(transactions, 'csv');
if (!parsed) {
// Invalid data, or failed to parse.
return null;
}
return parsed;
}
);
export const selectIsSendingSupport = createSelector( export const selectIsSendingSupport = createSelector(
selectState, selectState,
state => state.sendingSupport state => state.sendingSupport
@ -356,27 +328,27 @@ export const selectTxoPageParams = createSelector(
export const selectTxoPage = createSelector( export const selectTxoPage = createSelector(
selectState, selectState,
state => (state.txoPage && state.txoPage.items) || [] state => (state.txoPage && state.txoPage.items) || [],
); );
export const selectTxoPageNumber = createSelector( export const selectTxoPageNumber = createSelector(
selectState, selectState,
state => (state.txoPage && state.txoPage.page) || 1 state => (state.txoPage && state.txoPage.page) || 1,
); );
export const selectTxoItemCount = createSelector( export const selectTxoItemCount = createSelector(
selectState, selectState,
state => (state.txoPage && state.txoPage.total_items) || 1 state => (state.txoPage && state.txoPage.total_items) || 1,
); );
export const selectFetchingTxosError = createSelector( export const selectFetchingTxosError = createSelector(
selectState, selectState,
state => state.fetchingTxosError state => state.fetchingTxosError,
); );
export const selectIsFetchingTxos = createSelector( export const selectIsFetchingTxos = createSelector(
selectState, selectState,
state => state.fetchingTxos state => state.fetchingTxos,
); );
export const makeSelectFilteredTransactionsForPage = (page = 1) => export const makeSelectFilteredTransactionsForPage = (page = 1) =>
@ -407,33 +379,3 @@ export const selectIsWalletReconnecting = createSelector(
selectState, selectState,
state => state.walletReconnecting state => state.walletReconnecting
); );
export const selectIsFetchingUtxoCounts = createSelector(
selectState,
state => state.fetchingUtxoCounts
);
export const selectIsConsolidatingUtxos = createSelector(
selectState,
state => state.consolidatingUtxos
);
export const selectIsMassClaimingTips = createSelector(
selectState,
state => state.massClaimingTips
);
export const selectPendingConsolidateTxid = createSelector(
selectState,
state => state.pendingConsolidateTxid
);
export const selectPendingMassClaimTxid = createSelector(
selectState,
state => state.pendingMassClaimTxid
);
export const selectUtxoCounts = createSelector(
selectState,
state => state.utxoCounts
);

View file

@ -51,20 +51,3 @@ export function concatClaims(
return claims; return claims;
} }
export function filterClaims(claims: Array<Claim>, query: ?string): Array<Claim> {
if (query) {
const queryMatchRegExp = new RegExp(query, 'i');
return claims.filter(claim => {
const { value } = claim;
return (
(value.title && value.title.match(queryMatchRegExp)) ||
(claim.signing_channel && claim.signing_channel.name.match(queryMatchRegExp)) ||
(claim.name && claim.name.match(queryMatchRegExp))
);
});
}
return claims;
}
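A minimal usage sketch of this removed helper; the claim objects and query below are invented for illustration and are not part of the commit:
// Matches case-insensitively against the title, the signing channel name, or the claim name.
const claims = [
  { name: 'intro-video', value: { title: 'Intro Video' }, signing_channel: { name: '@SomeChannel' } },
  { name: 'other-post', value: { title: 'Something Else' } },
];
filterClaims(claims, 'intro'); // keeps only the first claim
filterClaims(claims);          // no query: returns the list unchanged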

View file

@ -1,7 +0,0 @@
/*
new claim = { ...maybeResolvedClaim, ...pendingClaim, meta: maybeResolvedClaim['meta'] }
*/
export default function mergeClaims(maybeResolved, pending){
return { ...maybeResolved, ...pending, meta: maybeResolved.meta };
}
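A minimal sketch of the removed helper's behavior; both claim objects are invented for illustration:
// The pending claim's fields win, but the resolved claim's meta is preserved.
const resolved = { claim_id: 'abc', name: 'video', height: 812345, meta: { effective_amount: '1.0' } };
const pending = { claim_id: 'abc', name: 'video', height: -1, meta: {} };
const merged = mergeClaims(resolved, pending);
// merged.height === -1 and merged.meta.effective_amount === '1.0'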

View file

@ -1,61 +0,0 @@
// JSON parser
const parseJson = (data, filters = []) => {
const list = data.map(item => {
const temp = {};
// Apply filters
Object.entries(item).forEach(([key, value]) => {
if (!filters.includes(key)) temp[key] = value;
});
return temp;
});
// Beautify JSON
return JSON.stringify(list, null, '\t');
};
// CSV Parser
// No need for an external module:
// https://gist.github.com/btzr-io/55c3450ea3d709fc57540e762899fb85
const parseCsv = (data, filters = []) => {
// Get items for header
const getHeaders = item => {
const list = [];
// Apply filters
Object.entries(item).forEach(([key]) => {
if (!filters.includes(key)) list.push(key);
});
// return headers
return list.join(',');
};
// Get rows content
const getData = list =>
list
.map(item => {
const row = [];
// Apply filters
Object.entries(item).forEach(([key, value]) => {
if (!filters.includes(key)) row.push(value);
});
// return rows
return row.join(',');
})
.join('\n');
// Return CSV string
return `${getHeaders(data[0])} \n ${getData(data)}`;
};
const parseData = (data, format, filters = []) => {
// Check for validation
const valid = data && data[0] && format;
// Pick a format
const formats = {
csv: list => parseCsv(list, filters),
json: list => parseJson(list, filters),
};
// Return parsed data: JSON || CSV
return valid && formats[format] ? formats[format](data) : undefined;
};
export default parseData;
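A small usage sketch of the removed helper; the transaction rows and the 'fee' column are invented for illustration:
const rows = [
  { txid: 'abc', amount: 1.5, fee: 0.001 },
  { txid: 'def', amount: 2, fee: 0.001 },
];
const csv = parseData(rows, 'csv', ['fee']);
// csv holds a 'txid,amount' header line followed by 'abc,1.5' and 'def,2'
// (the template string above leaves stray spaces around the newline)
const json = parseData(rows, 'json', ['fee']); // tab-indented JSON string of the same rows
const invalid = parseData([], 'csv'); // undefined: an empty list fails the validity check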

View file

@ -1,4 +1,8 @@
// @flow // @flow
import { SEARCH_OPTIONS } from 'constants/search';
const DEFAULT_SEARCH_RESULT_FROM = 0;
const DEFAULT_SEARCH_SIZE = 20;
export function parseQueryParams(queryString: string) { export function parseQueryParams(queryString: string) {
if (queryString === '') return {}; if (queryString === '') return {};
@ -28,3 +32,54 @@ export function toQueryString(params: { [string]: string | number }) {
return parts.join('&'); return parts.join('&');
} }
export const getSearchQueryString = (query: string, options: any = {}) => {
const encodedQuery = encodeURIComponent(query);
const queryParams = [
`s=${encodedQuery}`,
`size=${options.size || DEFAULT_SEARCH_SIZE}`,
`from=${options.from || DEFAULT_SEARCH_RESULT_FROM}`,
];
const { isBackgroundSearch } = options;
const includeUserOptions =
typeof isBackgroundSearch === 'undefined' ? false : !isBackgroundSearch;
if (includeUserOptions) {
const claimType = options[SEARCH_OPTIONS.CLAIM_TYPE];
if (claimType) {
queryParams.push(`claimType=${claimType}`);
      // Channels have no media type, so only apply the media filters when channels are excluded from the search
if (!claimType.includes(SEARCH_OPTIONS.INCLUDE_CHANNELS)) {
queryParams.push(
`mediaType=${[
SEARCH_OPTIONS.MEDIA_FILE,
SEARCH_OPTIONS.MEDIA_AUDIO,
SEARCH_OPTIONS.MEDIA_VIDEO,
SEARCH_OPTIONS.MEDIA_TEXT,
SEARCH_OPTIONS.MEDIA_IMAGE,
SEARCH_OPTIONS.MEDIA_APPLICATION,
].reduce(
(acc, currentOption) => (options[currentOption] ? `${acc}${currentOption},` : acc),
''
)}`
);
}
}
}
const additionalOptions = {};
const { related_to } = options;
const { nsfw } = options;
if (related_to) additionalOptions['related_to'] = related_to;
if (typeof nsfw !== 'undefined') additionalOptions['nsfw'] = nsfw;
if (additionalOptions) {
Object.keys(additionalOptions).forEach(key => {
const option = additionalOptions[key];
queryParams.push(`${key}=${option}`);
});
}
return queryParams.join('&');
};
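For illustration, one way the new helper might be called; the query and option values here are assumptions, not taken from this commit:
// A foreground search (isBackgroundSearch: false) with extra filters appended.
const qs = getSearchQueryString('big buck bunny', {
  size: 10,
  isBackgroundSearch: false,
  related_to: 'abc123',
  nsfw: false,
});
// qs === 's=big%20buck%20bunny&size=10&from=0&related_to=abc123&nsfw=false'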

View file

@ -1,19 +0,0 @@
const config = {
babelrc: false,
presets: [
[
"@babel/env",
{
modules: false
}
],
"@babel/react"
],
plugins: [
["@babel/plugin-proposal-decorators", { legacy: true }],
["@babel/plugin-proposal-class-properties", { loose: true }],
"@babel/plugin-transform-flow-strip-types",
"transform-es2015-modules-commonjs"
]
};
module.exports = require("babel-jest").createTransformer(config);

View file

@ -1,44 +0,0 @@
import * as lbryURI from '../src/lbryURI.js';
import {describe, test} from "@jest/globals";
describe('parseURI tests', () => {
test('Correctly parses channel URI', () => {
let result = lbryURI.parseURI('lbry://@ChannelName');
expect(result.isChannel).toBeTruthy();
expect(result.path).toStrictEqual("@ChannelName");
expect(result.channelName).toStrictEqual("ChannelName");
expect(result.claimName).toStrictEqual("@ChannelName");
});
test('Correctly parses test case channel/stream lbry URI', () => {
let result = lbryURI.parseURI('lbry://@CryptoGnome#1/whale-pool-how-to#e');
    expect(result.isChannel).toStrictEqual(false);
expect(result.path).toStrictEqual("@CryptoGnome#1/whale-pool-how-to#e");
expect(result.claimId).toStrictEqual("1");
expect(result.streamClaimId).toStrictEqual("e");
expect(result.streamName).toStrictEqual("whale-pool-how-to");
expect(result.channelName).toStrictEqual("CryptoGnome");
expect(result.contentName).toStrictEqual("whale-pool-how-to");
});
test('Correctly parses lbry URI without protocol', () => {
let result = lbryURI.parseURI('@CryptoGnome#1/whale-pool-how-to#e');
    expect(result.isChannel).toStrictEqual(false);
expect(result.streamName).toStrictEqual("whale-pool-how-to");
expect(result.channelName).toStrictEqual("CryptoGnome");
});
test('Throws error for http protocol', () => {
// TODO - this catches wrong type of error..
let uri = 'http://@CryptoGnome#1/whale-pool-how-to#e';
expect(() => lbryURI.parseURI(uri)).toThrowError();
});
test('Correctly parses search', () => {
let result = lbryURI.parseURI('CryptoGn%ome');
expect(result.isChannel).toStrictEqual(false);
expect(result.path).toStrictEqual("CryptoGn%ome");
expect(result.contentName).toStrictEqual("CryptoGn%ome");
});
})

yarn.lock (3331 changed lines)

File diff suppressed because it is too large