Compare commits: mobile-rel...master (339 commits)
83 changed files with 12214 additions and 4512 deletions

@@ -1,5 +1,6 @@
[ignore]

[include]

[libs]

@@ -12,4 +13,5 @@ module.name_mapper='^redux\(.*\)$' -> '<PROJECT_ROOT>/src/redux\1'
module.name_mapper='^util\(.*\)$' -> '<PROJECT_ROOT>/src/util\1'
module.name_mapper='^constants\(.*\)$' -> '<PROJECT_ROOT>/src/constants\1'
module.name_mapper='^lbry\(.*\)$' -> '<PROJECT_ROOT>/src/lbry\1'
module.name_mapper='^lbry-first\(.*\)$' -> '<PROJECT_ROOT>/src/lbry-first\1'
module.name_mapper='^lbryURI\(.*\)$' -> '<PROJECT_ROOT>/src/lbryURI\1'

LICENSE (2 changes)

@@ -1,6 +1,6 @@
The MIT License (MIT)

-Copyright (c) 2017-2020 LBRY Inc
+Copyright (c) 2017-2021 LBRY Inc

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish,

@@ -20,6 +20,9 @@ yarn link lbry-redux

### Build
Run `$ yarn build`. If the symlink does not work, just build the file and move the `bundle.js` file into the `node_modules/` folder.

### Tests
Run `$ yarn test`.

## Contributing
We :heart: contributions from everyone! We welcome [bug reports](https://github.com/lbryio/lbry-redux/issues/), [bug fixes](https://github.com/lbryio/lbry-redux/pulls) and feedback on the module is always appreciated.

dist/bundle.es.js (vendored, 5686 changes): file diff suppressed because one or more lines are too long.

dist/flow-typed/*.js (vendored build copies; these diffs are identical to the corresponding flow-typed/ diffs below): Claim.js (102 changes), CoinSwap.js (29, new file), Collections.js (34, new file), Comment.js (23, contents removed), File.js (7), Lbry.js (100), LbryFirst.js (99, new file), Reflector.js (5, new file), Search.js (84, contents removed), Txo.js (27, new file), lbryURI.js (1), npm/from-entries.js (5, new file), npm/uuid.js (5, new file), npm/uuid_v3.x.x.js (102, contents removed).

flow-typed/Claim.js (vendored, 102 changes)

@@ -1,14 +1,16 @@
// @flow

-declare type Claim = StreamClaim | ChannelClaim;
+declare type Claim = StreamClaim | ChannelClaim | CollectionClaim;

declare type ChannelClaim = GenericClaim & {
  is_channel_signature_valid?: boolean, // we may have signed channels in the future
  value: ChannelMetadata,
};

declare type CollectionClaim = GenericClaim & {
  value: CollectionMetadata,
};

declare type StreamClaim = GenericClaim & {
  is_channel_signature_valid?: boolean,
  value: StreamMetadata,
};

@@ -23,7 +25,8 @@ declare type GenericClaim = {
  decoded_claim: boolean, // Not available currently https://github.com/lbryio/lbry/issues/2044
  timestamp?: number, // date of last transaction
  height: number, // block height the tx was confirmed
  is_mine: boolean,
  is_channel_signature_valid?: boolean,
  is_my_output: boolean,
  name: string,
  normalized_name: string, // `name` normalized via unicode NFD spec,
  nout: number, // index number for an output of a tx

@@ -31,9 +34,13 @@ declare type GenericClaim = {
  short_url: string, // permanent_url with short id, no channel
  txid: string, // unique tx id
  type: 'claim' | 'update' | 'support',
-  value_type: 'stream' | 'channel',
+  value_type: 'stream' | 'channel' | 'collection',
  signing_channel?: ChannelClaim,
  reposted_claim?: GenericClaim,
  repost_channel_url?: string,
  repost_url?: string,
  repost_bid_amount?: string,
  purchase_receipt?: PurchaseReceipt,
  meta: {
    activation_height: number,
    claims_in_channel?: number,

@@ -43,6 +50,7 @@ declare type GenericClaim = {
    expiration_height: number,
    is_controlling: boolean,
    support_amount: string,
    reposted: number,
    trending_global: number,
    trending_group: number,
    trending_local: number,

@@ -70,6 +78,10 @@ declare type ChannelMetadata = GenericMetadata & {
  featured?: Array<string>,
};

declare type CollectionMetadata = GenericMetadata & {
  claims: Array<string>,
}

declare type StreamMetadata = GenericMetadata & {
  license?: string, // License "title" ex: Creative Commons, Custom copyright
  license_url?: string, // Link to full license

@@ -120,3 +132,83 @@ declare type Fee = {
  currency: string,
  address: string,
};

declare type PurchaseReceipt = {
  address: string,
  amount: string,
  claim_id: string,
  confirmations: number,
  height: number,
  nout: number,
  timestamp: number,
  txid: string,
  type: 'purchase',
};

declare type ClaimActionResolveInfo = {
  [string]: {
    stream: ?StreamClaim,
    channel: ?ChannelClaim,
    claimsInChannel: ?number,
    collection: ?CollectionClaim,
  },
}

declare type ChannelUpdateParams = {
  claim_id: string,
  bid?: string,
  title?: string,
  cover_url?: string,
  thumbnail_url?: string,
  description?: string,
  website_url?: string,
  email?: string,
  tags?: Array<string>,
  replace?: boolean,
  languages?: Array<string>,
  locations?: Array<string>,
  blocking?: boolean,
}

declare type ChannelPublishParams = {
  name: string,
  bid: string,
  blocking?: true,
  title?: string,
  cover_url?: string,
  thumbnail_url?: string,
  description?: string,
  website_url?: string,
  email?: string,
  tags?: Array<string>,
  languages?: Array<string>,
}

declare type CollectionUpdateParams = {
  claim_id: string,
  claim_ids?: Array<string>,
  bid?: string,
  title?: string,
  cover_url?: string,
  thumbnail_url?: string,
  description?: string,
  website_url?: string,
  email?: string,
  tags?: Array<string>,
  replace?: boolean,
  languages?: Array<string>,
  locations?: Array<string>,
  blocking?: boolean,
}

declare type CollectionPublishParams = {
  name: string,
  bid: string,
  claim_ids: Array<string>,
  blocking?: true,
  title?: string,
  thumbnail_url?: string,
  description?: string,
  tags?: Array<string>,
  languages?: Array<string>,
}
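
The new collection params above are consumed like the existing channel params. A minimal sketch with made-up values; only the type shapes come from flow-typed/Claim.js:

```js
// @flow
// Hypothetical values; only the shapes come from the declarations above.
const publishParams: CollectionPublishParams = {
  name: 'favorites',
  bid: '0.01',
  claim_ids: ['firstClaimId', 'secondClaimId'], // ordered items of the collection
  title: 'My favorites',
  blocking: true,
};

const updateParams: CollectionUpdateParams = {
  claim_id: 'existingCollectionClaimId',
  claim_ids: ['secondClaimId', 'thirdClaimId'],
  replace: true,
};
```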

flow-typed/CoinSwap.js (vendored, new file, 29 additions)

@@ -0,0 +1,29 @@
declare type CoinSwapInfo = {
  chargeCode: string,
  coins: Array<string>,
  sendAddresses: { [string]: string},
  sendAmounts: { [string]: any },
  lbcAmount: number,
  status?: {
    status: string,
    receiptCurrency: string,
    receiptTxid: string,
    lbcTxid: string,
  },
}

declare type CoinSwapState = {
  coinSwaps: Array<CoinSwapInfo>,
};

declare type CoinSwapAddAction = {
  type: string,
  data: CoinSwapInfo,
};

declare type CoinSwapRemoveAction = {
  type: string,
  data: {
    chargeCode: string,
  },
};
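
A sketch of a value of the new CoinSwapInfo type and its add action. The concrete values and the action-type string are placeholders, not constants from the source:

```js
// @flow
// Hypothetical values; 'COIN_SWAP_ADD' is a placeholder, not the real
// constant from constants/action_types.
const swap: CoinSwapInfo = {
  chargeCode: 'ABC123',
  coins: ['BTC'],
  sendAddresses: { BTC: 'btcDepositAddress' },
  sendAmounts: { BTC: 0.0005 },
  lbcAmount: 100,
};

const addAction: CoinSwapAddAction = { type: 'COIN_SWAP_ADD', data: swap };
```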

flow-typed/Collections.js (vendored, new file, 34 additions)

@@ -0,0 +1,34 @@
declare type Collection = {
  id: string,
  items: Array<?string>,
  name: string,
  type: string,
  updatedAt: number,
  totalItems?: number,
  sourceId?: string, // if copied, claimId of original collection
};

declare type CollectionState = {
  unpublished: CollectionGroup,
  resolved: CollectionGroup,
  pending: CollectionGroup,
  edited: CollectionGroup,
  builtin: CollectionGroup,
  saved: Array<string>,
  isResolvingCollectionById: { [string]: boolean },
  error?: string | null,
};

declare type CollectionGroup = {
  [string]: Collection,
}

declare type CollectionEditParams = {
  claims?: Array<Claim>,
  remove?: boolean,
  claimIds?: Array<string>,
  replace?: boolean,
  order?: { from: number, to: number },
  type?: string,
  name?: string,
}
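
CollectionEditParams is the payload shape for editing a collection, and every field is optional. A hypothetical sketch:

```js
// @flow
// Hypothetical edit payloads built from the CollectionEditParams shape above.
const moveToFront: CollectionEditParams = {
  order: { from: 2, to: 0 }, // move the third item to the front
};

const removeOne: CollectionEditParams = {
  claimIds: ['claimIdToRemove'],
  remove: true,
};
```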

flow-typed/Comment.js (vendored, 23 deletions)

@@ -1,23 +0,0 @@
declare type Comment = {
  comment: string, // comment body
  comment_id: string, // sha256 digest
  claim_id: string, // id linking to the claim this comment
  timestamp: number, // integer representing unix-time
  is_hidden: boolean, // claim owner may enable/disable this
  channel_id?: string, // claimId of channel signing this comment
  channel_name?: string, // name of channel claim
  channel_url?: string, // full lbry url to signing channel
  signature?: string, // signature of comment by originating channel
  signing_ts?: string, // timestamp used when signing this comment
  is_channel_signature_valid?: boolean, // whether or not the signature could be validated
  parent_id?: number, // comment_id of comment this is in reply to
};

// todo: relate individual comments to their commentId
declare type CommentsState = {
  commentsByUri: { [string]: string },
  byId: { [string]: Array<string> },
  commentById: { [string]: Comment },
  isLoading: boolean,
  myComments: ?Set<string>,
};

flow-typed/File.js (vendored, 7 changes)

@@ -11,6 +11,8 @@ declare type FileListItem = {
  claim_id: string,
  claim_name: string,
  completed: false,
  content_fee?: { txid: string },
  purchase_receipt?: { txid: string, amount: string },
  download_directory: string,
  download_path: string,
  file_name: string,

@@ -20,6 +22,7 @@ declare type FileListItem = {
  outpoint: string,
  points_paid: number,
  protobuf: string,
  reflector_progress: number,
  sd_hash: string,
  status: string,
  stopped: false,

@@ -29,10 +32,12 @@ declare type FileListItem = {
  suggested_file_name: string,
  total_bytes: number,
  total_bytes_lower_bound: number,
  is_fully_reflected: boolean,
  // TODO: sdk plans to change `tx`
  // It isn't currently used by the apps
  tx: {},
  txid: string,
  uploading_to_reflector: boolean,
  written_bytes: number,
};

@@ -66,7 +71,7 @@ declare type PurchaseUriStarted = {
};

declare type DeletePurchasedUri = {
-  type: ACTIONS.DELETE_PURCHASED_URI,
+  type: ACTIONS.CLEAR_PURCHASED_URI_SUCCESS,
  data: {
    uri: string,
  },

flow-typed/Lbry.js (vendored, 100 changes)

@@ -7,10 +7,6 @@ declare type StatusResponse = {
    download_progress: number,
    downloading_headers: boolean,
  },
  connection_status: {
    code: string,
    message: string,
  },
  dht: {
    node_id: string,
    peers_in_routing_table: number,

@@ -45,6 +41,7 @@ declare type StatusResponse = {
    redirects: {},
  },
  wallet: ?{
    connected: string,
    best_blockhash: string,
    blocks: number,
    blocks_behind: number,

@@ -78,7 +75,7 @@ declare type BalanceResponse = {

declare type ResolveResponse = {
  // Keys are the url(s) passed to resolve
-  [string]: { error?: {}, stream?: StreamClaim, channel?: ChannelClaim, claimsInChannel?: number },
+  [string]: { error?: {}, stream?: StreamClaim, channel?: ChannelClaim, collection?: CollectionClaim, claimsInChannel?: number },
};

declare type GetResponse = FileListItem & { error?: string };

@@ -127,12 +124,22 @@ declare type ChannelUpdateResponse = GenericTxResponse & {
declare type CommentCreateResponse = Comment;
declare type CommentUpdateResponse = Comment;

declare type CommentListResponse = {
  items: Array<Comment>,
  page: number,
  page_size: number,
  total_items: number,
  total_pages: number,
declare type MyReactions = {
  // Keys are the commentId
  [string]: Array<string>,
};

declare type OthersReactions = {
  // Keys are the commentId
  [string]: {
    // Keys are the reaction_type, e.g. 'like'
    [string]: number,
  },
};

declare type CommentReactListResponse = {
  my_reactions: Array<MyReactions>,
  others_reactions: Array<OthersReactions>,
};

declare type CommentHideResponse = {

@@ -140,6 +147,11 @@ declare type CommentHideResponse = {
  [string]: { hidden: boolean },
};

declare type CommentPinResponse = {
  // keyed by the CommentIds entered
  items: Comment,
};

declare type CommentAbandonResponse = {
  // keyed by the CommentId given
  abandoned: boolean,

@@ -153,6 +165,42 @@ declare type ChannelListResponse = {
  total_pages: number,
};

declare type ChannelSignResponse = {
  signature: string,
  signing_ts: string,
};

declare type CollectionCreateResponse = {
  outputs: Array<Claim>,
  page: number,
  page_size: number,
  total_items: number,
  total_pages: number,
}

declare type CollectionListResponse = {
  items: Array<Claim>,
  page: number,
  page_size: number,
  total_items: number,
  total_pages: number,
};

declare type CollectionResolveResponse = {
  items: Array<Claim>,
  total_items: number,
};

declare type CollectionResolveOptions = {
  claim_id: string,
};

declare type CollectionListOptions = {
  page: number,
  page_size: number,
  resolve?: boolean,
};

declare type FileListResponse = {
  items: Array<FileListItem>,
  page: number,

@@ -187,6 +235,7 @@ declare type WalletListResponse = Array<{
declare type WalletStatusResponse = {
  is_encrypted: boolean,
  is_locked: boolean,
  is_syncing: boolean,
};

declare type SyncApplyResponse = {

@@ -208,11 +257,27 @@ declare type StreamRepostOptions = {
  name: string,
  bid: string,
  claim_id: string,
-  channel_id: string,
+  channel_id?: string,
};

declare type StreamRepostResponse = GenericTxResponse;

declare type PurchaseListResponse = {
  items: Array<PurchaseReceipt & { claim: StreamClaim }>,
  page: number,
  page_size: number,
  total_items: number,
  total_pages: number,
};

declare type PurchaseListOptions = {
  page: number,
  page_size: number,
  resolve: boolean,
  claim_id?: string,
  channel_id?: string,
};

//
// Types used in the generic Lbry object that is exported
//

@@ -221,6 +286,8 @@ declare type LbryTypes = {
  connectPromise: ?Promise<any>,
  connect: () => void,
  daemonConnectionString: string,
  alternateConnectionString: string,
  methodsUsingAlternateConnectionString: Array<string>,
  apiRequestHeaders: { [key: string]: string },
  setDaemonConnectionString: string => void,
  setApiHeader: (string, string) => void,

@@ -243,6 +310,7 @@ declare type LbryTypes = {
  channel_update: (params: {}) => Promise<ChannelUpdateResponse>,
  channel_import: (params: {}) => Promise<string>,
  channel_list: (params: {}) => Promise<ChannelListResponse>,
  channel_sign: (params: {}) => Promise<ChannelSignResponse>,
  stream_abandon: (params: {}) => Promise<GenericTxResponse>,
  stream_list: (params: {}) => Promise<StreamListResponse>,
  channel_abandon: (params: {}) => Promise<GenericTxResponse>,

@@ -250,6 +318,11 @@ declare type LbryTypes = {
  support_list: (params: {}) => Promise<SupportListResponse>,
  support_abandon: (params: {}) => Promise<SupportAbandonResponse>,
  stream_repost: (params: StreamRepostOptions) => Promise<StreamRepostResponse>,
  purchase_list: (params: PurchaseListOptions) => Promise<PurchaseListResponse>,
  collection_resolve: (params: CollectionResolveOptions) => Promise<CollectionResolveResponse>,
  collection_list: (params: CollectionListOptions) => Promise<CollectionListResponse>,
  collection_create: (params: {}) => Promise<CollectionCreateResponse>,
  collection_update: (params: {}) => Promise<CollectionCreateResponse>,

  // File fetching and manipulation
  file_list: (params: {}) => Promise<FileListResponse>,

@@ -262,8 +335,6 @@ declare type LbryTypes = {
  preference_set: (params: {}) => Promise<any>,

  // Commenting
  comment_list: (params: {}) => Promise<CommentListResponse>,
  comment_create: (params: {}) => Promise<CommentCreateResponse>,
  comment_update: (params: {}) => Promise<CommentUpdateResponse>,
  comment_hide: (params: {}) => Promise<CommentHideResponse>,
  comment_abandon: (params: {}) => Promise<CommentAbandonResponse>,

@@ -280,6 +351,7 @@ declare type LbryTypes = {
  address_unused: (params: {}) => Promise<string>, // New address
  address_list: (params: {}) => Promise<string>,
  transaction_list: (params: {}) => Promise<TxListResponse>,
  txo_list: (params: {}) => Promise<any>,

  // Sync
  sync_hash: (params: {}) => Promise<string>,
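
A sketch of calling the new collection methods, assuming the module's exported Lbry object implements the LbryTypes interface above; the import itself is an assumption, not shown in this diff:

```js
// @flow
// Sketch only: assumes the exported Lbry object implements LbryTypes.
import { Lbry } from 'lbry-redux';

async function listMyCollections(): Promise<Array<Claim>> {
  const res: CollectionListResponse = await Lbry.collection_list({ page: 1, page_size: 20, resolve: true });
  return res.items;
}

async function resolveCollection(claimId: string): Promise<Array<Claim>> {
  const res: CollectionResolveResponse = await Lbry.collection_resolve({ claim_id: claimId });
  return res.items;
}
```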

flow-typed/LbryFirst.js (vendored, new file, 99 additions)

@@ -0,0 +1,99 @@
// @flow
declare type LbryFirstStatusResponse = {
  Version: string,
  Message: string,
  Running: boolean,
  Commit: string,
};

declare type LbryFirstVersionResponse = {
  build: string,
  lbrynet_version: string,
  os_release: string,
  os_system: string,
  platform: string,
  processor: string,
  python_version: string,
};
/* SAMPLE UPLOAD RESPONSE (FULL)
"Video": {
  "etag": "\"Dn5xIderbhAnUk5TAW0qkFFir0M/xlGLrlTox7VFTRcR8F77RbKtaU4\"",
  "id": "8InjtdvVmwE",
  "kind": "youtube#video",
  "snippet": {
    "categoryId": "22",
    "channelId": "UCXiVsGTU88fJjheB2rqF0rA",
    "channelTitle": "Mark Beamer",
    "liveBroadcastContent": "none",
    "localized": {
      "title": "my title"
    },
    "publishedAt": "2020-05-05T04:17:53.000Z",
    "thumbnails": {
      "default": {
        "height": 90,
        "url": "https://i9.ytimg.com/vi/8InjtdvVmwE/default.jpg?sqp=CMTQw_UF&rs=AOn4CLB6dlhZMSMrazDlWRsitPgCsn8fVw",
        "width": 120
      },
      "high": {
        "height": 360,
        "url": "https://i9.ytimg.com/vi/8InjtdvVmwE/hqdefault.jpg?sqp=CMTQw_UF&rs=AOn4CLB-Je_7l6qvASRAR_bSGWZHaXaJWQ",
        "width": 480
      },
      "medium": {
        "height": 180,
        "url": "https://i9.ytimg.com/vi/8InjtdvVmwE/mqdefault.jpg?sqp=CMTQw_UF&rs=AOn4CLCvSnDLqVznRNMKuvJ_0misY_chPQ",
        "width": 320
      }
    },
    "title": "my title"
  },
  "status": {
    "embeddable": true,
    "license": "youtube",
    "privacyStatus": "private",
    "publicStatsViewable": true,
    "uploadStatus": "uploaded"
  }
}
*/
declare type UploadResponse = {
  Video: {
    id: string,
    snippet: {
      channelId: string,
    },
    status: {
      uploadStatus: string,
    },
  },
};

declare type HasYTAuthResponse = {
  HashAuth: boolean,
};

declare type YTSignupResponse = {};

//
// Types used in the generic LbryFirst object that is exported
//
declare type LbryFirstTypes = {
  isConnected: boolean,
  connectPromise: ?Promise<any>,
  connect: () => void,
  lbryFirstConnectionString: string,
  apiRequestHeaders: { [key: string]: string },
  setApiHeader: (string, string) => void,
  unsetApiHeader: string => void,
  overrides: { [string]: ?Function },
  setOverride: (string, Function) => void,

  // LbryFirst Methods
  stop: () => Promise<string>,
  status: () => Promise<StatusResponse>,
  version: () => Promise<VersionResponse>,
  upload: any => Promise<?UploadResponse>,
  hasYTAuth: string => Promise<HasYTAuthResponse>,
  ytSignup: () => Promise<YTSignupResponse>,
};
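
A sketch of the new lbry-first surface, assuming an exported LbryFirst object implementing LbryFirstTypes; the import name and the argument passed to hasYTAuth are assumptions:

```js
// @flow
// Sketch only: assumes an exported LbryFirst object implementing LbryFirstTypes.
import { LbryFirst } from 'lbry-redux';

async function checkYouTubeAuth(channelId: string): Promise<boolean> {
  const res: HasYTAuthResponse = await LbryFirst.hasYTAuth(channelId);
  return res.HashAuth;
}
```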

flow-typed/Reflector.js (vendored, new file, 5 additions)

@@ -0,0 +1,5 @@
declare type ReflectingUpdate = {
  fileListItem: FileListItem,
  progress: number | boolean,
  stalled: boolean,
};

flow-typed/Search.js (vendored, 84 deletions)

@@ -1,84 +0,0 @@
// @flow
import * as ACTIONS from 'constants/action_types';

declare type SearchSuggestion = {
  value: string,
  shorthand: string,
  type: string,
};

declare type SearchOptions = {
  // :(
  // https://github.com/facebook/flow/issues/6492
  RESULT_COUNT: number,
  CLAIM_TYPE: string,
  INCLUDE_FILES: string,
  INCLUDE_CHANNELS: string,
  INCLUDE_FILES_AND_CHANNELS: string,
  MEDIA_AUDIO: string,
  MEDIA_VIDEO: string,
  MEDIA_TEXT: string,
  MEDIA_IMAGE: string,
  MEDIA_APPLICATION: string,
};

declare type SearchState = {
  isActive: boolean,
  searchQuery: string,
  options: SearchOptions,
  suggestions: { [string]: Array<SearchSuggestion> },
  urisByQuery: {},
  resolvedResultsByQuery: {},
  resolvedResultsByQueryLastPageReached: {},
};

declare type SearchSuccess = {
  type: ACTIONS.SEARCH_SUCCESS,
  data: {
    query: string,
    uris: Array<string>,
  },
};

declare type UpdateSearchQuery = {
  type: ACTIONS.UPDATE_SEARCH_QUERY,
  data: {
    query: string,
  },
};

declare type UpdateSearchSuggestions = {
  type: ACTIONS.UPDATE_SEARCH_SUGGESTIONS,
  data: {
    query: string,
    suggestions: Array<SearchSuggestion>,
  },
};

declare type UpdateSearchOptions = {
  type: ACTIONS.UPDATE_SEARCH_OPTIONS,
  data: SearchOptions,
};

declare type ResolvedSearchResult = {
  channel: string,
  channel_claim_id: string,
  claimId: string,
  duration: number,
  fee: number,
  name: string,
  nsfw: boolean,
  release_time: string,
  thumbnail_url: string,
  title: string,
};

declare type ResolvedSearchSuccess = {
  type: ACTIONS.RESOLVED_SEARCH_SUCCESS,
  data: {
    append: boolean,
    pageSize: number,
    results: Array<ResolvedSearchResult>,
    query: string,
  },
};

flow-typed/Txo.js (vendored, new file, 27 additions)

@@ -0,0 +1,27 @@
declare type Txo = {
  amount: number,
  claim_id: string,
  normalized_name: string,
  nout: number,
  txid: string,
  type: string,
  value_type: string,
  timestamp: number,
  is_my_output: boolean,
  is_my_input: boolean,
  is_spent: boolean,
  signing_channel?: {
    channel_id: string,
  },
};

declare type TxoListParams = {
  page: number,
  page_size: number,
  type: string,
  is_my_input?: boolean,
  is_my_output?: boolean,
  is_not_my_input?: boolean,
  is_not_my_output?: boolean,
  is_spent?: boolean,
};
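
A hypothetical query built from the new TxoListParams shape:

```js
// @flow
// Hypothetical query: first page of my spent support outputs.
const params: TxoListParams = {
  page: 1,
  page_size: 50,
  type: 'support',
  is_my_output: true,
  is_spent: true,
};
// Passed to the txo_list method declared in flow-typed/Lbry.js, e.g. Lbry.txo_list(params).
```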

flow-typed/lbryURI.js (vendored, 1 change)

@@ -12,6 +12,7 @@ declare type LbryUrlObj = {
  secondaryClaimSequence?: number,
  primaryBidPosition?: number,
  secondaryBidPosition?: number,
  startTime?: number,

  // Below are considered deprecated and should not be used due to unreliableness with claim.canonical_url
  claimName?: string,

flow-typed/npm/from-entries.js (vendored, new file, 5 additions)

@@ -0,0 +1,5 @@
// @flow

declare module '@ungap/from-entries' {
  declare module.exports: any;
}

flow-typed/npm/uuid.js (vendored, new file, 5 additions)

@@ -0,0 +1,5 @@
// @flow

declare module 'uuid' {
  declare module.exports: any;
}

flow-typed/npm/uuid_v3.x.x.js (vendored, 102 deletions)

@@ -1,102 +0,0 @@
// flow-typed signature: 3cf668e64747095cab0bb360cf2fb34f
// flow-typed version: d659bd0cb8/uuid_v3.x.x/flow_>=v0.32.x

declare module "uuid" {
  declare class uuid {
    static (
      options?: {|
        random?: number[],
        rng?: () => number[] | Buffer
      |},
      buffer?: number[] | Buffer,
      offset?: number
    ): string,

    static v1(
      options?: {|
        node?: number[],
        clockseq?: number,
        msecs?: number | Date,
        nsecs?: number
      |},
      buffer?: number[] | Buffer,
      offset?: number
    ): string,

    static v4(
      options?: {|
        random?: number[],
        rng?: () => number[] | Buffer
      |},
      buffer?: number[] | Buffer,
      offset?: number
    ): string
  }
  declare module.exports: Class<uuid>;
}

declare module "uuid/v1" {
  declare class v1 {
    static (
      options?: {|
        node?: number[],
        clockseq?: number,
        msecs?: number | Date,
        nsecs?: number
      |},
      buffer?: number[] | Buffer,
      offset?: number
    ): string
  }

  declare module.exports: Class<v1>;
}

declare module "uuid/v3" {
  declare class v3 {
    static (
      name?: string | number[],
      namespace?: string | number[],
      buffer?: number[] | Buffer,
      offset?: number
    ): string,

    static name: string,
    static DNS: string,
    static URL: string
  }

  declare module.exports: Class<v3>;
}

declare module "uuid/v4" {
  declare class v4 {
    static (
      options?: {|
        random?: number[],
        rng?: () => number[] | Buffer
      |},
      buffer?: number[] | Buffer,
      offset?: number
    ): string
  }

  declare module.exports: Class<v4>;
}

declare module "uuid/v5" {
  declare class v5 {
    static (
      name?: string | number[],
      namespace?: string | number[],
      buffer?: number[] | Buffer,
      offset?: number
    ): string,

    static name: string,
    static DNS: string,
    static URL: string
  }

  declare module.exports: Class<v5>;
}
jest.config.js (new file, 8 lines)
@@ -0,0 +1,8 @@
module.exports = {
  collectCoverageFrom: ["src/**/*.{js,jsx,mjs}"],
  testMatch: ["<rootDir>/tests/**/*.test.js"],
  transform: {
    "^.+\\.(js|jsx|mjs)$": "<rootDir>/tests/config/jest-transformer.js",
  },
  transformIgnorePatterns: ["[/\\\\]node_modules[/\\\\].+\\.(js|jsx|mjs)$"]
};
package.json (12 changes)
@@ -25,14 +25,21 @@
    "dev": "rollup --config --watch",
    "precommit": "flow check && lint-staged",
    "lint": "eslint 'src/**/*.js' --fix",
    "format": "prettier 'src/**/*.{js,json}' --write"
    "format": "prettier 'src/**/*.{js,json}' --write",
    "test": "jest"
  },
  "dependencies": {
    "@ungap/from-entries": "^0.2.1",
    "proxy-polyfill": "0.1.6",
    "reselect": "^3.0.0",
    "uuid": "^3.3.2"
    "uuid": "^8.3.1"
  },
  "devDependencies": {
    "@babel/plugin-proposal-class-properties": "^7.10.4",
    "@babel/plugin-proposal-decorators": "^7.10.5",
    "@babel/plugin-transform-flow-strip-types": "^7.10.4",
    "@babel/preset-env": "^7.11.0",
    "@babel/preset-react": "^7.10.4",
    "babel-core": "^6.26.0",
    "babel-eslint": "^8.0.3",
    "babel-loader": "^7.1.4",
@@ -53,6 +60,7 @@
    "flow-bin": "^0.97.0",
    "flow-typed": "^2.5.1",
    "husky": "^0.14.3",
    "jest": "^26.4.2",
    "lint-staged": "^7.0.4",
    "prettier": "^1.4.2",
    "rollup": "^1.8.0",
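The uuid bump from ^3.3.2 to ^8.3.1 pairs with the new catch-all flow-typed/npm/uuid.js stub and the removal of the uuid_v3.x.x definition above: uuid v7+ deprecates deep imports such as uuid/v4 in favor of named exports. A sketch of the call-site change; the usage itself is illustrative.

// uuid v3 style (deep import, covered by the old flow-typed definition that is removed here):
// import uuidv4 from 'uuid/v4';
// const id = uuidv4();

// uuid v7+/v8 style (named export):
import { v4 as uuidv4 } from 'uuid';

const id = uuidv4(); // e.g. '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed'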
src/constants/abandon_states.js (new file, 4 lines)
@@ -0,0 +1,4 @@
export const PENDING = 'pending';
export const DONE = 'done';
export const READY = 'ready';
export const ERROR = 'error';
|
@ -35,10 +35,19 @@ export const GET_NEW_ADDRESS_STARTED = 'GET_NEW_ADDRESS_STARTED';
|
|||
export const GET_NEW_ADDRESS_COMPLETED = 'GET_NEW_ADDRESS_COMPLETED';
|
||||
export const FETCH_TRANSACTIONS_STARTED = 'FETCH_TRANSACTIONS_STARTED';
|
||||
export const FETCH_TRANSACTIONS_COMPLETED = 'FETCH_TRANSACTIONS_COMPLETED';
|
||||
export const FETCH_TXO_PAGE_STARTED = 'FETCH_TXO_PAGE_STARTED';
|
||||
export const FETCH_TXO_PAGE_COMPLETED = 'FETCH_TXO_PAGE_COMPLETED';
|
||||
export const FETCH_TXO_PAGE_FAILED = 'FETCH_TXO_PAGE_FAILED';
|
||||
export const UPDATE_TXO_FETCH_PARAMS = 'UPDATE_TXO_FETCH_PARAMS';
|
||||
export const FETCH_SUPPORTS_STARTED = 'FETCH_SUPPORTS_STARTED';
|
||||
export const FETCH_SUPPORTS_COMPLETED = 'FETCH_SUPPORTS_COMPLETED';
|
||||
export const ABANDON_SUPPORT_STARTED = 'ABANDON_SUPPORT_STARTED';
|
||||
export const ABANDON_SUPPORT_COMPLETED = 'ABANDON_SUPPORT_COMPLETED';
|
||||
export const ABANDON_CLAIM_SUPPORT_STARTED = 'ABANDON_CLAIM_SUPPORT_STARTED';
|
||||
export const ABANDON_CLAIM_SUPPORT_COMPLETED = 'ABANDON_CLAIM_SUPPORT_COMPLETED';
|
||||
export const ABANDON_CLAIM_SUPPORT_FAILED = 'ABANDON_CLAIM_SUPPORT_FAILED';
|
||||
export const ABANDON_CLAIM_SUPPORT_PREVIEW = 'ABANDON_CLAIM_SUPPORT_PREVIEW';
|
||||
export const PENDING_SUPPORTS_UPDATED = 'PENDING_SUPPORTS_UPDATED';
|
||||
export const UPDATE_BALANCE = 'UPDATE_BALANCE';
|
||||
export const UPDATE_TOTAL_BALANCE = 'UPDATE_TOTAL_BALANCE';
|
||||
export const CHECK_ADDRESS_IS_MINE_STARTED = 'CHECK_ADDRESS_IS_MINE_STARTED';
|
||||
|
@ -70,6 +79,16 @@ export const SET_TRANSACTION_LIST_FILTER = 'SET_TRANSACTION_LIST_FILTER';
|
|||
export const UPDATE_CURRENT_HEIGHT = 'UPDATE_CURRENT_HEIGHT';
|
||||
export const SET_DRAFT_TRANSACTION_AMOUNT = 'SET_DRAFT_TRANSACTION_AMOUNT';
|
||||
export const SET_DRAFT_TRANSACTION_ADDRESS = 'SET_DRAFT_TRANSACTION_ADDRESS';
|
||||
export const FETCH_UTXO_COUNT_STARTED = 'FETCH_UTXO_COUNT_STARTED';
|
||||
export const FETCH_UTXO_COUNT_COMPLETED = 'FETCH_UTXO_COUNT_COMPLETED';
|
||||
export const FETCH_UTXO_COUNT_FAILED = 'FETCH_UTXO_COUNT_FAILED';
|
||||
export const TIP_CLAIM_MASS_STARTED = 'TIP_CLAIM_MASS_STARTED';
|
||||
export const TIP_CLAIM_MASS_COMPLETED = 'TIP_CLAIM_MASS_COMPLETED';
|
||||
export const TIP_CLAIM_MASS_FAILED = 'TIP_CLAIM_MASS_FAILED';
|
||||
export const DO_UTXO_CONSOLIDATE_STARTED = 'DO_UTXO_CONSOLIDATE_STARTED';
|
||||
export const DO_UTXO_CONSOLIDATE_COMPLETED = 'DO_UTXO_CONSOLIDATE_COMPLETED';
|
||||
export const DO_UTXO_CONSOLIDATE_FAILED = 'DO_UTXO_CONSOLIDATE_FAILED';
|
||||
export const PENDING_CONSOLIDATED_TXOS_UPDATED = 'PENDING_CONSOLIDATED_TXOS_UPDATED';
|
||||
|
||||
// Claims
|
||||
export const RESOLVE_URIS_STARTED = 'RESOLVE_URIS_STARTED';
|
||||
|
@ -82,6 +101,10 @@ export const ABANDON_CLAIM_STARTED = 'ABANDON_CLAIM_STARTED';
|
|||
export const ABANDON_CLAIM_SUCCEEDED = 'ABANDON_CLAIM_SUCCEEDED';
|
||||
export const FETCH_CHANNEL_LIST_STARTED = 'FETCH_CHANNEL_LIST_STARTED';
|
||||
export const FETCH_CHANNEL_LIST_COMPLETED = 'FETCH_CHANNEL_LIST_COMPLETED';
|
||||
export const FETCH_CHANNEL_LIST_FAILED = 'FETCH_CHANNEL_LIST_FAILED';
|
||||
export const FETCH_COLLECTION_LIST_STARTED = 'FETCH_COLLECTION_LIST_STARTED';
|
||||
export const FETCH_COLLECTION_LIST_COMPLETED = 'FETCH_COLLECTION_LIST_COMPLETED';
|
||||
export const FETCH_COLLECTION_LIST_FAILED = 'FETCH_COLLECTION_LIST_FAILED';
|
||||
export const CREATE_CHANNEL_STARTED = 'CREATE_CHANNEL_STARTED';
|
||||
export const CREATE_CHANNEL_COMPLETED = 'CREATE_CHANNEL_COMPLETED';
|
||||
export const CREATE_CHANNEL_FAILED = 'CREATE_CHANNEL_FAILED';
|
||||
|
@ -91,6 +114,7 @@ export const UPDATE_CHANNEL_FAILED = 'UPDATE_CHANNEL_FAILED';
|
|||
export const IMPORT_CHANNEL_STARTED = 'IMPORT_CHANNEL_STARTED';
|
||||
export const IMPORT_CHANNEL_COMPLETED = 'IMPORT_CHANNEL_COMPLETED';
|
||||
export const IMPORT_CHANNEL_FAILED = 'IMPORT_CHANNEL_FAILED';
|
||||
export const CLEAR_CHANNEL_ERRORS = 'CLEAR_CHANNEL_ERRORS';
|
||||
export const PUBLISH_STARTED = 'PUBLISH_STARTED';
|
||||
export const PUBLISH_COMPLETED = 'PUBLISH_COMPLETED';
|
||||
export const PUBLISH_FAILED = 'PUBLISH_FAILED';
|
||||
|
@ -109,6 +133,38 @@ export const CLAIM_REPOST_STARTED = 'CLAIM_REPOST_STARTED';
|
|||
export const CLAIM_REPOST_COMPLETED = 'CLAIM_REPOST_COMPLETED';
|
||||
export const CLAIM_REPOST_FAILED = 'CLAIM_REPOST_FAILED';
|
||||
export const CLEAR_REPOST_ERROR = 'CLEAR_REPOST_ERROR';
|
||||
export const CHECK_PUBLISH_NAME_STARTED = 'CHECK_PUBLISH_NAME_STARTED';
|
||||
export const CHECK_PUBLISH_NAME_COMPLETED = 'CHECK_PUBLISH_NAME_COMPLETED';
|
||||
export const UPDATE_PENDING_CLAIMS = 'UPDATE_PENDING_CLAIMS';
|
||||
export const UPDATE_CONFIRMED_CLAIMS = 'UPDATE_CONFIRMED_CLAIMS';
|
||||
export const ADD_FILES_REFLECTING = 'ADD_FILES_REFLECTING';
|
||||
export const UPDATE_FILES_REFLECTING = 'UPDATE_FILES_REFLECTING';
|
||||
export const TOGGLE_CHECKING_REFLECTING = 'TOGGLE_CHECKING_REFLECTING';
|
||||
export const TOGGLE_CHECKING_PENDING = 'TOGGLE_CHECKING_PENDING';
|
||||
export const PURCHASE_LIST_STARTED = 'PURCHASE_LIST_STARTED';
|
||||
export const PURCHASE_LIST_COMPLETED = 'PURCHASE_LIST_COMPLETED';
|
||||
export const PURCHASE_LIST_FAILED = 'PURCHASE_LIST_FAILED';
|
||||
|
||||
export const COLLECTION_PUBLISH_STARTED = 'COLLECTION_PUBLISH_STARTED';
|
||||
export const COLLECTION_PUBLISH_COMPLETED = 'COLLECTION_PUBLISH_COMPLETED';
|
||||
export const COLLECTION_PUBLISH_FAILED = 'COLLECTION_PUBLISH_FAILED';
|
||||
export const COLLECTION_PUBLISH_UPDATE_STARTED = 'COLLECTION_PUBLISH_UPDATE_STARTED';
|
||||
export const COLLECTION_PUBLISH_UPDATE_COMPLETED = 'COLLECTION_PUBLISH_UPDATE_COMPLETED';
|
||||
export const COLLECTION_PUBLISH_UPDATE_FAILED = 'COLLECTION_PUBLISH_UPDATE_FAILED';
|
||||
export const COLLECTION_PUBLISH_ABANDON_STARTED = 'COLLECTION_PUBLISH_ABANDON_STARTED';
|
||||
export const COLLECTION_PUBLISH_ABANDON_COMPLETED = 'COLLECTION_PUBLISH_ABANDON_COMPLETED';
|
||||
export const COLLECTION_PUBLISH_ABANDON_FAILED = 'COLLECTION_PUBLISH_ABANDON_FAILED';
|
||||
export const CLEAR_COLLECTION_ERRORS = 'CLEAR_COLLECTION_ERRORS';
|
||||
export const COLLECTION_ITEMS_RESOLVE_STARTED = 'COLLECTION_ITEMS_RESOLVE_STARTED';
|
||||
export const COLLECTION_ITEMS_RESOLVE_COMPLETED = 'COLLECTION_ITEMS_RESOLVE_COMPLETED';
|
||||
export const COLLECTION_ITEMS_RESOLVE_FAILED = 'COLLECTION_ITEMS_RESOLVE_FAILED';
|
||||
export const COLLECTION_NEW = 'COLLECTION_NEW';
|
||||
export const COLLECTION_DELETE = 'COLLECTION_DELETE';
|
||||
export const COLLECTION_PENDING = 'COLLECTION_PENDING';
|
||||
export const COLLECTION_EDIT = 'COLLECTION_EDIT';
|
||||
export const COLLECTION_COPY = 'COLLECTION_COPY';
|
||||
export const COLLECTION_SAVE = 'COLLECTION_SAVE';
|
||||
export const COLLECTION_ERROR = 'COLLECTION_ERROR';
|
||||
|
||||
// Comments
|
||||
export const COMMENT_LIST_STARTED = 'COMMENT_LIST_STARTED';
|
||||
|
@ -148,20 +204,7 @@ export const SET_FILE_LIST_SORT = 'SET_FILE_LIST_SORT';
|
|||
export const PURCHASE_URI_STARTED = 'PURCHASE_URI_STARTED';
|
||||
export const PURCHASE_URI_COMPLETED = 'PURCHASE_URI_COMPLETED';
|
||||
export const PURCHASE_URI_FAILED = 'PURCHASE_URI_FAILED';
|
||||
export const DELETE_PURCHASED_URI = 'DELETE_PURCHASED_URI';
|
||||
|
||||
// Search
|
||||
export const SEARCH_START = 'SEARCH_START';
|
||||
export const SEARCH_SUCCESS = 'SEARCH_SUCCESS';
|
||||
export const SEARCH_FAIL = 'SEARCH_FAIL';
|
||||
export const RESOLVED_SEARCH_START = 'RESOLVED_SEARCH_START';
|
||||
export const RESOLVED_SEARCH_SUCCESS = 'RESOLVED_SEARCH_SUCCESS';
|
||||
export const RESOLVED_SEARCH_FAIL = 'RESOLVED_SEARCH_FAIL';
|
||||
export const UPDATE_SEARCH_QUERY = 'UPDATE_SEARCH_QUERY';
|
||||
export const UPDATE_SEARCH_OPTIONS = 'UPDATE_SEARCH_OPTIONS';
|
||||
export const UPDATE_SEARCH_SUGGESTIONS = 'UPDATE_SEARCH_SUGGESTIONS';
|
||||
export const SEARCH_FOCUS = 'SEARCH_FOCUS';
|
||||
export const SEARCH_BLUR = 'SEARCH_BLUR';
|
||||
export const CLEAR_PURCHASED_URI_SUCCESS = 'CLEAR_PURCHASED_URI_SUCCESS';
|
||||
|
||||
// Settings
|
||||
export const DAEMON_SETTINGS_RECEIVED = 'DAEMON_SETTINGS_RECEIVED';
|
||||
|
@ -257,13 +300,6 @@ export const FETCH_COST_INFO_STARTED = 'FETCH_COST_INFO_STARTED';
|
|||
export const FETCH_COST_INFO_COMPLETED = 'FETCH_COST_INFO_COMPLETED';
|
||||
export const FETCH_COST_INFO_FAILED = 'FETCH_COST_INFO_FAILED';
|
||||
|
||||
// Tags
|
||||
export const TOGGLE_TAG_FOLLOW = 'TOGGLE_TAG_FOLLOW';
|
||||
export const TAG_ADD = 'TAG_ADD';
|
||||
export const TAG_DELETE = 'TAG_DELETE';
|
||||
|
||||
// Blocked Channels
|
||||
export const TOGGLE_BLOCK_CHANNEL = 'TOGGLE_BLOCK_CHANNEL';
|
||||
|
||||
// Sync
|
||||
export const USER_STATE_POPULATE = 'USER_STATE_POPULATE';
|
||||
export const SYNC_FATAL_ERROR = 'SYNC_FATAL_ERROR';
|
||||
|
|
|
@@ -3,3 +3,9 @@ export const MINIMUM_PUBLISH_BID = 0.00000001;
export const CHANNEL_ANONYMOUS = 'anonymous';
export const CHANNEL_NEW = 'new';
export const PAGE_SIZE = 20;

export const LEVEL_1_STAKED_AMOUNT = 0;
export const LEVEL_2_STAKED_AMOUNT = 1;
export const LEVEL_3_STAKED_AMOUNT = 50;
export const LEVEL_4_STAKED_AMOUNT = 250;
export const LEVEL_5_STAKED_AMOUNT = 1000;
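These thresholds feed selectors such as makeSelectStakedLevelForChannelUri, exported from src/index.js later in this diff. Below is a minimal sketch of how an amount might be bucketed, under the assumption that a channel's level is the highest threshold its staked amount meets; the helper is hypothetical and not part of the library.

const LEVELS = [
  LEVEL_5_STAKED_AMOUNT, // 1000
  LEVEL_4_STAKED_AMOUNT, // 250
  LEVEL_3_STAKED_AMOUNT, // 50
  LEVEL_2_STAKED_AMOUNT, // 1
  LEVEL_1_STAKED_AMOUNT, // 0
];

// Hypothetical helper: returns 5 for >= 1000, 4 for >= 250, 3 for >= 50, 2 for >= 1, else 1.
function stakedLevel(amount) {
  const index = LEVELS.findIndex(threshold => amount >= threshold);
  return index === -1 ? 1 : 5 - index;
}

stakedLevel(300); // 4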
src/constants/collections.js (new file, 15 lines)
@@ -0,0 +1,15 @@
export const COLLECTION_ID = 'lid';
export const COLLECTION_INDEX = 'linx';

export const COL_TYPE_PLAYLIST = 'playlist';
export const COL_TYPE_CHANNELS = 'channelList';

export const WATCH_LATER_ID = 'watchlater';
export const FAVORITES_ID = 'favorites';
export const FAVORITE_CHANNELS_ID = 'favoriteChannels';
export const BUILTIN_LISTS = [WATCH_LATER_ID, FAVORITES_ID, FAVORITE_CHANNELS_ID];

export const COL_KEY_EDITED = 'edited';
export const COL_KEY_UNPUBLISHED = 'unpublished';
export const COL_KEY_PENDING = 'pending';
export const COL_KEY_SAVED = 'saved';
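A small sketch of how a consumer might use these ids: BUILTIN_LISTS marks the collections that exist locally without being published, and the check below is an assumption about intended usage rather than code taken from the library.

import * as COLLECTIONS_CONSTS from 'constants/collections';

function isBuiltinCollection(collectionId) {
  return COLLECTIONS_CONSTS.BUILTIN_LISTS.includes(collectionId);
}

isBuiltinCollection(COLLECTIONS_CONSTS.WATCH_LATER_ID); // true
isBuiltinCollection('some-published-claim-id'); // false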
|
@@ -1,19 +0,0 @@
export const SEARCH_TYPES = {
  FILE: 'file',
  CHANNEL: 'channel',
  SEARCH: 'search',
  TAG: 'tag',
};

export const SEARCH_OPTIONS = {
  RESULT_COUNT: 'size',
  CLAIM_TYPE: 'claimType',
  INCLUDE_FILES: 'file',
  INCLUDE_CHANNELS: 'channel',
  INCLUDE_FILES_AND_CHANNELS: 'file,channel',
  MEDIA_AUDIO: 'audio',
  MEDIA_VIDEO: 'video',
  MEDIA_TEXT: 'text',
  MEDIA_IMAGE: 'image',
  MEDIA_APPLICATION: 'application',
};
@@ -6,9 +6,16 @@ export const SHOW_NSFW = 'showNsfw';
export const CREDIT_REQUIRED_ACKNOWLEDGED = 'credit_required_acknowledged';
export const NEW_USER_ACKNOWLEDGED = 'welcome_acknowledged';
export const EMAIL_COLLECTION_ACKNOWLEDGED = 'email_collection_acknowledged';
export const FIRST_RUN_STARTED = 'first_run_started';
export const INVITE_ACKNOWLEDGED = 'invite_acknowledged';
export const FOLLOWING_ACKNOWLEDGED = 'following_acknowledged';
export const TAGS_ACKNOWLEDGED = 'tags_acknowledged';
export const REWARDS_ACKNOWLEDGED = 'rewards_acknowledged';
export const LANGUAGE = 'language';
export const SEARCH_IN_LANGUAGE = 'search_in_language';
export const SHOW_MATURE = 'show_mature';
export const HOMEPAGE = 'homepage';
export const HIDE_REPOSTS = 'hide_reposts';
export const SHOW_ANONYMOUS = 'show_anonymous';
export const SHOW_UNAVAILABLE = 'show_unavailable';
export const INSTANT_PURCHASE_ENABLED = 'instant_purchase_enabled';
@@ -16,16 +23,24 @@ export const INSTANT_PURCHASE_MAX = 'instant_purchase_max';
export const THEME = 'theme';
export const THEMES = 'themes';
export const AUTOMATIC_DARK_MODE_ENABLED = 'automatic_dark_mode_enabled';
export const AUTOPLAY = 'autoplay';
export const AUTOPLAY_MEDIA = 'autoplay';
export const AUTOPLAY_NEXT = 'autoplay_next';
export const OS_NOTIFICATIONS_ENABLED = 'os_notifications_enabled';
export const AUTO_DOWNLOAD = 'auto_download';
export const AUTO_LAUNCH = 'auto_launch';
export const TO_TRAY_WHEN_CLOSED = 'to_tray_when_closed';
export const SUPPORT_OPTION = 'support_option';
export const HIDE_BALANCE = 'hide_balance';
export const HIDE_SPLASH_ANIMATION = 'hide_splash_animation';
export const FLOATING_PLAYER = 'floating_player';
export const DARK_MODE_TIMES = 'dark_mode_times';
export const ENABLE_SYNC = 'enable_sync';
export const ENABLE_PUBLISH_PREVIEW = 'enable-publish-preview';
export const TILE_LAYOUT = 'tile_layout';
export const VIDEO_THEATER_MODE = 'video_theater_mode';
export const VIDEO_PLAYBACK_RATE = 'video_playback_rate';
export const CUSTOM_COMMENTS_SERVER_ENABLED = 'custom_comments_server_enabled';
export const CUSTOM_COMMENTS_SERVER_URL = 'custom_comments_server_url';

// mobile settings
export const BACKGROUND_PLAY_ENABLED = 'backgroundPlayEnabled';
@@ -8,6 +8,25 @@
 */

import * as DAEMON_SETTINGS from './daemon_settings';
import * as SETTINGS from './settings';

export const WALLET_SERVERS = DAEMON_SETTINGS.LBRYUM_SERVERS;
export const SHARE_USAGE_DATA = DAEMON_SETTINGS.SHARE_USAGE_DATA;
// DAEMON
export const SDK_SYNC_KEYS = [DAEMON_SETTINGS.LBRYUM_SERVERS, DAEMON_SETTINGS.SHARE_USAGE_DATA];

// CLIENT
export const CLIENT_SYNC_KEYS = [
  SETTINGS.SHOW_MATURE,
  SETTINGS.HIDE_REPOSTS,
  SETTINGS.SHOW_ANONYMOUS,
  SETTINGS.INSTANT_PURCHASE_ENABLED,
  SETTINGS.INSTANT_PURCHASE_MAX,
  SETTINGS.THEME,
  SETTINGS.AUTOPLAY_MEDIA,
  SETTINGS.AUTOPLAY_NEXT,
  SETTINGS.HIDE_BALANCE,
  SETTINGS.HIDE_SPLASH_ANIMATION,
  SETTINGS.FLOATING_PLAYER,
  SETTINGS.DARK_MODE_TIMES,
  SETTINGS.AUTOMATIC_DARK_MODE_ENABLED,
  SETTINGS.LANGUAGE,
];
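The key lists above make it easy to pick only the syncable subset out of a flat settings object. A minimal sketch, assuming client settings are stored as a plain { [settingKey]: value } map and that the default export of @ungap/from-entries (added to dependencies in this diff) is the fromEntries function, as its flow stub suggests.

import fromEntries from '@ungap/from-entries';
import { CLIENT_SYNC_KEYS } from 'constants/shared_preferences';

// Keep only the client settings that should be written to the shared/synced state.
function pickSyncableSettings(clientSettings) {
  return fromEntries(
    Object.entries(clientSettings).filter(([key]) => CLIENT_SYNC_KEYS.includes(key))
  );
}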
|
@ -13,18 +13,39 @@ export const DEFAULT_FOLLOWED_TAGS = [
|
|||
'technology',
|
||||
];
|
||||
|
||||
export const MATURE_TAGS = ['porn', 'nsfw', 'mature', 'xxx'];
|
||||
export const MATURE_TAGS = [
|
||||
'porn',
|
||||
'porno',
|
||||
'nsfw',
|
||||
'mature',
|
||||
'xxx',
|
||||
'sex',
|
||||
'creampie',
|
||||
'blowjob',
|
||||
'handjob',
|
||||
'vagina',
|
||||
'boobs',
|
||||
'big boobs',
|
||||
'big dick',
|
||||
'pussy',
|
||||
'cumshot',
|
||||
'anal',
|
||||
'hard fucking',
|
||||
'ass',
|
||||
'fuck',
|
||||
'hentai',
|
||||
];
|
||||
|
||||
export const DEFAULT_KNOWN_TAGS = [
|
||||
const DEFAULT_ENGLISH_KNOWN_TAGS = [
|
||||
'free speech',
|
||||
'censorship',
|
||||
'gaming',
|
||||
'pop culture',
|
||||
'Entertainment',
|
||||
'entertainment',
|
||||
'technology',
|
||||
'music',
|
||||
'funny',
|
||||
'Education',
|
||||
'education',
|
||||
'learning',
|
||||
'news',
|
||||
'gameplay',
|
||||
|
@ -32,16 +53,14 @@ export const DEFAULT_KNOWN_TAGS = [
|
|||
'beliefs',
|
||||
'comedy',
|
||||
'games',
|
||||
'sony interactive entertainment',
|
||||
'film & animation',
|
||||
'game',
|
||||
'weapons',
|
||||
"let's play",
|
||||
'blockchain',
|
||||
'video game',
|
||||
'sports',
|
||||
'walkthrough',
|
||||
'ps4live',
|
||||
'lbrytvpaidbeta',
|
||||
'art',
|
||||
'pc',
|
||||
'minecraft',
|
||||
|
@ -49,7 +68,6 @@ export const DEFAULT_KNOWN_TAGS = [
|
|||
'economics',
|
||||
'automotive',
|
||||
'play',
|
||||
'ps4share',
|
||||
'tutorial',
|
||||
'twitch',
|
||||
'how to',
|
||||
|
@ -60,17 +78,16 @@ export const DEFAULT_KNOWN_TAGS = [
|
|||
'lets play',
|
||||
'fun',
|
||||
'politics',
|
||||
'xbox',
|
||||
'autos & vehicles',
|
||||
'Travel & Events',
|
||||
'travel',
|
||||
'food',
|
||||
'science',
|
||||
'xbox one',
|
||||
'xbox',
|
||||
'liberal',
|
||||
'democrat',
|
||||
'progressive',
|
||||
'survival',
|
||||
'Nonprofits & Activism',
|
||||
'non-profits',
|
||||
'activism',
|
||||
'cryptocurrency',
|
||||
'playstation',
|
||||
'nintendo',
|
||||
|
@ -125,7 +142,6 @@ export const DEFAULT_KNOWN_TAGS = [
|
|||
'lol',
|
||||
'sony',
|
||||
'god',
|
||||
"let's",
|
||||
'dance',
|
||||
'pvp',
|
||||
'tech',
|
||||
|
@ -133,12 +149,10 @@ export const DEFAULT_KNOWN_TAGS = [
|
|||
'zombies',
|
||||
'fail',
|
||||
'film',
|
||||
'xbox 360',
|
||||
'xbox360',
|
||||
'animation',
|
||||
'unboxing',
|
||||
'money',
|
||||
'how',
|
||||
'travel',
|
||||
'wwe',
|
||||
'mods',
|
||||
'indie',
|
||||
|
@ -146,7 +160,6 @@ export const DEFAULT_KNOWN_TAGS = [
|
|||
'ios',
|
||||
'history',
|
||||
'rap',
|
||||
'sony computer entertainment',
|
||||
'mobile',
|
||||
'trump',
|
||||
'hack',
|
||||
|
@ -170,7 +183,7 @@ export const DEFAULT_KNOWN_TAGS = [
|
|||
'mining',
|
||||
'daily',
|
||||
'diy',
|
||||
'pets & animals',
|
||||
'pets',
|
||||
'videogame',
|
||||
'death',
|
||||
'funny moments',
|
||||
|
@ -198,14 +211,12 @@ export const DEFAULT_KNOWN_TAGS = [
|
|||
'house',
|
||||
'fire',
|
||||
'bass',
|
||||
'bitcoin news',
|
||||
'truth',
|
||||
'crash',
|
||||
'mario',
|
||||
'league of legends',
|
||||
'wii',
|
||||
'mmorpg',
|
||||
'grand theft auto v',
|
||||
'health',
|
||||
'marvel',
|
||||
'racing',
|
||||
|
@ -239,7 +250,6 @@ export const DEFAULT_KNOWN_TAGS = [
|
|||
'dota 2',
|
||||
'studio',
|
||||
'star wars',
|
||||
'gta 5',
|
||||
'shooting',
|
||||
'nasa',
|
||||
'rock',
|
||||
|
@ -272,7 +282,6 @@ export const DEFAULT_KNOWN_TAGS = [
|
|||
'world of warcraft',
|
||||
'industry',
|
||||
'cartoon',
|
||||
'crypto news',
|
||||
'garden',
|
||||
'animals',
|
||||
'windows',
|
||||
|
@ -286,7 +295,6 @@ export const DEFAULT_KNOWN_TAGS = [
|
|||
'parody',
|
||||
'rv',
|
||||
'beats',
|
||||
'fortnite battle royale',
|
||||
'building',
|
||||
'disney',
|
||||
'drone',
|
||||
|
@ -319,7 +327,6 @@ export const DEFAULT_KNOWN_TAGS = [
|
|||
'canon',
|
||||
'microsoft',
|
||||
'camping',
|
||||
'cryptocurrency news',
|
||||
'ufo',
|
||||
'progressive talk',
|
||||
'switch',
|
||||
|
@ -355,7 +362,6 @@ export const DEFAULT_KNOWN_TAGS = [
|
|||
'manga',
|
||||
'howto',
|
||||
'insane',
|
||||
'xbox360',
|
||||
'press',
|
||||
'special',
|
||||
'church',
|
||||
|
@ -372,7 +378,7 @@ export const DEFAULT_KNOWN_TAGS = [
|
|||
'sound',
|
||||
'christ',
|
||||
'duty',
|
||||
'Juvenile fiction',
|
||||
'juvenile fiction',
|
||||
'pc game',
|
||||
'how-to',
|
||||
'ww2',
|
||||
|
@ -411,7 +417,6 @@ export const DEFAULT_KNOWN_TAGS = [
|
|||
'style',
|
||||
'travel trailer',
|
||||
'rda',
|
||||
'5859dfec-026f-46ba-bea0-02bf43aa1a6f',
|
||||
'gun',
|
||||
'secret',
|
||||
'far cry 5',
|
||||
|
@ -452,8 +457,6 @@ export const DEFAULT_KNOWN_TAGS = [
|
|||
'capcom',
|
||||
'rta',
|
||||
'discord',
|
||||
'action role-playing game',
|
||||
'playthrough part',
|
||||
'batman',
|
||||
'player',
|
||||
'server',
|
||||
|
@ -494,7 +497,7 @@ export const DEFAULT_KNOWN_TAGS = [
|
|||
'paladins',
|
||||
'warrior',
|
||||
'creepypasta',
|
||||
'role-playing video game',
|
||||
'role-playing',
|
||||
'solar',
|
||||
'vr',
|
||||
'animal',
|
||||
|
@ -503,7 +506,7 @@ export const DEFAULT_KNOWN_TAGS = [
|
|||
'dota',
|
||||
'audio',
|
||||
'mass effect',
|
||||
'Humour',
|
||||
'humour',
|
||||
'first look',
|
||||
'videogames',
|
||||
'future bass',
|
||||
|
@ -513,4 +516,32 @@ export const DEFAULT_KNOWN_TAGS = [
|
|||
'dantdm',
|
||||
'teaser',
|
||||
'lbry',
|
||||
'coronavirus',
|
||||
'2020protests',
|
||||
'covidcuts',
|
||||
'covid-19',
|
||||
'LBRYFoundationBoardCandidacy',
|
||||
'helplbrysavecrypto'
|
||||
];
|
||||
|
||||
const DEFAULT_SPANISH_KNOWN_TAGS = [
|
||||
'español',
|
||||
'tecnología',
|
||||
'criptomonedas',
|
||||
'economía',
|
||||
'bitcoin',
|
||||
'educación',
|
||||
'videojuegos',
|
||||
'música',
|
||||
'noticias',
|
||||
'ciencia',
|
||||
'deportes',
|
||||
'latinoamérica',
|
||||
'latam',
|
||||
'conspiración',
|
||||
'humor',
|
||||
'política',
|
||||
'tutoriales',
|
||||
];
|
||||
|
||||
export const DEFAULT_KNOWN_TAGS = [...DEFAULT_ENGLISH_KNOWN_TAGS, ...DEFAULT_SPANISH_KNOWN_TAGS];
|
||||
|
|
src/constants/txo_list.js (new file, 36 lines)
@@ -0,0 +1,36 @@
export const ACTIVE = 'active'; // spent, active, all
export const TYPE = 'type'; // all, payment, support, channel, stream, repost
export const SUB_TYPE = 'subtype'; // other, purchase, tip
export const PAGE_SIZE = 'page_size';
export const PAGE = 'page';
export const ALL = 'all';
// dropdown types
export const SENT = 'sent';
export const RECEIVED = 'received';
export const SUPPORT = 'support';
export const CHANNEL = 'channel';
export const PUBLISH = 'publish';
export const REPOST = 'repost';
export const DROPDOWN_TYPES = [ALL, SENT, RECEIVED, SUPPORT, CHANNEL, PUBLISH, REPOST];
// dropdown subtypes
export const TIP = 'tip';
export const PURCHASE = 'purchase';
export const PAYMENT = 'payment';
export const DROPDOWN_SUBTYPES = [ALL, TIP, PURCHASE, PAYMENT];

// rpc params
export const TX_TYPE = 'type'; // = other, stream, repost, channel, support, purchase
export const IS_SPENT = 'is_spent';
export const IS_NOT_SPENT = 'is_not_spent';
export const IS_MY_INPUT = 'is_my_input';
export const IS_MY_OUTPUT = 'is_my_output';
export const IS_NOT_MY_INPUT = 'is_not_my_input';
export const IS_NOT_MY_OUTPUT = 'is_not_my_output'; // use to further distinguish payments to self / from self.
export const IS_MY_INPUT_OR_OUTPUT = 'is_my_input_or_output';
export const EXCLUDE_INTERNAL_TRANSFERS = 'exclude_internal_transfers';

// sdk unique types
export const OTHER = 'other';
export const STREAM = 'stream';

export const PAGE_SIZE_DEFAULT = 20;
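Several of these strings double as UI dropdown values and txo_list RPC parameter names, so a fetch can be assembled with computed keys. A minimal sketch; the particular combination of flags is illustrative and not necessarily the exact mapping the apps use.

import * as TXO_LIST from 'constants/txo_list';

// Resulting object: { page: 1, page_size: 20, type: 'support', is_not_spent: true }
const params = {
  [TXO_LIST.PAGE]: 1,
  [TXO_LIST.PAGE_SIZE]: TXO_LIST.PAGE_SIZE_DEFAULT,
  [TXO_LIST.TX_TYPE]: TXO_LIST.SUPPORT,
  [TXO_LIST.IS_NOT_SPENT]: true,
};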
195
src/index.js
195
src/index.js
|
@ -7,13 +7,15 @@ import * as SORT_OPTIONS from 'constants/sort_options';
|
|||
import * as THUMBNAIL_STATUSES from 'constants/thumbnail_upload_statuses';
|
||||
import * as TRANSACTIONS from 'constants/transaction_types';
|
||||
import * as TX_LIST from 'constants/transaction_list';
|
||||
import * as ABANDON_STATES from 'constants/abandon_states';
|
||||
import * as TXO_LIST from 'constants/txo_list';
|
||||
import * as SPEECH_URLS from 'constants/speech_urls';
|
||||
import * as DAEMON_SETTINGS from 'constants/daemon_settings';
|
||||
import * as SHARED_PREFERENCES from 'constants/shared_preferences';
|
||||
import { SEARCH_TYPES, SEARCH_OPTIONS } from 'constants/search';
|
||||
import * as COLLECTIONS_CONSTS from 'constants/collections';
|
||||
import { DEFAULT_KNOWN_TAGS, DEFAULT_FOLLOWED_TAGS, MATURE_TAGS } from 'constants/tags';
|
||||
import Lbry, { apiCall } from 'lbry';
|
||||
import { selectState as selectSearchState } from 'redux/selectors/search';
|
||||
import LbryFirst from 'lbry-first';
|
||||
|
||||
// constants
|
||||
export {
|
||||
|
@ -21,12 +23,12 @@ export {
|
|||
CLAIM_VALUES,
|
||||
LICENSES,
|
||||
THUMBNAIL_STATUSES,
|
||||
SEARCH_TYPES,
|
||||
SEARCH_OPTIONS,
|
||||
SETTINGS,
|
||||
DAEMON_SETTINGS,
|
||||
TRANSACTIONS,
|
||||
TX_LIST,
|
||||
TXO_LIST,
|
||||
ABANDON_STATES,
|
||||
SORT_OPTIONS,
|
||||
PAGES,
|
||||
DEFAULT_KNOWN_TAGS,
|
||||
|
@ -34,10 +36,12 @@ export {
|
|||
MATURE_TAGS,
|
||||
SPEECH_URLS,
|
||||
SHARED_PREFERENCES,
|
||||
COLLECTIONS_CONSTS,
|
||||
};
|
||||
|
||||
// common
|
||||
export { Lbry, apiCall };
|
||||
export { LbryFirst };
|
||||
export {
|
||||
regexInvalidURI,
|
||||
regexAddress,
|
||||
|
@ -48,6 +52,8 @@ export {
|
|||
isURIClaimable,
|
||||
isNameValid,
|
||||
convertToShareLink,
|
||||
splitBySeparator,
|
||||
isURIEqual,
|
||||
} from 'lbryURI';
|
||||
|
||||
// middlware
|
||||
|
@ -55,28 +61,43 @@ export { buildSharedStateMiddleware } from 'redux/middleware/shared-state';
|
|||
|
||||
// actions
|
||||
export { doToast, doDismissToast, doError, doDismissError } from 'redux/actions/notifications';
|
||||
export {
|
||||
doLocalCollectionCreate,
|
||||
doFetchItemsInCollection,
|
||||
doFetchItemsInCollections,
|
||||
doCollectionEdit,
|
||||
doCollectionDelete,
|
||||
} from 'redux/actions/collections';
|
||||
|
||||
export {
|
||||
doFetchClaimsByChannel,
|
||||
doFetchClaimListMine,
|
||||
doAbandonClaim,
|
||||
doAbandonTxo,
|
||||
doResolveUris,
|
||||
doResolveUri,
|
||||
doFetchChannelListMine,
|
||||
doFetchCollectionListMine,
|
||||
doCreateChannel,
|
||||
doUpdateChannel,
|
||||
doClaimSearch,
|
||||
doImportChannel,
|
||||
doRepost,
|
||||
doClearRepostError,
|
||||
doClearChannelErrors,
|
||||
doCheckPublishNameAvailability,
|
||||
doPurchaseList,
|
||||
doCheckPendingClaims,
|
||||
doCollectionPublish,
|
||||
doCollectionPublishUpdate,
|
||||
} from 'redux/actions/claims';
|
||||
|
||||
export { doDeletePurchasedUri, doPurchaseUri, doFileGet } from 'redux/actions/file';
|
||||
export { doClearPurchasedUriSuccess, doPurchaseUri, doFileGet } from 'redux/actions/file';
|
||||
|
||||
export {
|
||||
doFetchFileInfo,
|
||||
doFileList,
|
||||
doFetchFileInfosAndPublishedClaims,
|
||||
doFetchFileInfos,
|
||||
doSetFileListSort,
|
||||
} from 'redux/actions/file_info';
|
||||
|
||||
|
@ -87,25 +108,17 @@ export {
|
|||
doUploadThumbnail,
|
||||
doPrepareEdit,
|
||||
doPublish,
|
||||
doCheckPendingPublishes,
|
||||
doCheckReflectingFiles,
|
||||
} from 'redux/actions/publish';
|
||||
|
||||
export {
|
||||
doSearch,
|
||||
doResolvedSearch,
|
||||
doUpdateSearchQuery,
|
||||
doFocusSearchInput,
|
||||
doBlurSearchInput,
|
||||
setSearchApi,
|
||||
doUpdateSearchOptions,
|
||||
} from 'redux/actions/search';
|
||||
|
||||
export { savePosition } from 'redux/actions/content';
|
||||
|
||||
export {
|
||||
doUpdateBalance,
|
||||
doBalanceSubscribe,
|
||||
doFetchTransactions,
|
||||
doFetchTxoPage,
|
||||
doUpdateTxoPageParams,
|
||||
doGetNewAddress,
|
||||
doCheckAddressIsMine,
|
||||
doSendDraftTransaction,
|
||||
|
@ -120,20 +133,12 @@ export {
|
|||
doSetTransactionListFilter,
|
||||
doUpdateBlockHeight,
|
||||
doClearSupport,
|
||||
doSupportAbandonForClaim,
|
||||
doFetchUtxoCounts,
|
||||
doUtxoConsolidate,
|
||||
doTipClaimMass,
|
||||
} from 'redux/actions/wallet';
|
||||
|
||||
export { doToggleTagFollow, doAddTag, doDeleteTag } from 'redux/actions/tags';
|
||||
|
||||
export {
|
||||
doCommentList,
|
||||
doCommentCreate,
|
||||
doCommentAbandon,
|
||||
doCommentHide,
|
||||
doCommentUpdate,
|
||||
} from 'redux/actions/comments';
|
||||
|
||||
export { doToggleBlockChannel } from 'redux/actions/blocked';
|
||||
|
||||
export { doPopulateSharedUserState, doPreferenceGet, doPreferenceSet } from 'redux/actions/sync';
|
||||
|
||||
// utils
|
||||
|
@ -144,29 +149,44 @@ export { isClaimNsfw, createNormalizedClaimSearchKey } from 'util/claim';
|
|||
|
||||
// reducers
|
||||
export { claimsReducer } from 'redux/reducers/claims';
|
||||
export { commentReducer } from 'redux/reducers/comments';
|
||||
export { contentReducer } from 'redux/reducers/content';
|
||||
export { fileInfoReducer } from 'redux/reducers/file_info';
|
||||
export { fileReducer } from 'redux/reducers/file';
|
||||
export { notificationsReducer } from 'redux/reducers/notifications';
|
||||
export { publishReducer } from 'redux/reducers/publish';
|
||||
export { searchReducer } from 'redux/reducers/search';
|
||||
export { tagsReducer } from 'redux/reducers/tags';
|
||||
export { blockedReducer } from 'redux/reducers/blocked';
|
||||
export { walletReducer } from 'redux/reducers/wallet';
|
||||
export { collectionsReducer } from 'redux/reducers/collections';
|
||||
|
||||
// selectors
|
||||
export { makeSelectContentPositionForUri } from 'redux/selectors/content';
|
||||
|
||||
export { selectToast, selectError } from 'redux/selectors/notifications';
|
||||
|
||||
export {
|
||||
selectFailedPurchaseUris,
|
||||
selectPurchasedUris,
|
||||
selectPurchaseUriErrorMessage,
|
||||
selectLastPurchasedUri,
|
||||
makeSelectStreamingUrlForUri,
|
||||
} from 'redux/selectors/file';
|
||||
selectSavedCollectionIds,
|
||||
selectBuiltinCollections,
|
||||
selectResolvedCollections,
|
||||
selectMyUnpublishedCollections,
|
||||
selectMyEditedCollections,
|
||||
selectMyPublishedCollections,
|
||||
selectMyPublishedMixedCollections,
|
||||
selectMyPublishedPlaylistCollections,
|
||||
makeSelectEditedCollectionForId,
|
||||
makeSelectPendingCollectionForId,
|
||||
makeSelectPublishedCollectionForId,
|
||||
makeSelectCollectionIsMine,
|
||||
makeSelectMyPublishedCollectionForId,
|
||||
makeSelectUnpublishedCollectionForId,
|
||||
makeSelectCollectionForId,
|
||||
makeSelectClaimUrlInCollection,
|
||||
makeSelectUrlsForCollectionId,
|
||||
makeSelectClaimIdsForCollectionId,
|
||||
makeSelectNameForCollectionId,
|
||||
makeSelectCountForCollectionId,
|
||||
makeSelectIsResolvingCollectionForId,
|
||||
makeSelectIndexForUrlInCollection,
|
||||
makeSelectPreviousUrlForCollectionAndUrl,
|
||||
makeSelectNextUrlForCollectionAndUrl,
|
||||
makeSelectCollectionForIdHasClaimUrl,
|
||||
} from 'redux/selectors/collections';
|
||||
|
||||
export {
|
||||
makeSelectClaimForUri,
|
||||
|
@ -182,27 +202,37 @@ export {
|
|||
makeSelectTitleForUri,
|
||||
makeSelectDateForUri,
|
||||
makeSelectAmountForUri,
|
||||
makeSelectEffectiveAmountForUri,
|
||||
makeSelectTagsForUri,
|
||||
makeSelectTagInClaimOrChannelForUri,
|
||||
makeSelectTotalStakedAmountForChannelUri,
|
||||
makeSelectStakedLevelForChannelUri,
|
||||
makeSelectContentTypeForUri,
|
||||
makeSelectIsUriResolving,
|
||||
makeSelectPendingClaimForUri,
|
||||
makeSelectTotalItemsForChannel,
|
||||
makeSelectTotalPagesForChannel,
|
||||
makeSelectNsfwCountFromUris,
|
||||
makeSelectNsfwCountForChannel,
|
||||
makeSelectOmittedCountForChannel,
|
||||
makeSelectClaimIsNsfw,
|
||||
makeSelectRecommendedContentForUri,
|
||||
makeSelectResolvedRecommendedContentForUri,
|
||||
makeSelectFirstRecommendedFileForUri,
|
||||
makeSelectChannelForClaimUri,
|
||||
makeSelectChannelPermUrlForClaimUri,
|
||||
makeSelectMyChannelPermUrlForName,
|
||||
makeSelectClaimIsPending,
|
||||
makeSelectPendingByUri,
|
||||
makeSelectClaimsInChannelForCurrentPageState,
|
||||
makeSelectReflectingClaimForUri,
|
||||
makeSelectShortUrlForUri,
|
||||
makeSelectCanonicalUrlForUri,
|
||||
makeSelectPermanentUrlForUri,
|
||||
makeSelectSupportsForUri,
|
||||
selectPendingById,
|
||||
makeSelectMyPurchasesForPage,
|
||||
makeSelectClaimWasPurchased,
|
||||
makeSelectAbandoningClaimById,
|
||||
makeSelectIsAbandoningClaimForUri,
|
||||
makeSelectClaimHasSource,
|
||||
makeSelectClaimIsStreamPlaceholder,
|
||||
selectPendingIds,
|
||||
selectReflectingById,
|
||||
makeSelectClaimForClaimId,
|
||||
selectClaimsById,
|
||||
selectClaimsByUri,
|
||||
selectAllClaimsByChannel,
|
||||
|
@ -211,13 +241,16 @@ export {
|
|||
selectMyActiveClaims,
|
||||
selectAllFetchingChannelClaims,
|
||||
selectIsFetchingClaimListMine,
|
||||
selectPendingClaims,
|
||||
selectMyClaims,
|
||||
selectPendingClaims,
|
||||
selectMyClaimsWithoutChannels,
|
||||
selectMyChannelUrls,
|
||||
selectMyClaimUrisWithoutChannels,
|
||||
selectAllMyClaimsByOutpoint,
|
||||
selectMyClaimsOutpoints,
|
||||
selectFetchingMyChannels,
|
||||
selectFetchingMyCollections,
|
||||
selectMyCollectionIds,
|
||||
selectMyChannelClaims,
|
||||
selectResolvingUris,
|
||||
selectPlayingUri,
|
||||
|
@ -236,10 +269,24 @@ export {
|
|||
selectMyStreamUrlsCount,
|
||||
selectRepostError,
|
||||
selectRepostLoading,
|
||||
selectClaimIdsByUri,
|
||||
selectMyClaimsPage,
|
||||
selectMyClaimsPageNumber,
|
||||
selectMyClaimsPageItemCount,
|
||||
selectFetchingMyClaimsPageError,
|
||||
selectMyPurchases,
|
||||
selectIsFetchingMyPurchases,
|
||||
selectFetchingMyPurchasesError,
|
||||
selectMyPurchasesCount,
|
||||
selectPurchaseUriSuccess,
|
||||
makeSelectClaimIdForUri,
|
||||
selectUpdatingCollection,
|
||||
selectUpdateCollectionError,
|
||||
selectCreatingCollection,
|
||||
selectCreateCollectionError,
|
||||
makeSelectClaimIdIsPending,
|
||||
} from 'redux/selectors/claims';
|
||||
|
||||
export { makeSelectCommentsForUri } from 'redux/selectors/comments';
|
||||
|
||||
export {
|
||||
makeSelectFileInfoForUri,
|
||||
makeSelectDownloadingForUri,
|
||||
|
@ -263,6 +310,7 @@ export {
|
|||
makeSelectSearchDownloadUrlsForPage,
|
||||
makeSelectSearchDownloadUrlsCount,
|
||||
selectDownloadUrlsCount,
|
||||
makeSelectStreamingUrlForUri,
|
||||
} from 'redux/selectors/file_info';
|
||||
|
||||
export {
|
||||
|
@ -274,22 +322,6 @@ export {
|
|||
selectTakeOverAmount,
|
||||
} from 'redux/selectors/publish';
|
||||
|
||||
export { selectSearchState };
|
||||
export {
|
||||
makeSelectSearchUris,
|
||||
makeSelectResolvedSearchResults,
|
||||
makeSelectResolvedSearchResultsLastPageReached,
|
||||
selectSearchValue,
|
||||
selectSearchOptions,
|
||||
selectIsSearching,
|
||||
selectResolvedSearchResultsByQuery,
|
||||
selectResolvedSearchResultsByQueryLastPageReached,
|
||||
selectSearchUrisByQuery,
|
||||
selectSearchBarFocused,
|
||||
selectSearchSuggestions,
|
||||
makeSelectQueryWithOptions,
|
||||
} from 'redux/selectors/search';
|
||||
|
||||
export {
|
||||
selectBalance,
|
||||
selectTotalBalance,
|
||||
|
@ -301,6 +333,7 @@ export {
|
|||
selectSupportsByOutpoint,
|
||||
selectTotalSupports,
|
||||
selectTransactionItems,
|
||||
selectTransactionsFile,
|
||||
selectRecentTransactions,
|
||||
selectHasTransactions,
|
||||
selectIsFetchingTransactions,
|
||||
|
@ -325,20 +358,24 @@ export {
|
|||
selectWalletUnlockResult,
|
||||
selectTransactionListFilter,
|
||||
selectFilteredTransactions,
|
||||
selectTxoPageParams,
|
||||
selectTxoPage,
|
||||
selectTxoPageNumber,
|
||||
selectTxoItemCount,
|
||||
selectIsFetchingTxos,
|
||||
selectFetchingTxosError,
|
||||
makeSelectLatestTransactions,
|
||||
makeSelectFilteredTransactionsForPage,
|
||||
selectFilteredTransactionCount,
|
||||
selectIsWalletReconnecting,
|
||||
selectPendingSupportTransactions,
|
||||
selectAbandonClaimSupportError,
|
||||
makeSelectPendingAmountByUri,
|
||||
selectIsFetchingUtxoCounts,
|
||||
selectIsConsolidatingUtxos,
|
||||
selectIsMassClaimingTips,
|
||||
selectUtxoCounts,
|
||||
selectPendingOtherTransactions,
|
||||
selectPendingConsolidateTxid,
|
||||
selectPendingMassClaimTxid,
|
||||
} from 'redux/selectors/wallet';
|
||||
|
||||
export {
|
||||
selectFollowedTags,
|
||||
selectUnfollowedTags,
|
||||
makeSelectIsFollowingTag,
|
||||
} from 'redux/selectors/tags';
|
||||
|
||||
export {
|
||||
selectBlockedChannels,
|
||||
selectChannelIsBlocked,
|
||||
selectBlockedChannelsCount,
|
||||
} from 'redux/selectors/blocked';
|
||||
|
|
183
src/lbry-first.js
Normal file
183
src/lbry-first.js
Normal file
|
@ -0,0 +1,183 @@
|
|||
// @flow
|
||||
import 'proxy-polyfill';
|
||||
|
||||
const CHECK_LBRYFIRST_STARTED_TRY_NUMBER = 200;
|
||||
//
|
||||
// Basic LBRYFIRST connection config
|
||||
// Offers a proxy to call LBRYFIRST methods
|
||||
|
||||
//
|
||||
const LbryFirst: LbryFirstTypes = {
|
||||
isConnected: false,
|
||||
connectPromise: null,
|
||||
lbryFirstConnectionString: 'http://localhost:1337/rpc',
|
||||
apiRequestHeaders: { 'Content-Type': 'application/json' },
|
||||
|
||||
// Allow overriding lbryFirst connection string (e.g. to `/api/proxy` for lbryweb)
|
||||
setLbryFirstConnectionString: (value: string) => {
|
||||
LbryFirst.lbryFirstConnectionString = value;
|
||||
},
|
||||
|
||||
setApiHeader: (key: string, value: string) => {
|
||||
LbryFirst.apiRequestHeaders = Object.assign(LbryFirst.apiRequestHeaders, { [key]: value });
|
||||
},
|
||||
|
||||
  unsetApiHeader: key => {
    Object.keys(LbryFirst.apiRequestHeaders).includes(key) &&
      delete LbryFirst.apiRequestHeaders[key];
  },
|
||||
// Allow overriding Lbry methods
|
||||
overrides: {},
|
||||
setOverride: (methodName, newMethod) => {
|
||||
LbryFirst.overrides[methodName] = newMethod;
|
||||
},
|
||||
getApiRequestHeaders: () => LbryFirst.apiRequestHeaders,
|
||||
|
||||
//
|
||||
// LbryFirst Methods
|
||||
//
|
||||
status: (params = {}) => lbryFirstCallWithResult('status', params),
|
||||
stop: () => lbryFirstCallWithResult('stop', {}),
|
||||
version: () => lbryFirstCallWithResult('version', {}),
|
||||
|
||||
// Upload to youtube
|
||||
upload: (params: { title: string, description: string, file_path: ?string } = {}) => {
|
||||
// Only upload when originally publishing for now
|
||||
if (!params.file_path) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
const uploadParams: {
|
||||
Title: string,
|
||||
Description: string,
|
||||
FilePath: string,
|
||||
Category: string,
|
||||
Keywords: string,
|
||||
} = {
|
||||
Title: params.title,
|
||||
Description: params.description,
|
||||
FilePath: params.file_path,
|
||||
Category: '',
|
||||
Keywords: '',
|
||||
};
|
||||
|
||||
return lbryFirstCallWithResult('youtube.Upload', uploadParams);
|
||||
},
|
||||
|
||||
hasYTAuth: (token: string) => {
|
||||
const hasYTAuthParams = {};
|
||||
hasYTAuthParams.AuthToken = token;
|
||||
return lbryFirstCallWithResult('youtube.HasAuth', hasYTAuthParams);
|
||||
},
|
||||
|
||||
ytSignup: () => {
|
||||
const emptyParams = {};
|
||||
return lbryFirstCallWithResult('youtube.Signup', emptyParams);
|
||||
},
|
||||
|
||||
remove: () => {
|
||||
const emptyParams = {};
|
||||
return lbryFirstCallWithResult('youtube.Remove', emptyParams);
|
||||
},
|
||||
|
||||
// Connect to lbry-first
|
||||
connect: () => {
|
||||
if (LbryFirst.connectPromise === null) {
|
||||
LbryFirst.connectPromise = new Promise((resolve, reject) => {
|
||||
let tryNum = 0;
|
||||
// Check every half second to see if the lbryFirst is accepting connections
|
||||
function checkLbryFirstStarted() {
|
||||
tryNum += 1;
|
||||
LbryFirst.status()
|
||||
.then(resolve)
|
||||
.catch(() => {
|
||||
if (tryNum <= CHECK_LBRYFIRST_STARTED_TRY_NUMBER) {
|
||||
setTimeout(checkLbryFirstStarted, tryNum < 50 ? 400 : 1000);
|
||||
} else {
|
||||
reject(new Error('Unable to connect to LBRY'));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
checkLbryFirstStarted();
|
||||
});
|
||||
}
|
||||
|
||||
// Flow thinks this could be empty, but it will always return a promise
|
||||
// $FlowFixMe
|
||||
return LbryFirst.connectPromise;
|
||||
},
|
||||
};
|
||||
|
||||
function checkAndParse(response) {
|
||||
if (response.status >= 200 && response.status < 300) {
|
||||
return response.json();
|
||||
}
|
||||
return response.json().then(json => {
|
||||
let error;
|
||||
if (json.error) {
|
||||
const errorMessage = typeof json.error === 'object' ? json.error.message : json.error;
|
||||
error = new Error(errorMessage);
|
||||
} else {
|
||||
error = new Error('Protocol error with unknown response signature');
|
||||
}
|
||||
return Promise.reject(error);
|
||||
});
|
||||
}
|
||||
|
||||
export function apiCall(method: string, params: ?{}, resolve: Function, reject: Function) {
|
||||
const counter = new Date().getTime();
|
||||
const paramsArray = [params];
|
||||
const options = {
|
||||
method: 'POST',
|
||||
headers: LbryFirst.apiRequestHeaders,
|
||||
body: JSON.stringify({
|
||||
jsonrpc: '2.0',
|
||||
method,
|
||||
params: paramsArray,
|
||||
id: counter,
|
||||
}),
|
||||
};
|
||||
|
||||
return fetch(LbryFirst.lbryFirstConnectionString, options)
|
||||
.then(checkAndParse)
|
||||
.then(response => {
|
||||
const error = response.error || (response.result && response.result.error);
|
||||
|
||||
if (error) {
|
||||
return reject(error);
|
||||
}
|
||||
return resolve(response.result);
|
||||
})
|
||||
.catch(reject);
|
||||
}
|
||||
|
||||
function lbryFirstCallWithResult(name: string, params: ?{} = {}) {
|
||||
return new Promise((resolve, reject) => {
|
||||
apiCall(
|
||||
name,
|
||||
params,
|
||||
result => {
|
||||
resolve(result);
|
||||
},
|
||||
reject
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
// This is only for a fallback
|
||||
// If there is a LbryFirst method that is being called by an app, it should be added to /flow-typed/LbryFirst.js
|
||||
const lbryFirstProxy = new Proxy(LbryFirst, {
|
||||
get(target: LbryFirstTypes, name: string) {
|
||||
if (name in target) {
|
||||
return target[name];
|
||||
}
|
||||
|
||||
return (params = {}) =>
|
||||
new Promise((resolve, reject) => {
|
||||
apiCall(name, params, resolve, reject);
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
export default lbryFirstProxy;
|
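For context, a minimal sketch of driving the proxy above from an app. The method names (connect, hasYTAuth, ytSignup, upload) are the ones defined in this file; the parameter values, the assumption that hasYTAuth resolves to a boolean-like result, and the surrounding flow are invented.

import LbryFirst from 'lbry-first';

async function publishToYoutube(authToken, filePath) {
  // Wait until the lbry-first service answers `status` (polled by connect()).
  await LbryFirst.connect();

  const hasAuth = await LbryFirst.hasYTAuth(authToken);
  if (!hasAuth) {
    await LbryFirst.ytSignup();
  }

  // upload() resolves immediately when no file_path is given (edits are not re-uploaded).
  return LbryFirst.upload({
    title: 'My video',
    description: 'Published via lbry-first',
    file_path: filePath,
  });
}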
src/lbry.js (16 changes)
@@ -11,6 +11,8 @@ const Lbry: LbryTypes = {
  isConnected: false,
  connectPromise: null,
  daemonConnectionString: 'http://localhost:5279',
  alternateConnectionString: '',
  methodsUsingAlternateConnectionString: [],
  apiRequestHeaders: { 'Content-Type': 'application/json-rpc' },

  // Allow overriding daemon connection string (e.g. to `/api/proxy` for lbryweb)
@@ -38,7 +40,7 @@ const Lbry: LbryTypes = {
    const formats = [
      [/\.(mp4|m4v|webm|flv|f4v|ogv)$/i, 'video'],
      [/\.(mp3|m4a|aac|wav|flac|ogg|opus)$/i, 'audio'],
      [/\.(jpeg|jpg|png|gif|svg)$/i, 'image'],
      [/\.(jpeg|jpg|png|gif|svg|webp)$/i, 'image'],
      [/\.(h|go|ja|java|js|jsx|c|cpp|cs|css|rb|scss|sh|php|py)$/i, 'script'],
      [/\.(html|json|csv|txt|log|md|markdown|docx|pdf|xml|yml|yaml)$/i, 'document'],
      [/\.(pdf|odf|doc|docx|epub|org|rtf)$/i, 'e-book'],
@@ -84,9 +86,14 @@ const Lbry: LbryTypes = {
  stream_abandon: params => daemonCallWithResult('stream_abandon', params),
  stream_list: params => daemonCallWithResult('stream_list', params),
  channel_abandon: params => daemonCallWithResult('channel_abandon', params),
  channel_sign: params => daemonCallWithResult('channel_sign', params),
  support_create: params => daemonCallWithResult('support_create', params),
  support_list: params => daemonCallWithResult('support_list', params),
  stream_repost: params => daemonCallWithResult('stream_repost', params),
  collection_resolve: params => daemonCallWithResult('collection_resolve', params),
  collection_list: params => daemonCallWithResult('collection_list', params),
  collection_create: params => daemonCallWithResult('collection_create', params),
  collection_update: params => daemonCallWithResult('collection_update', params),

  // File fetching and manipulation
  file_list: (params = {}) => daemonCallWithResult('file_list', params),
@@ -109,6 +116,8 @@ const Lbry: LbryTypes = {
  transaction_list: (params = {}) => daemonCallWithResult('transaction_list', params),
  utxo_release: (params = {}) => daemonCallWithResult('utxo_release', params),
  support_abandon: (params = {}) => daemonCallWithResult('support_abandon', params),
  purchase_list: (params = {}) => daemonCallWithResult('purchase_list', params),
  txo_list: (params = {}) => daemonCallWithResult('txo_list', params),

  sync_hash: (params = {}) => daemonCallWithResult('sync_hash', params),
  sync_apply: (params = {}) => daemonCallWithResult('sync_apply', params),
@@ -191,7 +200,10 @@ export function apiCall(method: string, params: ?{}, resolve: Function, reject:
    }),
  };

  return fetch(Lbry.daemonConnectionString, options)
  const connectionString = Lbry.methodsUsingAlternateConnectionString.includes(method)
    ? Lbry.alternateConnectionString
    : Lbry.daemonConnectionString;
  return fetch(connectionString + '?m=' + method, options)
    .then(checkAndParse)
    .then(response => {
      const error = response.error || (response.result && response.result.error);
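A minimal sketch of how a host app might use the new alternate-connection-string routing shown in apiCall. The fields are assigned directly because only the fields themselves appear in this diff (the library may also expose setter helpers not shown), and the proxy URL and method list are invented.

import Lbry from 'lbry';

// Send only `resolve` and `claim_search` through a caching proxy; everything else
// keeps using the local daemon.
Lbry.alternateConnectionString = 'https://api.example.com/api/v1/proxy';
Lbry.methodsUsingAlternateConnectionString = ['resolve', 'claim_search'];

// apiCall now appends `?m=<method>` to whichever connection string it picks,
// which lets a proxy route or cache per-method.
Lbry.resolve({ urls: ['lbry://@lbry#3f'] }).then(response => console.log(response));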
|
@ -12,6 +12,11 @@ const regexPartModifierSeparator = '([:$#]?)([^/]*)';
|
|||
const queryStringBreaker = '^([\\S]+)([?][\\S]*)';
|
||||
const separateQuerystring = new RegExp(queryStringBreaker);
|
||||
|
||||
const MOD_SEQUENCE_SEPARATOR = '*';
|
||||
const MOD_CLAIM_ID_SEPARATOR_OLD = '#';
|
||||
const MOD_CLAIM_ID_SEPARATOR = ':';
|
||||
const MOD_BID_POSITION_SEPARATOR = '$';
|
||||
|
||||
/**
|
||||
* Parses a LBRY name into its component parts. Throws errors with user-friendly
|
||||
* messages for invalid names.
|
||||
|
@ -29,7 +34,7 @@ const separateQuerystring = new RegExp(queryStringBreaker);
|
|||
* - secondaryBidPosition (int, if present)
|
||||
*/
|
||||
|
||||
export function parseURI(URL: string, requireProto: boolean = false): LbryUrlObj {
|
||||
export function parseURI(url: string, requireProto: boolean = false): LbryUrlObj {
|
||||
// Break into components. Empty sub-matches are converted to null
|
||||
|
||||
const componentsRegex = new RegExp(
|
||||
|
@ -42,12 +47,12 @@ export function parseURI(URL: string, requireProto: boolean = false): LbryUrlObj
|
|||
);
|
||||
// chop off the querystring first
|
||||
let QSStrippedURL, qs;
|
||||
const qsRegexResult = separateQuerystring.exec(URL);
|
||||
const qsRegexResult = separateQuerystring.exec(url);
|
||||
if (qsRegexResult) {
|
||||
[QSStrippedURL, qs] = qsRegexResult.slice(1).map(match => match || null);
|
||||
}
|
||||
|
||||
const cleanURL = QSStrippedURL || URL;
|
||||
const cleanURL = QSStrippedURL || url;
|
||||
const regexMatch = componentsRegex.exec(cleanURL) || [];
|
||||
const [proto, ...rest] = regexMatch.slice(1).map(match => match || null);
|
||||
const path = rest.join('');
|
||||
|
@ -60,6 +65,8 @@ export function parseURI(URL: string, requireProto: boolean = false): LbryUrlObj
|
|||
secondaryModSeparator,
|
||||
secondaryModValue,
|
||||
] = rest;
|
||||
const searchParams = new URLSearchParams(qs || '');
|
||||
const startTime = searchParams.get('t');
|
||||
|
||||
// Validate protocol
|
||||
if (requireProto && !proto) {
|
||||
|
@ -73,7 +80,7 @@ export function parseURI(URL: string, requireProto: boolean = false): LbryUrlObj
|
|||
|
||||
rest.forEach(urlPiece => {
|
||||
if (urlPiece && urlPiece.includes(' ')) {
|
||||
console.error('URL can not include a space');
|
||||
throw new Error(__('URL can not include a space'));
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -121,6 +128,7 @@ export function parseURI(URL: string, requireProto: boolean = false): LbryUrlObj
|
|||
: {}),
|
||||
...(primaryBidPosition ? { primaryBidPosition: parseInt(primaryBidPosition, 10) } : {}),
|
||||
...(secondaryBidPosition ? { secondaryBidPosition: parseInt(secondaryBidPosition, 10) } : {}),
|
||||
...(startTime ? { startTime: parseInt(startTime, 10) } : {}),
|
||||
|
||||
// The values below should not be used for new uses of parseURI
|
||||
// They will not work properly with canonical_urls
|
||||
|
@ -141,11 +149,11 @@ function parseURIModifier(modSeperator: ?string, modValue: ?string) {
|
|||
throw new Error(__(`No modifier provided after separator %modSeperator%.`, { modSeperator }));
|
||||
}
|
||||
|
||||
    if (modSeperator === '#') {
    if (modSeperator === MOD_CLAIM_ID_SEPARATOR || modSeperator === MOD_CLAIM_ID_SEPARATOR_OLD) {
      claimId = modValue;
    } else if (modSeperator === ':') {
    } else if (modSeperator === MOD_SEQUENCE_SEPARATOR) {
      claimSequence = modValue;
    } else if (modSeperator === '$') {
    } else if (modSeperator === MOD_BID_POSITION_SEPARATOR) {
      bidPosition = modValue;
    }
  }
|
||||
|
@@ -184,6 +192,7 @@ export function buildURI(
    primaryBidPosition,
    secondaryClaimSequence,
    secondaryBidPosition,
    startTime,
    ...deprecatedParts
  } = UrlObj;
  const { claimId, claimName, contentName } = deprecatedParts;
@@ -233,7 +242,8 @@ export function buildURI(
    (secondaryClaimName ? `/${secondaryClaimName}` : '') +
    (secondaryClaimId ? `#${secondaryClaimId}` : '') +
    (secondaryClaimSequence ? `:${secondaryClaimSequence}` : '') +
    (secondaryBidPosition ? `${secondaryBidPosition}` : '')
    (secondaryBidPosition ? `${secondaryBidPosition}` : '') +
    (startTime ? `?t=${startTime}` : '')
  );
}

@@ -248,6 +258,7 @@ export function normalizeURI(URL: string) {
    primaryBidPosition,
    secondaryClaimSequence,
    secondaryBidPosition,
    startTime,
  } = parseURI(URL);

  return buildURI({
@@ -259,6 +270,7 @@ export function normalizeURI(URL: string) {
    primaryBidPosition,
    secondaryClaimSequence,
    secondaryBidPosition,
    startTime,
  });
}

@@ -313,3 +325,22 @@ export function convertToShareLink(URL: string) {
    'https://open.lbry.com/'
  );
}

export function splitBySeparator(uri: string) {
  const protocolLength = 7;
  return uri.startsWith('lbry://') ? uri.slice(protocolLength).split(/[#:*]/) : uri.split(/#:\*\$/);
}

export function isURIEqual(uriA: string, uriB: string) {
  const parseA = parseURI(normalizeURI(uriA));
  const parseB = parseURI(normalizeURI(uriB));
  if (parseA.isChannel) {
    if (parseB.isChannel && parseA.channelClaimId === parseB.channelClaimId) {
      return true;
    }
  } else if (parseA.streamClaimId === parseB.streamClaimId) {
    return true;
  } else {
    return false;
  }
}
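A minimal sketch of the new `?t=` handling round-tripping through parseURI and normalizeURI, based on the hunks above; the channel name and claim ids in the URL are made up.

import { parseURI, normalizeURI } from 'lbryURI';

// The trailing `?t=90` querystring is stripped before the URL is matched against the
// component regex, then surfaced as a numeric startTime field.
const parts = parseURI('lbry://@someChannel#1a/some-video#9f?t=90');
console.log(parts.startTime); // 90

// normalizeURI now carries startTime through to buildURI, which re-appends `?t=...`.
console.log(normalizeURI('lbry://@someChannel#1a/some-video#9f?t=90').endsWith('?t=90')); // true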
|
@ -1,5 +1,6 @@
|
|||
// @flow
|
||||
import * as ACTIONS from 'constants/action_types';
|
||||
import * as ABANDON_STATES from 'constants/abandon_states';
|
||||
import Lbry from 'lbry';
|
||||
import { normalizeURI } from 'lbryURI';
|
||||
import { doToast } from 'redux/actions/notifications';
|
||||
|
@ -8,14 +9,34 @@ import {
|
|||
selectResolvingUris,
|
||||
selectClaimsByUri,
|
||||
selectMyChannelClaims,
|
||||
selectPendingIds,
|
||||
selectPendingClaimsById,
|
||||
} from 'redux/selectors/claims';
|
||||
import { doFetchTransactions } from 'redux/actions/wallet';
|
||||
|
||||
import { doFetchTxoPage } from 'redux/actions/wallet';
|
||||
import { selectSupportsByOutpoint } from 'redux/selectors/wallet';
|
||||
import { creditsToString } from 'util/format-credits';
|
||||
import { batchActions } from 'util/batch-actions';
|
||||
import { createNormalizedClaimSearchKey } from 'util/claim';
|
||||
import { PAGE_SIZE } from 'constants/claim';
|
||||
import {
|
||||
selectPendingCollections,
|
||||
makeSelectClaimIdsForCollectionId,
|
||||
} from 'redux/selectors/collections';
|
||||
import {
|
||||
doFetchItemsInCollection,
|
||||
doFetchItemsInCollections,
|
||||
doCollectionDelete,
|
||||
} from 'redux/actions/collections';
|
||||
|
||||
export function doResolveUris(uris: Array<string>, returnCachedClaims: boolean = false) {
|
||||
let onChannelConfirmCallback;
|
||||
let checkPendingInterval;
|
||||
|
||||
export function doResolveUris(
|
||||
uris: Array<string>,
|
||||
returnCachedClaims: boolean = false,
|
||||
resolveReposts: boolean = true
|
||||
) {
|
||||
return (dispatch: Dispatch, getState: GetState) => {
|
||||
const normalizedUris = uris.map(normalizeURI);
|
||||
const state = getState();
|
||||
|
@ -34,6 +55,13 @@ export function doResolveUris(uris: Array<string>, returnCachedClaims: boolean =
|
|||
return;
|
||||
}
|
||||
|
||||
const options: { include_is_my_output?: boolean, include_purchase_receipt: boolean } = {
|
||||
include_purchase_receipt: true,
|
||||
};
|
||||
|
||||
if (urisToResolve.length === 1) {
|
||||
options.include_is_my_output = true;
|
||||
}
|
||||
dispatch({
|
||||
type: ACTIONS.RESOLVE_URIS_STARTED,
|
||||
data: { uris: normalizedUris },
|
||||
|
@ -44,49 +72,88 @@ export function doResolveUris(uris: Array<string>, returnCachedClaims: boolean =
|
|||
stream: ?StreamClaim,
|
||||
channel: ?ChannelClaim,
|
||||
claimsInChannel: ?number,
|
||||
collection: ?CollectionClaim,
|
||||
},
|
||||
} = {};
|
||||
|
||||
Lbry.resolve({ urls: urisToResolve }).then((result: ResolveResponse) => {
|
||||
Object.entries(result).forEach(([uri, uriResolveInfo]) => {
|
||||
const collectionIds: Array<string> = [];
|
||||
|
||||
return Lbry.resolve({ urls: urisToResolve, ...options }).then(
|
||||
async(result: ResolveResponse) => {
|
||||
let repostedResults = {};
|
||||
const repostsToResolve = [];
|
||||
const fallbackResolveInfo = {
|
||||
stream: null,
|
||||
claimsInChannel: null,
|
||||
channel: null,
|
||||
};
|
||||
|
||||
// Flow has terrible Object.entries support
|
||||
// https://github.com/facebook/flow/issues/2221
|
||||
if (uriResolveInfo) {
|
||||
if (uriResolveInfo.error) {
|
||||
resolveInfo[uri] = { ...fallbackResolveInfo };
|
||||
} else {
|
||||
let result = {};
|
||||
if (uriResolveInfo.value_type === 'channel') {
|
||||
result.channel = uriResolveInfo;
|
||||
// $FlowFixMe
|
||||
result.claimsInChannel = uriResolveInfo.meta.claims_in_channel;
|
||||
} else {
|
||||
result.stream = uriResolveInfo;
|
||||
if (uriResolveInfo.signing_channel) {
|
||||
result.channel = uriResolveInfo.signing_channel;
|
||||
result.claimsInChannel =
|
||||
(uriResolveInfo.signing_channel.meta &&
|
||||
uriResolveInfo.signing_channel.meta.claims_in_channel) ||
|
||||
0;
|
||||
function processResult(result, resolveInfo = {}, checkReposts = false) {
|
||||
Object.entries(result).forEach(([uri, uriResolveInfo]) => {
|
||||
// Flow has terrible Object.entries support
|
||||
// https://github.com/facebook/flow/issues/2221
|
||||
if (uriResolveInfo) {
|
||||
if (uriResolveInfo.error) {
|
||||
// $FlowFixMe
|
||||
resolveInfo[uri] = { ...fallbackResolveInfo };
|
||||
} else {
|
||||
if (checkReposts) {
|
||||
if (uriResolveInfo.reposted_claim) {
|
||||
// $FlowFixMe
|
||||
const repostUrl = uriResolveInfo.reposted_claim.permanent_url;
|
||||
if (!resolvingUris.includes(repostUrl)) {
|
||||
repostsToResolve.push(repostUrl);
|
||||
}
|
||||
}
|
||||
}
|
||||
let result = {};
|
||||
if (uriResolveInfo.value_type === 'channel') {
|
||||
result.channel = uriResolveInfo;
|
||||
// $FlowFixMe
|
||||
result.claimsInChannel = uriResolveInfo.meta.claims_in_channel;
|
||||
} else if (uriResolveInfo.value_type === 'collection') {
|
||||
result.collection = uriResolveInfo;
|
||||
// $FlowFixMe
|
||||
collectionIds.push(uriResolveInfo.claim_id);
|
||||
} else {
|
||||
result.stream = uriResolveInfo;
|
||||
if (uriResolveInfo.signing_channel) {
|
||||
result.channel = uriResolveInfo.signing_channel;
|
||||
result.claimsInChannel =
|
||||
(uriResolveInfo.signing_channel.meta &&
|
||||
uriResolveInfo.signing_channel.meta.claims_in_channel) ||
|
||||
0;
|
||||
}
|
||||
}
|
||||
// $FlowFixMe
|
||||
resolveInfo[uri] = result;
|
||||
}
|
||||
}
|
||||
// $FlowFixMe
|
||||
resolveInfo[uri] = result;
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
processResult(result, resolveInfo, resolveReposts);
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.RESOLVE_URIS_COMPLETED,
|
||||
data: { resolveInfo },
|
||||
});
|
||||
});
|
||||
if (repostsToResolve.length) {
|
||||
dispatch({
|
||||
type: ACTIONS.RESOLVE_URIS_STARTED,
|
||||
data: { uris: repostsToResolve, debug: 'reposts' },
|
||||
});
|
||||
repostedResults = await Lbry.resolve({ urls: repostsToResolve, ...options });
|
||||
}
|
||||
processResult(repostedResults, resolveInfo);
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.RESOLVE_URIS_COMPLETED,
|
||||
data: { resolveInfo },
|
||||
});
|
||||
|
||||
if (collectionIds.length) {
|
||||
dispatch(doFetchItemsInCollections({ collectionIds: collectionIds, pageSize: 5 }));
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
|
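A rough usage sketch of the updated doResolveUris signature (not part of the diff; the store instance and URIs below are placeholders). Leaving resolveReposts at its default of true also resolves any reposted claims found in the results:

// Hypothetical caller, assuming a configured redux-thunk store and the repo's import aliases:
import { doResolveUris } from 'redux/actions/claims';

// Resolve two claims, skip the claim cache, and let reposts resolve as well.
store.dispatch(doResolveUris(['lbry://@someChannel#1/some-video#a', 'lbry://other-claim#b'], false, true));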
@ -97,29 +164,123 @@ export function doResolveUri(uri: string) {
|
|||
export function doFetchClaimListMine(
|
||||
page: number = 1,
|
||||
pageSize: number = 99999,
|
||||
resolve: boolean = true
|
||||
resolve: boolean = true,
|
||||
filterBy: Array<string> = []
|
||||
) {
|
||||
return (dispatch: Dispatch) => {
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_CLAIM_LIST_MINE_STARTED,
|
||||
});
|
||||
|
||||
Lbry.claim_list({ page, page_size: pageSize, claim_type: ['stream', 'repost'], resolve }).then(
|
||||
(result: StreamListResponse) => {
|
||||
const claims = result.items;
|
||||
let claimTypes = ['stream', 'repost'];
|
||||
if (filterBy && filterBy.length !== 0) {
|
||||
claimTypes = claimTypes.filter(t => filterBy.includes(t));
|
||||
}
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED,
|
||||
data: {
|
||||
claims,
|
||||
},
|
||||
});
|
||||
}
|
||||
);
|
||||
// $FlowFixMe
|
||||
Lbry.claim_list({
|
||||
page: page,
|
||||
page_size: pageSize,
|
||||
claim_type: claimTypes,
|
||||
resolve,
|
||||
}).then((result: StreamListResponse) => {
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED,
|
||||
data: {
|
||||
result,
|
||||
resolve,
|
||||
},
|
||||
});
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
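The new filterBy argument narrows the default ['stream', 'repost'] claim_type list before claim_list is called. A minimal sketch, assuming a configured store (the page size and filter value are placeholders):

// Hypothetical caller: fetch only my repost claims, first page of 20, resolved.
store.dispatch(doFetchClaimListMine(1, 20, true, ['repost']));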
export function doAbandonClaim(txid: string, nout: number) {
|
||||
export function doAbandonTxo(txo: Txo, cb: string => void) {
|
||||
return (dispatch: Dispatch) => {
|
||||
if (cb) cb(ABANDON_STATES.PENDING);
|
||||
const isClaim = txo.type === 'claim';
|
||||
const isSupport = txo.type === 'support' && txo.is_my_input === true;
|
||||
const isTip = txo.type === 'support' && txo.is_my_input === false;
|
||||
|
||||
const data = isClaim ? { claimId: txo.claim_id } : { outpoint: `${txo.txid}:${txo.nout}` };
|
||||
|
||||
const startedActionType = isClaim
|
||||
? ACTIONS.ABANDON_CLAIM_STARTED
|
||||
: ACTIONS.ABANDON_SUPPORT_STARTED;
|
||||
const completedActionType = isClaim
|
||||
? ACTIONS.ABANDON_CLAIM_SUCCEEDED
|
||||
: ACTIONS.ABANDON_SUPPORT_COMPLETED;
|
||||
|
||||
dispatch({
|
||||
type: startedActionType,
|
||||
data,
|
||||
});
|
||||
|
||||
const errorCallback = () => {
|
||||
if (cb) cb(ABANDON_STATES.ERROR);
|
||||
dispatch(
|
||||
doToast({
|
||||
message: isClaim ? 'Error abandoning your claim/support' : 'Error unlocking your tip',
|
||||
isError: true,
|
||||
})
|
||||
);
|
||||
};
|
||||
|
||||
const successCallback = () => {
|
||||
dispatch({
|
||||
type: completedActionType,
|
||||
data,
|
||||
});
|
||||
|
||||
let abandonMessage;
|
||||
if (isClaim) {
|
||||
abandonMessage = __('Successfully abandoned your claim.');
|
||||
} else if (isSupport) {
|
||||
abandonMessage = __('Successfully abandoned your support.');
|
||||
} else {
|
||||
abandonMessage = __('Successfully unlocked your tip!');
|
||||
}
|
||||
if (cb) cb(ABANDON_STATES.DONE);
|
||||
|
||||
dispatch(
|
||||
doToast({
|
||||
message: abandonMessage,
|
||||
})
|
||||
);
|
||||
};
|
||||
|
||||
const abandonParams: {
|
||||
claim_id?: string,
|
||||
txid?: string,
|
||||
nout?: number,
|
||||
} = {
|
||||
blocking: true,
|
||||
};
|
||||
if (isClaim) {
|
||||
abandonParams['claim_id'] = txo.claim_id;
|
||||
} else {
|
||||
abandonParams['txid'] = txo.txid;
|
||||
abandonParams['nout'] = txo.nout;
|
||||
}
|
||||
|
||||
let method;
|
||||
if (isSupport || isTip) {
|
||||
method = 'support_abandon';
|
||||
} else if (isClaim) {
|
||||
const { normalized_name: claimName } = txo;
|
||||
method = claimName.startsWith('@') ? 'channel_abandon' : 'stream_abandon';
|
||||
}
|
||||
|
||||
if (!method) {
|
||||
console.error('No "method" chosen for claim or support abandon');
|
||||
return;
|
||||
}
|
||||
|
||||
Lbry[method](abandonParams).then(successCallback, errorCallback);
|
||||
};
|
||||
}
|
||||
|
||||
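doAbandonTxo reports progress through its callback using ABANDON_STATES (PENDING, then DONE or ERROR). A minimal sketch, assuming the caller already has a txo object from txo_list and its own refreshList helper:

// Hypothetical caller:
import * as ABANDON_STATES from 'constants/abandon_states';
import { doAbandonTxo } from 'redux/actions/claims';

store.dispatch(
  doAbandonTxo(txo, status => {
    // status is PENDING while the abandon is in flight, then DONE or ERROR
    if (status === ABANDON_STATES.DONE) refreshList();
  })
);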
export function doAbandonClaim(txid: string, nout: number, cb: string => void) {
|
||||
const outpoint = `${txid}:${nout}`;
|
||||
|
||||
return (dispatch: Dispatch, getState: GetState) => {
|
||||
|
@ -160,6 +321,7 @@ export function doAbandonClaim(txid: string, nout: number) {
|
|||
isError: true,
|
||||
})
|
||||
);
|
||||
if (cb) cb(ABANDON_STATES.ERROR);
|
||||
};
|
||||
|
||||
const successCallback = () => {
|
||||
|
@ -167,14 +329,15 @@ export function doAbandonClaim(txid: string, nout: number) {
|
|||
type: completedActionType,
|
||||
data,
|
||||
});
|
||||
if (cb) cb(ABANDON_STATES.DONE);
|
||||
|
||||
let abandonMessage;
|
||||
if (isClaim) {
|
||||
abandonMessage = 'Successfully abandoned your claim.';
|
||||
abandonMessage = __('Successfully abandoned your claim.');
|
||||
} else if (supportToAbandon) {
|
||||
abandonMessage = 'Successfully abandoned your support.';
|
||||
abandonMessage = __('Successfully abandoned your support.');
|
||||
} else {
|
||||
abandonMessage = 'Successfully unlocked your tip!';
|
||||
abandonMessage = __('Successfully unlocked your tip!');
|
||||
}
|
||||
|
||||
dispatch(
|
||||
|
@ -182,13 +345,7 @@ export function doAbandonClaim(txid: string, nout: number) {
|
|||
message: abandonMessage,
|
||||
})
|
||||
);
|
||||
|
||||
// After abandoning, fetch transactions to show the new abandon transaction
|
||||
// Only fetch the latest few transactions since we don't care about old ones
|
||||
// Not very robust, but better than calling the entire list for large wallets
|
||||
const page = 1;
|
||||
const pageSize = 10;
|
||||
dispatch(doFetchTransactions(page, pageSize));
|
||||
dispatch(doFetchTxoPage());
|
||||
};
|
||||
|
||||
const abandonParams = {
|
||||
|
@ -226,6 +383,8 @@ export function doFetchClaimsByChannel(uri: string, page: number = 1) {
|
|||
valid_channel_signature: true,
|
||||
page: page || 1,
|
||||
order_by: ['release_time'],
|
||||
include_is_my_output: true,
|
||||
include_purchase_receipt: true,
|
||||
}).then((result: ClaimSearchResponse) => {
|
||||
const { items: claims, total_items: claimsInChannel, page: returnedPage } = result;
|
||||
|
||||
|
@ -242,7 +401,13 @@ export function doFetchClaimsByChannel(uri: string, page: number = 1) {
|
|||
};
|
||||
}
|
||||
|
||||
export function doCreateChannel(name: string, amount: number, optionalParams: any) {
|
||||
export function doClearChannelErrors() {
|
||||
return {
|
||||
type: ACTIONS.CLEAR_CHANNEL_ERRORS,
|
||||
};
|
||||
}
|
||||
|
||||
export function doCreateChannel(name: string, amount: number, optionalParams: any, onConfirm: any) {
|
||||
return (dispatch: Dispatch) => {
|
||||
dispatch({
|
||||
type: ACTIONS.CREATE_CHANNEL_STARTED,
|
||||
|
@ -258,7 +423,8 @@ export function doCreateChannel(name: string, amount: number, optionalParams: an
|
|||
description?: string,
|
||||
website_url?: string,
|
||||
email?: string,
|
||||
tags?: Array<string>,
|
||||
tags?: Array<Tag>,
|
||||
languages?: Array<string>,
|
||||
} = {
|
||||
name,
|
||||
bid: creditsToString(amount),
|
||||
|
@ -287,6 +453,9 @@ export function doCreateChannel(name: string, amount: number, optionalParams: an
|
|||
if (optionalParams.tags) {
|
||||
createParams.tags = optionalParams.tags.map(tag => tag.name);
|
||||
}
|
||||
if (optionalParams.languages) {
|
||||
createParams.languages = optionalParams.languages;
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
|
@ -299,6 +468,13 @@ export function doCreateChannel(name: string, amount: number, optionalParams: an
|
|||
type: ACTIONS.CREATE_CHANNEL_COMPLETED,
|
||||
data: { channelClaim },
|
||||
});
|
||||
dispatch({
|
||||
type: ACTIONS.UPDATE_PENDING_CLAIMS,
|
||||
data: {
|
||||
claims: [channelClaim],
|
||||
},
|
||||
});
|
||||
dispatch(doCheckPendingClaims(onConfirm));
|
||||
return channelClaim;
|
||||
})
|
||||
.catch(error => {
|
||||
|
@ -306,13 +482,12 @@ export function doCreateChannel(name: string, amount: number, optionalParams: an
|
|||
type: ACTIONS.CREATE_CHANNEL_FAILED,
|
||||
data: error.message,
|
||||
});
|
||||
return error;
|
||||
})
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
export function doUpdateChannel(params: any) {
|
||||
export function doUpdateChannel(params: any, cb: any) {
|
||||
return (dispatch: Dispatch, getState: GetState) => {
|
||||
dispatch({
|
||||
type: ACTIONS.UPDATE_CHANNEL_STARTED,
|
||||
|
@ -332,7 +507,7 @@ export function doUpdateChannel(params: any) {
|
|||
email: params.email,
|
||||
tags: [],
|
||||
replace: true,
|
||||
languages: [],
|
||||
languages: params.languages || [],
|
||||
locations: [],
|
||||
blocking: true,
|
||||
};
|
||||
|
@ -342,15 +517,10 @@ export function doUpdateChannel(params: any) {
|
|||
}
|
||||
|
||||
// we'll need to remove these once we add locations/channels to channel page edit/create options
|
||||
|
||||
if (channelClaim && channelClaim.value && channelClaim.value.locations) {
|
||||
updateParams.locations = channelClaim.value.locations;
|
||||
}
|
||||
|
||||
if (channelClaim && channelClaim.value && channelClaim.value.languages) {
|
||||
updateParams.languages = channelClaim.value.languages;
|
||||
}
|
||||
|
||||
return Lbry.channel_update(updateParams)
|
||||
.then((result: ChannelUpdateResponse) => {
|
||||
const channelClaim = result.outputs[0];
|
||||
|
@ -358,7 +528,16 @@ export function doUpdateChannel(params: any) {
|
|||
type: ACTIONS.UPDATE_CHANNEL_COMPLETED,
|
||||
data: { channelClaim },
|
||||
});
|
||||
dispatch({
|
||||
type: ACTIONS.UPDATE_PENDING_CLAIMS,
|
||||
data: {
|
||||
claims: [channelClaim],
|
||||
},
|
||||
});
|
||||
dispatch(doCheckPendingClaims(cb));
|
||||
return Boolean(result.outputs[0]);
|
||||
})
|
||||
.then()
|
||||
.catch(error => {
|
||||
dispatch({
|
||||
type: ACTIONS.UPDATE_CHANNEL_FAILED,
|
||||
|
@ -375,7 +554,7 @@ export function doImportChannel(certificate: string) {
|
|||
});
|
||||
|
||||
return Lbry.channel_import({ channel_data: certificate })
|
||||
.then((result: string) => {
|
||||
.then(() => {
|
||||
dispatch({
|
||||
type: ACTIONS.IMPORT_CHANNEL_COMPLETED,
|
||||
});
|
||||
|
@ -406,7 +585,48 @@ export function doFetchChannelListMine(
|
|||
});
|
||||
};
|
||||
|
||||
Lbry.channel_list({ page, page_size: pageSize, resolve }).then(callback);
|
||||
const failure = error => {
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_CHANNEL_LIST_FAILED,
|
||||
data: error,
|
||||
});
|
||||
};
|
||||
|
||||
Lbry.channel_list({ page, page_size: pageSize, resolve }).then(callback, failure);
|
||||
};
|
||||
}
|
||||
|
||||
export function doFetchCollectionListMine(page: number = 1, pageSize: number = 99999) {
|
||||
return (dispatch: Dispatch) => {
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_COLLECTION_LIST_STARTED,
|
||||
});
|
||||
|
||||
const callback = (response: CollectionListResponse) => {
|
||||
const { items } = response;
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_COLLECTION_LIST_COMPLETED,
|
||||
data: { claims: items },
|
||||
});
|
||||
dispatch(
|
||||
doFetchItemsInCollections({
|
||||
collectionIds: items.map(claim => claim.claim_id),
|
||||
page_size: 5,
|
||||
})
|
||||
);
|
||||
};
|
||||
|
||||
const failure = error => {
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_COLLECTION_LIST_FAILED,
|
||||
data: error,
|
||||
});
|
||||
};
|
||||
|
||||
Lbry.collection_list({ page, page_size: pageSize, resolve_claims: 1, resolve: true }).then(
|
||||
callback,
|
||||
failure
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -414,13 +634,16 @@ export function doClaimSearch(
|
|||
options: {
|
||||
page_size: number,
|
||||
page: number,
|
||||
no_totals: boolean,
|
||||
no_totals?: boolean,
|
||||
any_tags?: Array<string>,
|
||||
claim_ids?: Array<string>,
|
||||
channel_ids?: Array<string>,
|
||||
not_channel_ids?: Array<string>,
|
||||
not_tags?: Array<string>,
|
||||
order_by?: Array<string>,
|
||||
release_time?: string,
|
||||
has_source?: boolean,
|
||||
has_no_source?: boolean,
|
||||
} = {
|
||||
no_totals: true,
|
||||
page_size: 10,
|
||||
|
@ -428,7 +651,7 @@ export function doClaimSearch(
|
|||
}
|
||||
) {
|
||||
const query = createNormalizedClaimSearchKey(options);
|
||||
return (dispatch: Dispatch) => {
|
||||
return async (dispatch: Dispatch) => {
|
||||
dispatch({
|
||||
type: ACTIONS.CLAIM_SEARCH_STARTED,
|
||||
data: { query: query },
|
||||
|
@ -452,6 +675,7 @@ export function doClaimSearch(
|
|||
pageSize: options.page_size,
|
||||
},
|
||||
});
|
||||
return resolveInfo;
|
||||
};
|
||||
|
||||
const failure = err => {
|
||||
|
@ -460,15 +684,18 @@ export function doClaimSearch(
|
|||
data: { query },
|
||||
error: err,
|
||||
});
|
||||
return false;
|
||||
};
|
||||
|
||||
Lbry.claim_search(options).then(success, failure);
|
||||
return await Lbry.claim_search({
|
||||
...options,
|
||||
include_purchase_receipt: true,
|
||||
}).then(success, failure);
|
||||
};
|
||||
}
|
||||
|
||||
export function doRepost(options: StreamRepostOptions) {
|
||||
return (dispatch: Dispatch) => {
|
||||
// $FlowFixMe
|
||||
return (dispatch: Dispatch): Promise<any> => {
|
||||
return new Promise(resolve => {
|
||||
dispatch({
|
||||
type: ACTIONS.CLAIM_REPOST_STARTED,
|
||||
|
@ -483,9 +710,15 @@ export function doRepost(options: StreamRepostOptions) {
|
|||
repostClaim,
|
||||
},
|
||||
});
|
||||
dispatch({
|
||||
type: ACTIONS.UPDATE_PENDING_CLAIMS,
|
||||
data: {
|
||||
claims: [repostClaim],
|
||||
},
|
||||
});
|
||||
|
||||
dispatch(doFetchClaimListMine(1, 10));
|
||||
resolve();
|
||||
resolve(repostClaim);
|
||||
}
|
||||
|
||||
function failure(error) {
|
||||
|
@ -502,8 +735,336 @@ export function doRepost(options: StreamRepostOptions) {
|
|||
};
|
||||
}
|
||||
|
||||
export function doCollectionPublish(
|
||||
options: {
|
||||
name: string,
|
||||
bid: string,
|
||||
blocking: true,
|
||||
title?: string,
|
||||
channel_id?: string,
|
||||
thumbnail_url?: string,
|
||||
description?: string,
|
||||
tags?: Array<Tag>,
|
||||
languages?: Array<string>,
|
||||
claims: Array<string>,
|
||||
},
|
||||
localId: string
|
||||
) {
|
||||
return (dispatch: Dispatch): Promise<any> => {
|
||||
// $FlowFixMe
|
||||
|
||||
const params: {
|
||||
name: string,
|
||||
bid: string,
|
||||
channel_id?: string,
|
||||
blocking?: true,
|
||||
title?: string,
|
||||
thumbnail_url?: string,
|
||||
description?: string,
|
||||
tags?: Array<string>,
|
||||
languages?: Array<string>,
|
||||
claims: Array<string>,
|
||||
} = {
|
||||
name: options.name,
|
||||
bid: creditsToString(options.bid),
|
||||
title: options.title,
|
||||
thumbnail_url: options.thumbnail_url,
|
||||
description: options.description,
|
||||
tags: [],
|
||||
languages: options.languages || [],
|
||||
locations: [],
|
||||
blocking: true,
|
||||
claims: options.claims,
|
||||
};
|
||||
|
||||
if (options.tags) {
|
||||
params['tags'] = options.tags.map(tag => tag.name);
|
||||
}
|
||||
|
||||
if (options.channel_id) {
|
||||
params['channel_id'] = options.channel_id;
|
||||
}
|
||||
|
||||
return new Promise(resolve => {
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_PUBLISH_STARTED,
|
||||
});
|
||||
|
||||
function success(response) {
|
||||
const collectionClaim = response.outputs[0];
|
||||
dispatch(
|
||||
batchActions(
|
||||
{
|
||||
type: ACTIONS.COLLECTION_PUBLISH_COMPLETED,
|
||||
data: { claimId: collectionClaim.claim_id },
|
||||
},
|
||||
// move unpublished collection to pending collection with new publish id
|
||||
// recent publish won't resolve this second. handle it in checkPending
|
||||
{
|
||||
type: ACTIONS.UPDATE_PENDING_CLAIMS,
|
||||
data: {
|
||||
claims: [collectionClaim],
|
||||
},
|
||||
}
|
||||
)
|
||||
);
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_PENDING,
|
||||
data: { localId: localId, claimId: collectionClaim.claim_id },
|
||||
});
|
||||
dispatch(doCheckPendingClaims());
|
||||
dispatch(doFetchCollectionListMine(1, 10));
|
||||
return resolve(collectionClaim);
|
||||
}
|
||||
|
||||
function failure(error) {
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_PUBLISH_FAILED,
|
||||
data: {
|
||||
error: error.message,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return Lbry.collection_create(params).then(success, failure);
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
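A hedged sketch of publishing a local collection with doCollectionPublish: localCollectionId is the uuid assigned by doLocalCollectionCreate, and the name, bid, and claim ids are placeholders. On success the dispatched promise resolves with the new collection claim.

// Hypothetical caller:
store.dispatch(
  doCollectionPublish(
    {
      name: 'my-playlist',
      bid: '0.001',
      blocking: true,
      title: 'My Playlist',
      claims: ['claimid1', 'claimid2'], // ordered item claim ids
    },
    localCollectionId // uuid from doLocalCollectionCreate
  )
).then(collectionClaim => console.log('published as', collectionClaim.claim_id));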
export function doCollectionPublishUpdate(
|
||||
options: {
|
||||
bid?: string,
|
||||
blocking?: true,
|
||||
title?: string,
|
||||
thumbnail_url?: string,
|
||||
description?: string,
|
||||
claim_id: string,
|
||||
tags?: Array<Tag>,
|
||||
languages?: Array<string>,
|
||||
claims?: Array<string>,
|
||||
channel_id?: string,
|
||||
},
|
||||
isBackgroundUpdate?: boolean
|
||||
) {
|
||||
return (dispatch: Dispatch, getState: GetState): Promise<any> => {
|
||||
// TODO: implement one click update
|
||||
|
||||
const updateParams: {
|
||||
bid?: string,
|
||||
blocking?: true,
|
||||
title?: string,
|
||||
thumbnail_url?: string,
|
||||
channel_id?: string,
|
||||
description?: string,
|
||||
claim_id: string,
|
||||
tags?: Array<string>,
|
||||
languages?: Array<string>,
|
||||
claims?: Array<string>,
|
||||
clear_claims: boolean,
|
||||
replace?: boolean,
|
||||
} = isBackgroundUpdate
|
||||
? {
|
||||
blocking: true,
|
||||
claim_id: options.claim_id,
|
||||
clear_claims: true,
|
||||
}
|
||||
: {
|
||||
bid: creditsToString(options.bid),
|
||||
title: options.title,
|
||||
thumbnail_url: options.thumbnail_url,
|
||||
description: options.description,
|
||||
tags: [],
|
||||
languages: options.languages || [],
|
||||
locations: [],
|
||||
blocking: true,
|
||||
claim_id: options.claim_id,
|
||||
clear_claims: true,
|
||||
replace: true,
|
||||
};
|
||||
|
||||
if (isBackgroundUpdate && updateParams.claim_id) {
|
||||
const state = getState();
|
||||
updateParams['claims'] = makeSelectClaimIdsForCollectionId(updateParams.claim_id)(state);
|
||||
} else if (options.claims) {
|
||||
updateParams['claims'] = options.claims;
|
||||
}
|
||||
|
||||
if (options.tags) {
|
||||
updateParams['tags'] = options.tags.map(tag => tag.name);
|
||||
}
|
||||
|
||||
if (options.channel_id) {
|
||||
updateParams['channel_id'] = options.channel_id;
|
||||
}
|
||||
|
||||
return new Promise(resolve => {
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_PUBLISH_UPDATE_STARTED,
|
||||
});
|
||||
|
||||
function success(response) {
|
||||
const collectionClaim = response.outputs[0];
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_PUBLISH_UPDATE_COMPLETED,
|
||||
data: {
|
||||
collectionClaim,
|
||||
},
|
||||
});
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_PENDING,
|
||||
data: { claimId: collectionClaim.claim_id },
|
||||
});
|
||||
dispatch({
|
||||
type: ACTIONS.UPDATE_PENDING_CLAIMS,
|
||||
data: {
|
||||
claims: [collectionClaim],
|
||||
},
|
||||
});
|
||||
dispatch(doCheckPendingClaims());
|
||||
return resolve(collectionClaim);
|
||||
}
|
||||
|
||||
function failure(error) {
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_PUBLISH_UPDATE_FAILED,
|
||||
data: {
|
||||
error: error.message,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return Lbry.collection_update(updateParams).then(success, failure);
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
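doCollectionPublishUpdate has two modes: an explicit edit that takes metadata and claims from options, and a background update (isBackgroundUpdate) that only reads the current item list from the collections reducer via makeSelectClaimIdsForCollectionId. A hedged sketch of both calls, with placeholder ids:

// Hypothetical caller: explicit metadata/items edit
store.dispatch(
  doCollectionPublishUpdate({
    claim_id: collectionClaimId,
    bid: '0.001',
    title: 'Renamed playlist',
    claims: ['claimid1', 'claimid3'],
  })
);

// Hypothetical caller: background item sync only, items pulled from the store
store.dispatch(doCollectionPublishUpdate({ claim_id: collectionClaimId }, true));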
export function doCheckPublishNameAvailability(name: string) {
|
||||
return (dispatch: Dispatch) => {
|
||||
dispatch({
|
||||
type: ACTIONS.CHECK_PUBLISH_NAME_STARTED,
|
||||
});
|
||||
|
||||
return Lbry.claim_list({ name: name }).then(result => {
|
||||
dispatch({
|
||||
type: ACTIONS.CHECK_PUBLISH_NAME_COMPLETED,
|
||||
});
|
||||
if (result.items.length) {
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED,
|
||||
data: {
|
||||
result,
|
||||
resolve: false,
|
||||
},
|
||||
});
|
||||
}
|
||||
return !(result && result.items && result.items.length);
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
export function doClearRepostError() {
|
||||
return {
|
||||
type: ACTIONS.CLEAR_REPOST_ERROR,
|
||||
};
|
||||
}
|
||||
|
||||
export function doPurchaseList(page: number = 1, pageSize: number = PAGE_SIZE) {
|
||||
return (dispatch: Dispatch) => {
|
||||
dispatch({
|
||||
type: ACTIONS.PURCHASE_LIST_STARTED,
|
||||
});
|
||||
|
||||
const success = (result: PurchaseListResponse) => {
|
||||
return dispatch({
|
||||
type: ACTIONS.PURCHASE_LIST_COMPLETED,
|
||||
data: {
|
||||
result,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
const failure = error => {
|
||||
dispatch({
|
||||
type: ACTIONS.PURCHASE_LIST_FAILED,
|
||||
data: {
|
||||
error: error.message,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
Lbry.purchase_list({
|
||||
page: page,
|
||||
page_size: pageSize,
|
||||
resolve: true,
|
||||
}).then(success, failure);
|
||||
};
|
||||
}
|
||||
|
||||
export const doCheckPendingClaims = (onChannelConfirmed: Function) => (
|
||||
dispatch: Dispatch,
|
||||
getState: GetState
|
||||
) => {
|
||||
if (onChannelConfirmed) {
|
||||
onChannelConfirmCallback = onChannelConfirmed;
|
||||
}
|
||||
clearInterval(checkPendingInterval);
|
||||
const checkTxoList = () => {
|
||||
const state = getState();
|
||||
const pendingById = Object.assign({}, selectPendingClaimsById(state));
|
||||
const pendingTxos = (Object.values(pendingById): any).map(p => p.txid);
|
||||
// use collections
|
||||
const pendingCollections = selectPendingCollections(state);
|
||||
if (pendingTxos.length) {
|
||||
Lbry.txo_list({ txid: pendingTxos })
|
||||
.then(result => {
|
||||
const txos = result.items;
|
||||
const idsToConfirm = [];
|
||||
txos.forEach(txo => {
|
||||
if (txo.claim_id && txo.confirmations > 0) {
|
||||
idsToConfirm.push(txo.claim_id);
|
||||
delete pendingById[txo.claim_id];
|
||||
}
|
||||
});
|
||||
return { idsToConfirm, pendingById };
|
||||
})
|
||||
.then(results => {
|
||||
const { idsToConfirm, pendingById } = results;
|
||||
if (idsToConfirm.length) {
|
||||
return Lbry.claim_list({ claim_id: idsToConfirm, resolve: true }).then(results => {
|
||||
const claims = results.items;
|
||||
const collectionIds = claims
|
||||
.filter(c => c.value_type === 'collection')
|
||||
.map(c => c.claim_id);
|
||||
dispatch({
|
||||
type: ACTIONS.UPDATE_CONFIRMED_CLAIMS,
|
||||
data: {
|
||||
claims: claims,
|
||||
pending: pendingById,
|
||||
},
|
||||
});
|
||||
if (collectionIds.length) {
|
||||
dispatch(
|
||||
doFetchItemsInCollections({
|
||||
collectionIds,
|
||||
})
|
||||
);
|
||||
}
|
||||
const channelClaims = claims.filter(claim => claim.value_type === 'channel');
|
||||
if (channelClaims.length && onChannelConfirmCallback) {
|
||||
channelClaims.forEach(claim => onChannelConfirmCallback(claim));
|
||||
}
|
||||
if (Object.keys(pendingById).length === 0) {
|
||||
clearInterval(checkPendingInterval);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
} else {
|
||||
clearInterval(checkPendingInterval);
|
||||
}
|
||||
};
|
||||
// do something with onConfirmed (typically get blocklist for channel)
|
||||
checkPendingInterval = setInterval(() => {
|
||||
checkTxoList();
|
||||
}, 30000);
|
||||
};
|
||||
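doCheckPendingClaims polls txo_list every 30 seconds until nothing is left in pendingById. A minimal sketch of starting it with a channel-confirmed callback (the callback body is a placeholder):

// Hypothetical caller:
store.dispatch(
  doCheckPendingClaims(channelClaim => {
    // called once per pending channel that reaches at least one confirmation
    console.log('channel confirmed:', channelClaim.claim_id);
  })
);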
|
|
495
src/redux/actions/collections.js
Normal file
|
@ -0,0 +1,495 @@
|
|||
// @flow
|
||||
import * as ACTIONS from 'constants/action_types';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
import Lbry from 'lbry';
|
||||
import { doClaimSearch, doAbandonClaim } from 'redux/actions/claims';
|
||||
import { makeSelectClaimForClaimId } from 'redux/selectors/claims';
|
||||
import {
|
||||
makeSelectCollectionForId,
|
||||
// makeSelectPublishedCollectionForId, // for "save" or "copy" action
|
||||
makeSelectMyPublishedCollectionForId,
|
||||
makeSelectPublishedCollectionForId,
|
||||
makeSelectUnpublishedCollectionForId,
|
||||
makeSelectEditedCollectionForId,
|
||||
} from 'redux/selectors/collections';
|
||||
import * as COLS from 'constants/collections';
|
||||
|
||||
const getTimestamp = () => {
|
||||
return Math.floor(Date.now() / 1000);
|
||||
};
|
||||
|
||||
const FETCH_BATCH_SIZE = 50;
|
||||
|
||||
export const doLocalCollectionCreate = (
|
||||
name: string,
|
||||
collectionItems: Array<string>,
|
||||
type: string,
|
||||
sourceId: string
|
||||
) => (dispatch: Dispatch) => {
|
||||
return dispatch({
|
||||
type: ACTIONS.COLLECTION_NEW,
|
||||
data: {
|
||||
entry: {
|
||||
id: uuid(), // start with a uuid, this becomes a claimId after publish
|
||||
name: name,
|
||||
updatedAt: getTimestamp(),
|
||||
items: collectionItems || [],
|
||||
sourceId: sourceId,
|
||||
type: type,
|
||||
},
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
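doLocalCollectionCreate keys the new private collection by a uuid until it is published (the id is later tied to a claim id via COLLECTION_PENDING). A hedged sketch with placeholder items and source id:

// Hypothetical caller:
import { doLocalCollectionCreate } from 'redux/actions/collections';

store.dispatch(
  doLocalCollectionCreate(
    'Watch Later (copy)',
    ['lbry://some-video#abc123'], // permanent urls of the initial items
    'playlist',
    sourceCollectionId // optional: collection this one was copied from
  )
);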
export const doCollectionDelete = (id: string, colKey: ?string = undefined) => (
|
||||
dispatch: Dispatch,
|
||||
getState: GetState
|
||||
) => {
|
||||
const state = getState();
|
||||
const claim = makeSelectClaimForClaimId(id)(state);
|
||||
const collectionDelete = () =>
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_DELETE,
|
||||
data: {
|
||||
id: id,
|
||||
collectionKey: colKey,
|
||||
},
|
||||
});
|
||||
if (claim && !colKey) {
|
||||
// could support "abandon, but keep" later
|
||||
const { txid, nout } = claim;
|
||||
return dispatch(doAbandonClaim(txid, nout, collectionDelete));
|
||||
}
|
||||
return collectionDelete();
|
||||
};
|
||||
|
||||
// Given a collection, save its collectionId to be resolved and displayed in Library
|
||||
// export const doCollectionSave = (
|
||||
// id: string,
|
||||
// ) => (dispatch: Dispatch) => {
|
||||
// return dispatch({
|
||||
// type: ACTIONS.COLLECTION_SAVE,
|
||||
// data: {
|
||||
// id: id,
|
||||
// },
|
||||
// });
|
||||
// };
|
||||
|
||||
// Given a collection and name, copy it to a local private collection with a name
|
||||
// export const doCollectionCopy = (
|
||||
// id: string,
|
||||
// ) => (dispatch: Dispatch) => {
|
||||
// return dispatch({
|
||||
// type: ACTIONS.COLLECTION_COPY,
|
||||
// data: {
|
||||
// id: id,
|
||||
// },
|
||||
// });
|
||||
// };
|
||||
|
||||
export const doFetchItemsInCollections = (
|
||||
resolveItemsOptions: {
|
||||
collectionIds: Array<string>,
|
||||
pageSize?: number,
|
||||
},
|
||||
resolveStartedCallback?: () => void
|
||||
) => async (dispatch: Dispatch, getState: GetState) => {
|
||||
/*
|
||||
1) make sure all the collection claims are loaded into claims reducer, search/resolve if necessary.
|
||||
2) get the item claims for each
|
||||
3) format and make sure they're in the order as in the claim
|
||||
4) Build the collection objects and update collections reducer
|
||||
5) Update redux claims reducer
|
||||
*/
|
||||
let state = getState();
|
||||
const { collectionIds, pageSize } = resolveItemsOptions;
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_ITEMS_RESOLVE_STARTED,
|
||||
data: { ids: collectionIds },
|
||||
});
|
||||
|
||||
if (resolveStartedCallback) resolveStartedCallback();
|
||||
|
||||
const collectionIdsToSearch = collectionIds.filter(claimId => !state.claims.byId[claimId]);
|
||||
|
||||
if (collectionIdsToSearch.length) {
|
||||
await dispatch(doClaimSearch({ claim_ids: collectionIdsToSearch, page: 1, page_size: 9999 }));
|
||||
}
|
||||
|
||||
const stateAfterClaimSearch = getState();
|
||||
|
||||
async function fetchItemsForCollectionClaim(claim: CollectionClaim, pageSize?: number) {
|
||||
const totalItems = claim.value.claims && claim.value.claims.length;
|
||||
const claimId = claim.claim_id;
|
||||
const itemOrder = claim.value.claims;
|
||||
|
||||
const sortResults = (items: Array<Claim>, claimList) => {
|
||||
const newItems: Array<Claim> = [];
|
||||
claimList.forEach(id => {
|
||||
const index = items.findIndex(i => i.claim_id === id);
|
||||
if (index >= 0) {
|
||||
newItems.push(items[index]);
|
||||
}
|
||||
});
|
||||
/*
|
||||
This will return newItems[] of length less than total_items below
|
||||
if one or more of the claims has been abandoned. That's ok for now.
|
||||
*/
|
||||
return newItems;
|
||||
};
|
||||
|
||||
const mergeBatches = (
|
||||
arrayOfResults: Array<{ items: Array<Claim>, total_items: number }>,
|
||||
claimList: Array<string>
|
||||
) => {
|
||||
const mergedResults: { items: Array<Claim>, total_items: number } = {
|
||||
items: [],
|
||||
total_items: 0,
|
||||
};
|
||||
arrayOfResults.forEach(result => {
|
||||
mergedResults.items = mergedResults.items.concat(result.items);
|
||||
mergedResults.total_items = result.total_items;
|
||||
});
|
||||
|
||||
mergedResults.items = sortResults(mergedResults.items, claimList);
|
||||
return mergedResults;
|
||||
};
|
||||
|
||||
try {
|
||||
const batchSize = pageSize || FETCH_BATCH_SIZE;
|
||||
const batches: Array<Promise<any>> = [];
|
||||
|
||||
for (let i = 0; i < Math.ceil(totalItems / batchSize); i++) {
|
||||
batches[i] = Lbry.claim_search({
|
||||
claim_ids: claim.value.claims,
|
||||
page: i + 1,
|
||||
page_size: batchSize,
|
||||
no_totals: true,
|
||||
});
|
||||
}
|
||||
const itemsInBatches = await Promise.all(batches);
|
||||
const result = mergeBatches(itemsInBatches, itemOrder);
|
||||
|
||||
// $FlowFixMe
|
||||
const itemsById: { claimId: string, items?: ?Array<GenericClaim> } = { claimId: claimId };
|
||||
if (result.items) {
|
||||
itemsById.items = result.items;
|
||||
} else {
|
||||
itemsById.items = null;
|
||||
}
|
||||
return itemsById;
|
||||
} catch (e) {
|
||||
return {
|
||||
claimId: claimId,
|
||||
items: null,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function formatForClaimActions(resultClaimsByUri) {
|
||||
const formattedClaims = {};
|
||||
Object.entries(resultClaimsByUri).forEach(([uri, uriResolveInfo]) => {
|
||||
// Flow has terrible Object.entries support
|
||||
// https://github.com/facebook/flow/issues/2221
|
||||
if (uriResolveInfo) {
|
||||
let result = {};
|
||||
if (uriResolveInfo.value_type === 'channel') {
|
||||
result.channel = uriResolveInfo;
|
||||
// $FlowFixMe
|
||||
result.claimsInChannel = uriResolveInfo.meta.claims_in_channel;
|
||||
// ALSO SKIP COLLECTIONS
|
||||
} else if (uriResolveInfo.value_type === 'collection') {
|
||||
result.collection = uriResolveInfo;
|
||||
} else {
|
||||
result.stream = uriResolveInfo;
|
||||
if (uriResolveInfo.signing_channel) {
|
||||
result.channel = uriResolveInfo.signing_channel;
|
||||
result.claimsInChannel =
|
||||
(uriResolveInfo.signing_channel.meta &&
|
||||
uriResolveInfo.signing_channel.meta.claims_in_channel) ||
|
||||
0;
|
||||
}
|
||||
}
|
||||
// $FlowFixMe
|
||||
formattedClaims[uri] = result;
|
||||
}
|
||||
});
|
||||
return formattedClaims;
|
||||
}
|
||||
|
||||
const invalidCollectionIds = [];
|
||||
const promisedCollectionItemFetches = [];
|
||||
collectionIds.forEach(collectionId => {
|
||||
const claim = makeSelectClaimForClaimId(collectionId)(stateAfterClaimSearch);
|
||||
if (!claim) {
|
||||
invalidCollectionIds.push(collectionId);
|
||||
} else {
|
||||
promisedCollectionItemFetches.push(fetchItemsForCollectionClaim(claim, pageSize));
|
||||
}
|
||||
});
|
||||
|
||||
// $FlowFixMe
|
||||
const collectionItemsById: Array<{
|
||||
claimId: string,
|
||||
items: ?Array<GenericClaim>,
|
||||
}> = await Promise.all(promisedCollectionItemFetches);
|
||||
|
||||
const newCollectionObjectsById = {};
|
||||
const resolvedItemsByUrl = {};
|
||||
collectionItemsById.forEach(entry => {
|
||||
// $FlowFixMe
|
||||
const collectionItems: Array<any> = entry.items;
|
||||
const collectionId = entry.claimId;
|
||||
if (collectionItems) {
|
||||
const claim = makeSelectClaimForClaimId(collectionId)(stateAfterClaimSearch);
|
||||
|
||||
const editedCollection = makeSelectEditedCollectionForId(collectionId)(stateAfterClaimSearch);
|
||||
const { name, timestamp, value } = claim || {};
|
||||
const { title } = value;
|
||||
const valueTypes = new Set();
|
||||
const streamTypes = new Set();
|
||||
|
||||
let newItems = [];
|
||||
let isPlaylist;
|
||||
|
||||
if (collectionItems) {
|
||||
collectionItems.forEach(collectionItem => {
|
||||
newItems.push(collectionItem.permanent_url);
|
||||
valueTypes.add(collectionItem.value_type);
|
||||
if (collectionItem.value.stream_type) {
|
||||
streamTypes.add(collectionItem.value.stream_type);
|
||||
}
|
||||
resolvedItemsByUrl[collectionItem.canonical_url] = collectionItem;
|
||||
});
|
||||
isPlaylist =
|
||||
valueTypes.size === 1 &&
|
||||
valueTypes.has('stream') &&
|
||||
((streamTypes.size === 1 && (streamTypes.has('audio') || streamTypes.has('video'))) ||
|
||||
(streamTypes.size === 2 && (streamTypes.has('audio') && streamTypes.has('video'))));
|
||||
}
|
||||
|
||||
newCollectionObjectsById[collectionId] = {
|
||||
items: newItems,
|
||||
id: collectionId,
|
||||
name: title || name,
|
||||
itemCount: claim.value.claims.length,
|
||||
type: isPlaylist ? 'playlist' : 'collection',
|
||||
updatedAt: timestamp,
|
||||
};
|
||||
|
||||
if (editedCollection && timestamp > editedCollection['updatedAt']) {
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_DELETE,
|
||||
data: {
|
||||
id: collectionId,
|
||||
collectionKey: 'edited',
|
||||
},
|
||||
});
|
||||
}
|
||||
} else {
|
||||
invalidCollectionIds.push(collectionId);
|
||||
}
|
||||
});
|
||||
const formattedClaimsByUri = formatForClaimActions(collectionItemsById);
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.RESOLVE_URIS_COMPLETED,
|
||||
data: { resolveInfo: formattedClaimsByUri },
|
||||
});
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_ITEMS_RESOLVE_COMPLETED,
|
||||
data: {
|
||||
resolvedCollections: newCollectionObjectsById,
|
||||
failedCollectionIds: invalidCollectionIds,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
export const doFetchItemsInCollection = (
|
||||
options: { collectionId: string, pageSize?: number },
|
||||
cb?: () => void
|
||||
) => {
|
||||
const { collectionId, pageSize } = options;
|
||||
const newOptions: { collectionIds: Array<string>, pageSize?: number } = {
|
||||
collectionIds: [collectionId],
|
||||
};
|
||||
if (pageSize) newOptions.pageSize = pageSize;
|
||||
return doFetchItemsInCollections(newOptions, cb);
|
||||
};
|
||||
|
||||
export const doCollectionEdit = (collectionId: string, params: CollectionEditParams) => async (
|
||||
dispatch: Dispatch,
|
||||
getState: GetState
|
||||
) => {
|
||||
const state = getState();
|
||||
const collection: Collection = makeSelectCollectionForId(collectionId)(state);
|
||||
const editedCollection: Collection = makeSelectEditedCollectionForId(collectionId)(state);
|
||||
const unpublishedCollection: Collection = makeSelectUnpublishedCollectionForId(collectionId)(
|
||||
state
|
||||
);
|
||||
const publishedCollection: Collection = makeSelectPublishedCollectionForId(collectionId)(state); // needs to be published only
|
||||
|
||||
const generateCollectionItemsFromSearchResult = results => {
|
||||
return (
|
||||
Object.values(results)
|
||||
// $FlowFixMe
|
||||
.reduce(
|
||||
(
|
||||
acc,
|
||||
cur: {
|
||||
stream: ?StreamClaim,
|
||||
channel: ?ChannelClaim,
|
||||
claimsInChannel: ?number,
|
||||
collection: ?CollectionClaim,
|
||||
}
|
||||
) => {
|
||||
let url;
|
||||
if (cur.stream) {
|
||||
url = cur.stream.permanent_url;
|
||||
} else if (cur.channel) {
|
||||
url = cur.channel.permanent_url;
|
||||
} else if (cur.collection) {
|
||||
url = cur.collection.permanent_url;
|
||||
} else {
|
||||
return acc;
|
||||
}
|
||||
acc.push(url);
|
||||
return acc;
|
||||
},
|
||||
[]
|
||||
)
|
||||
);
|
||||
};
|
||||
|
||||
if (!collection) {
|
||||
return dispatch({
|
||||
type: ACTIONS.COLLECTION_ERROR,
|
||||
data: {
|
||||
message: 'collection does not exist',
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
let currentItems = collection.items ? collection.items.concat() : [];
|
||||
const { claims: passedClaims, order, claimIds, replace, remove, type } = params;
|
||||
|
||||
const collectionType = type || collection.type;
|
||||
let newItems: Array<?string> = currentItems;
|
||||
|
||||
if (passedClaims) {
|
||||
if (remove) {
|
||||
const passedUrls = passedClaims.map(claim => claim.permanent_url);
|
||||
// $FlowFixMe // need this?
|
||||
newItems = currentItems.filter((item: string) => !passedUrls.includes(item));
|
||||
} else {
|
||||
passedClaims.forEach(claim => newItems.push(claim.permanent_url));
|
||||
}
|
||||
}
|
||||
|
||||
if (claimIds) {
|
||||
const batches = [];
|
||||
if (claimIds.length > 50) {
|
||||
for (let i = 0; i < Math.ceil(claimIds.length / 50); i++) {
|
||||
batches[i] = claimIds.slice(i * 50, (i + 1) * 50);
|
||||
}
|
||||
} else {
|
||||
batches[0] = claimIds;
|
||||
}
|
||||
const resultArray = await Promise.all(
|
||||
batches.map(batch => {
|
||||
let options = { claim_ids: batch, page: 1, page_size: 50 };
|
||||
return dispatch(doClaimSearch(options));
|
||||
})
|
||||
);
|
||||
|
||||
const searchResults = Object.assign({}, ...resultArray);
|
||||
|
||||
if (replace) {
|
||||
newItems = generateCollectionItemsFromSearchResult(searchResults);
|
||||
} else {
|
||||
newItems = currentItems.concat(generateCollectionItemsFromSearchResult(searchResults));
|
||||
}
|
||||
}
|
||||
|
||||
if (order) {
|
||||
const [movedItem] = currentItems.splice(order.from, 1);
|
||||
currentItems.splice(order.to, 0, movedItem);
|
||||
}
|
||||
|
||||
// console.log('p&e', publishedCollection.items, newItems, publishedCollection.items.join(','), newItems.join(','))
|
||||
if (editedCollection) {
|
||||
// delete edited if newItems are the same as publishedItems
|
||||
if (publishedCollection.items.join(',') === newItems.join(',')) {
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_DELETE,
|
||||
data: {
|
||||
id: collectionId,
|
||||
collectionKey: 'edited',
|
||||
},
|
||||
});
|
||||
} else {
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_EDIT,
|
||||
data: {
|
||||
id: collectionId,
|
||||
collectionKey: 'edited',
|
||||
collection: {
|
||||
items: newItems,
|
||||
id: collectionId,
|
||||
name: params.name || collection.name,
|
||||
updatedAt: getTimestamp(),
|
||||
type: collectionType,
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
} else if (publishedCollection) {
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_EDIT,
|
||||
data: {
|
||||
id: collectionId,
|
||||
collectionKey: 'edited',
|
||||
collection: {
|
||||
items: newItems,
|
||||
id: collectionId,
|
||||
name: params.name || collection.name,
|
||||
updatedAt: getTimestamp(),
|
||||
type: collectionType,
|
||||
},
|
||||
},
|
||||
});
|
||||
} else if (COLS.BUILTIN_LISTS.includes(collectionId)) {
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_EDIT,
|
||||
data: {
|
||||
id: collectionId,
|
||||
collectionKey: 'builtin',
|
||||
collection: {
|
||||
items: newItems,
|
||||
id: collectionId,
|
||||
name: params.name || collection.name,
|
||||
updatedAt: getTimestamp(),
|
||||
type: collectionType,
|
||||
},
|
||||
},
|
||||
});
|
||||
} else if (unpublishedCollection) {
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_EDIT,
|
||||
data: {
|
||||
id: collectionId,
|
||||
collectionKey: 'unpublished',
|
||||
collection: {
|
||||
items: newItems,
|
||||
id: collectionId,
|
||||
name: params.name || collection.name,
|
||||
updatedAt: getTimestamp(),
|
||||
type: collectionType,
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
return true;
|
||||
};
|
|
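doCollectionEdit accepts claims, claimIds, order, replace, remove, name, and type in its params and routes the result into the edited, builtin, or unpublished bucket. A rough sketch of appending items by claim id and then reordering, with placeholder ids (dispatch returns a promise because the thunk is async):

// Hypothetical caller:
store
  .dispatch(doCollectionEdit(collectionId, { claimIds: ['claimidA', 'claimidB'] }))
  .then(() => store.dispatch(doCollectionEdit(collectionId, { order: { from: 0, to: 2 } })));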
src/redux/actions/comments.js
@ -1,209 +0,0 @@
|
|||
// @flow
|
||||
import * as ACTIONS from 'constants/action_types';
|
||||
import Lbry from 'lbry';
|
||||
import { selectClaimsByUri, selectMyChannelClaims } from 'redux/selectors/claims';
|
||||
import { doToast } from 'redux/actions/notifications';
|
||||
|
||||
export function doCommentList(uri: string, page: number = 1, pageSize: number = 99999) {
|
||||
return (dispatch: Dispatch, getState: GetState) => {
|
||||
const state = getState();
|
||||
const claim = selectClaimsByUri(state)[uri];
|
||||
const claimId = claim ? claim.claim_id : null;
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.COMMENT_LIST_STARTED,
|
||||
});
|
||||
Lbry.comment_list({
|
||||
claim_id: claimId,
|
||||
page,
|
||||
page_size: pageSize,
|
||||
})
|
||||
.then((result: CommentListResponse) => {
|
||||
const { items: comments } = result;
|
||||
dispatch({
|
||||
type: ACTIONS.COMMENT_LIST_COMPLETED,
|
||||
data: {
|
||||
comments,
|
||||
claimId: claimId,
|
||||
uri: uri,
|
||||
},
|
||||
});
|
||||
})
|
||||
.catch(error => {
|
||||
console.log(error);
|
||||
dispatch({
|
||||
type: ACTIONS.COMMENT_LIST_FAILED,
|
||||
data: error,
|
||||
});
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
export function doCommentCreate(
|
||||
comment: string = '',
|
||||
claim_id: string = '',
|
||||
channel: ?string,
|
||||
parent_id?: string
|
||||
) {
|
||||
return (dispatch: Dispatch, getState: GetState) => {
|
||||
const state = getState();
|
||||
dispatch({
|
||||
type: ACTIONS.COMMENT_CREATE_STARTED,
|
||||
});
|
||||
const myChannels = selectMyChannelClaims(state);
|
||||
const namedChannelClaim =
|
||||
myChannels && myChannels.find(myChannel => myChannel.name === channel);
|
||||
const channel_id = namedChannelClaim ? namedChannelClaim.claim_id : null;
|
||||
return Lbry.comment_create({
|
||||
comment: comment,
|
||||
claim_id: claim_id,
|
||||
channel_id: channel_id,
|
||||
parent_id: parent_id,
|
||||
})
|
||||
.then((result: CommentCreateResponse) => {
|
||||
dispatch({
|
||||
type: ACTIONS.COMMENT_CREATE_COMPLETED,
|
||||
data: {
|
||||
comment: result,
|
||||
claimId: claim_id,
|
||||
},
|
||||
});
|
||||
})
|
||||
.catch(error => {
|
||||
dispatch({
|
||||
type: ACTIONS.COMMENT_CREATE_FAILED,
|
||||
data: error,
|
||||
});
|
||||
dispatch(
|
||||
doToast({
|
||||
message: 'Unable to create comment, please try again later.',
|
||||
isError: true,
|
||||
})
|
||||
);
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
export function doCommentHide(comment_id: string) {
|
||||
return (dispatch: Dispatch) => {
|
||||
dispatch({
|
||||
type: ACTIONS.COMMENT_HIDE_STARTED,
|
||||
});
|
||||
return Lbry.comment_hide({
|
||||
comment_ids: [comment_id],
|
||||
})
|
||||
.then((result: CommentHideResponse) => {
|
||||
dispatch({
|
||||
type: ACTIONS.COMMENT_HIDE_COMPLETED,
|
||||
data: result,
|
||||
});
|
||||
})
|
||||
.catch(error => {
|
||||
dispatch({
|
||||
type: ACTIONS.COMMENT_HIDE_FAILED,
|
||||
data: error,
|
||||
});
|
||||
dispatch(
|
||||
doToast({
|
||||
message: 'Unable to hide this comment, please try again later.',
|
||||
isError: true,
|
||||
})
|
||||
);
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
export function doCommentAbandon(comment_id: string) {
|
||||
return (dispatch: Dispatch) => {
|
||||
dispatch({
|
||||
type: ACTIONS.COMMENT_ABANDON_STARTED,
|
||||
});
|
||||
return Lbry.comment_abandon({
|
||||
comment_id: comment_id,
|
||||
})
|
||||
.then((result: CommentAbandonResponse) => {
|
||||
// Comment may not be deleted if the signing channel can't be signed.
|
||||
// This will happen if the channel was recently created or abandoned.
|
||||
if (result.abandoned) {
|
||||
dispatch({
|
||||
type: ACTIONS.COMMENT_ABANDON_COMPLETED,
|
||||
data: {
|
||||
comment_id: comment_id,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
dispatch({
|
||||
type: ACTIONS.COMMENT_ABANDON_FAILED,
|
||||
});
|
||||
dispatch(
|
||||
doToast({
|
||||
message: 'Your channel is still being setup, try again in a few moments.',
|
||||
isError: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
dispatch({
|
||||
type: ACTIONS.COMMENT_ABANDON_FAILED,
|
||||
data: error,
|
||||
});
|
||||
dispatch(
|
||||
doToast({
|
||||
message: 'Unable to delete this comment, please try again later.',
|
||||
isError: true,
|
||||
})
|
||||
);
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
export function doCommentUpdate(comment_id: string, comment: string) {
|
||||
// if they provided an empty string, they must have wanted to abandon
|
||||
if (comment === '') {
|
||||
return doCommentAbandon(comment_id);
|
||||
} else {
|
||||
return (dispatch: Dispatch) => {
|
||||
dispatch({
|
||||
type: ACTIONS.COMMENT_UPDATE_STARTED,
|
||||
});
|
||||
return Lbry.comment_update({
|
||||
comment_id: comment_id,
|
||||
comment: comment,
|
||||
})
|
||||
.then((result: CommentUpdateResponse) => {
|
||||
if (result != null) {
|
||||
dispatch({
|
||||
type: ACTIONS.COMMENT_UPDATE_COMPLETED,
|
||||
data: {
|
||||
comment: result,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
// the result will return null
|
||||
dispatch({
|
||||
type: ACTIONS.COMMENT_UPDATE_FAILED,
|
||||
});
|
||||
dispatch(
|
||||
doToast({
|
||||
message: 'Your channel is still being setup, try again in a few moments.',
|
||||
isError: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
dispatch({
|
||||
type: ACTIONS.COMMENT_UPDATE_FAILED,
|
||||
data: error,
|
||||
});
|
||||
dispatch(
|
||||
doToast({
|
||||
message: 'Unable to edit this comment, please try again later.',
|
||||
isError: true,
|
||||
})
|
||||
);
|
||||
});
|
||||
};
|
||||
}
|
||||
}
|
|
src/redux/actions/file.js
@ -3,8 +3,11 @@ import * as ACTIONS from 'constants/action_types';
|
|||
import Lbry from 'lbry';
|
||||
import { doToast } from 'redux/actions/notifications';
|
||||
import { selectBalance } from 'redux/selectors/wallet';
|
||||
import { makeSelectFileInfoForUri, selectDownloadingByOutpoint } from 'redux/selectors/file_info';
|
||||
import { makeSelectStreamingUrlForUri } from 'redux/selectors/file';
|
||||
import {
|
||||
makeSelectFileInfoForUri,
|
||||
selectDownloadingByOutpoint,
|
||||
makeSelectStreamingUrlForUri,
|
||||
} from 'redux/selectors/file_info';
|
||||
import { makeSelectClaimForUri } from 'redux/selectors/claims';
|
||||
|
||||
type Dispatch = (action: any) => any;
|
||||
|
@ -28,7 +31,6 @@ export function doFileGet(uri: string, saveFile: boolean = true, onSuccess?: Get
|
|||
.then((streamInfo: GetResponse) => {
|
||||
const timeout =
|
||||
streamInfo === null || typeof streamInfo !== 'object' || streamInfo.error === 'Timeout';
|
||||
|
||||
if (timeout) {
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_FILE_INFO_FAILED,
|
||||
|
@ -37,16 +39,17 @@ export function doFileGet(uri: string, saveFile: boolean = true, onSuccess?: Get
|
|||
|
||||
dispatch(doToast({ message: `File timeout for uri ${uri}`, isError: true }));
|
||||
} else {
|
||||
// purchase was completed successfully
|
||||
dispatch({
|
||||
type: ACTIONS.PURCHASE_URI_COMPLETED,
|
||||
data: { uri },
|
||||
});
|
||||
if (streamInfo.purchase_receipt || streamInfo.content_fee) {
|
||||
dispatch({
|
||||
type: ACTIONS.PURCHASE_URI_COMPLETED,
|
||||
data: { uri, purchaseReceipt: streamInfo.purchase_receipt || streamInfo.content_fee },
|
||||
});
|
||||
}
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_FILE_INFO_COMPLETED,
|
||||
data: {
|
||||
fileInfo: streamInfo,
|
||||
outpoint: streamInfo.outpoint,
|
||||
outpoint: outpoint,
|
||||
},
|
||||
});
|
||||
|
||||
|
@ -55,10 +58,10 @@ export function doFileGet(uri: string, saveFile: boolean = true, onSuccess?: Get
|
|||
}
|
||||
}
|
||||
})
|
||||
.catch(() => {
|
||||
.catch(error => {
|
||||
dispatch({
|
||||
type: ACTIONS.PURCHASE_URI_FAILED,
|
||||
data: { uri },
|
||||
data: { uri, error },
|
||||
});
|
||||
|
||||
dispatch({
|
||||
|
@ -101,7 +104,10 @@ export function doPurchaseUri(
|
|||
data: { uri, error: `Already fetching uri: ${uri}` },
|
||||
});
|
||||
|
||||
Promise.resolve();
|
||||
if (onSuccess) {
|
||||
onSuccess(fileInfo);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -120,9 +126,8 @@ export function doPurchaseUri(
|
|||
};
|
||||
}
|
||||
|
||||
export function doDeletePurchasedUri(uri: string) {
|
||||
export function doClearPurchasedUriSuccess() {
|
||||
return {
|
||||
type: ACTIONS.DELETE_PURCHASED_URI,
|
||||
data: { uri },
|
||||
type: ACTIONS.CLEAR_PURCHASED_URI_SUCCESS,
|
||||
};
|
||||
}
|
||||
|
|
|
src/redux/actions/file_info.js
@ -1,7 +1,6 @@
|
|||
import * as ACTIONS from 'constants/action_types';
|
||||
import Lbry from 'lbry';
|
||||
import { doFetchClaimListMine } from 'redux/actions/claims';
|
||||
import { selectClaimsByUri, selectIsFetchingClaimListMine } from 'redux/selectors/claims';
|
||||
import { selectClaimsByUri } from 'redux/selectors/claims';
|
||||
import { selectIsFetchingFileList, selectUrisLoading } from 'redux/selectors/file_info';
|
||||
|
||||
export function doFetchFileInfo(uri) {
|
||||
|
@ -58,13 +57,10 @@ export function doFileList(page = 1, pageSize = 99999) {
|
|||
};
|
||||
}
|
||||
|
||||
export function doFetchFileInfosAndPublishedClaims() {
|
||||
export function doFetchFileInfos() {
|
||||
return (dispatch, getState) => {
|
||||
const state = getState();
|
||||
const isFetchingClaimListMine = selectIsFetchingClaimListMine(state);
|
||||
const isFetchingFileInfo = selectIsFetchingFileList(state);
|
||||
|
||||
if (!isFetchingClaimListMine) dispatch(doFetchClaimListMine());
|
||||
if (!isFetchingFileInfo) dispatch(doFileList());
|
||||
};
|
||||
}
|
||||
|
|
|
src/redux/actions/notifications.js
@ -1,6 +1,6 @@
|
|||
// @flow
|
||||
import * as ACTIONS from 'constants/action_types';
|
||||
import uuid from 'uuid/v4';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
|
||||
export function doToast(params: ToastParams) {
|
||||
if (!params) {
|
||||
|
|
|
src/redux/actions/publish.js
@ -4,14 +4,15 @@ import { SPEECH_STATUS, SPEECH_PUBLISH } from 'constants/speech_urls';
|
|||
import * as ACTIONS from 'constants/action_types';
|
||||
import * as THUMBNAIL_STATUSES from 'constants/thumbnail_upload_statuses';
|
||||
import Lbry from 'lbry';
|
||||
import LbryFirst from 'lbry-first';
|
||||
import { batchActions } from 'util/batch-actions';
|
||||
import { creditsToString } from 'util/format-credits';
|
||||
import { doError } from 'redux/actions/notifications';
|
||||
import { isClaimNsfw } from 'util/claim';
|
||||
import {
|
||||
selectMyChannelClaims,
|
||||
selectPendingById,
|
||||
selectMyClaimsWithoutChannels,
|
||||
selectReflectingById,
|
||||
} from 'redux/selectors/claims';
|
||||
import { selectPublishFormValues, selectMyClaimForUri } from 'redux/selectors/publish';
|
||||
|
||||
|
@ -20,6 +21,7 @@ export const doResetThumbnailStatus = () => (dispatch: Dispatch) => {
|
|||
type: ACTIONS.UPDATE_PUBLISH_FORM,
|
||||
data: {
|
||||
thumbnailPath: '',
|
||||
thumbnailError: undefined,
|
||||
},
|
||||
});
|
||||
|
||||
|
@ -67,8 +69,10 @@ export const doUploadThumbnail = (
|
|||
thumbnailBlob?: File,
|
||||
fsAdapter?: any,
|
||||
fs?: any,
|
||||
path?: any
|
||||
path?: any,
|
||||
cb?: (string) => void
|
||||
) => (dispatch: Dispatch) => {
|
||||
const downMessage = __('Thumbnail upload service may be down, try again later.');
|
||||
let thumbnail, fileExt, fileName, fileType;
|
||||
|
||||
const makeid = () => {
|
||||
|
@ -94,6 +98,45 @@ export const doUploadThumbnail = (
|
|||
);
|
||||
};
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.UPDATE_PUBLISH_FORM,
|
||||
data: {
|
||||
thumbnailError: undefined,
|
||||
},
|
||||
});
|
||||
|
||||
const doUpload = data => {
|
||||
return fetch(SPEECH_PUBLISH, {
|
||||
method: 'POST',
|
||||
body: data,
|
||||
})
|
||||
.then(res => res.text())
|
||||
.then(text => (text.length ? JSON.parse(text) : {}))
|
||||
.then(json => {
|
||||
if (!json.success) return uploadError(json.message || downMessage);
|
||||
if (cb) {
|
||||
cb(json.data.serveUrl);
|
||||
}
|
||||
return dispatch({
|
||||
type: ACTIONS.UPDATE_PUBLISH_FORM,
|
||||
data: {
|
||||
uploadThumbnailStatus: THUMBNAIL_STATUSES.COMPLETE,
|
||||
thumbnail: json.data.serveUrl,
|
||||
},
|
||||
});
|
||||
})
|
||||
.catch(err => {
|
||||
let message = err.message;
|
||||
|
||||
// This sucks but ¯\_(ツ)_/¯
|
||||
if (message === 'Failed to fetch') {
|
||||
message = downMessage;
|
||||
}
|
||||
|
||||
uploadError(message);
|
||||
});
|
||||
};
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.UPDATE_PUBLISH_FORM,
|
||||
data: { uploadThumbnailStatus: THUMBNAIL_STATUSES.IN_PROGRESS },
|
||||
|
@ -110,24 +153,7 @@ export const doUploadThumbnail = (
|
|||
data.append('name', name);
|
||||
// $FlowFixMe
|
||||
data.append('file', { uri: 'file://' + filePath, type: fileType, name: fileName });
|
||||
|
||||
return fetch(SPEECH_PUBLISH, {
|
||||
method: 'POST',
|
||||
body: data,
|
||||
})
|
||||
.then(response => response.json())
|
||||
.then(json =>
|
||||
json.success
|
||||
? dispatch({
|
||||
type: ACTIONS.UPDATE_PUBLISH_FORM,
|
||||
data: {
|
||||
uploadThumbnailStatus: THUMBNAIL_STATUSES.COMPLETE,
|
||||
thumbnail: `${json.data.url}.${fileExt}`,
|
||||
},
|
||||
})
|
||||
: uploadError(json.message)
|
||||
)
|
||||
.catch(err => uploadError(err.message));
|
||||
return doUpload(data);
|
||||
});
|
||||
} else {
|
||||
if (filePath && fs && path) {
|
||||
|
@ -150,24 +176,7 @@ export const doUploadThumbnail = (
|
|||
data.append('name', name);
|
||||
// $FlowFixMe
|
||||
data.append('file', file);
|
||||
|
||||
return fetch(SPEECH_PUBLISH, {
|
||||
method: 'POST',
|
||||
body: data,
|
||||
})
|
||||
.then(response => response.json())
|
||||
.then(json =>
|
||||
json.success
|
||||
? dispatch({
|
||||
type: ACTIONS.UPDATE_PUBLISH_FORM,
|
||||
data: {
|
||||
uploadThumbnailStatus: THUMBNAIL_STATUSES.COMPLETE,
|
||||
thumbnail: `${json.data.url}${fileExt}`,
|
||||
},
|
||||
})
|
||||
: uploadError(json.message)
|
||||
)
|
||||
.catch(err => uploadError(err.message));
|
||||
return doUpload(data);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -186,6 +195,7 @@ export const doPrepareEdit = (claim: StreamClaim, uri: string, fileInfo: FileLis
|
|||
currency: 'LBC',
|
||||
},
|
||||
languages,
|
||||
release_time,
|
||||
license,
|
||||
license_url: licenseUrl,
|
||||
thumbnail,
|
||||
|
@ -201,6 +211,8 @@ export const doPrepareEdit = (claim: StreamClaim, uri: string, fileInfo: FileLis
|
|||
description,
|
||||
fee,
|
||||
languages,
|
||||
releaseTime: release_time,
|
||||
releaseTimeEdited: undefined,
|
||||
thumbnail: thumbnail ? thumbnail.url : null,
|
||||
title,
|
||||
uri,
|
||||
|
@ -232,11 +244,13 @@ export const doPrepareEdit = (claim: StreamClaim, uri: string, fileInfo: FileLis
|
|||
dispatch({ type: ACTIONS.DO_PREPARE_EDIT, data: publishData });
|
||||
};
|
||||
|
||||
export const doPublish = (success: Function, fail: Function) => (
export const doPublish = (success: Function, fail: Function, preview: Function) => (
  dispatch: Dispatch,
  getState: () => {}
) => {
  dispatch({ type: ACTIONS.PUBLISH_START });
  if (!preview) {
    dispatch({ type: ACTIONS.PUBLISH_START });
  }

  const state = getState();
  const myClaimForUri = selectMyClaimForUri(state);

@@ -252,8 +266,10 @@ export const doPublish = (success: Function, fail: Function) => (
    filePath,
    description,
    language,
    releaseTimeEdited,
    license,
    licenseUrl,
    useLBRYUploader,
    licenseType,
    otherLicenseDescription,
    thumbnail,

@@ -264,7 +280,11 @@ export const doPublish = (success: Function, fail: Function) => (
    uri,
    tags,
    locations,
    optimize,
    isLivestreamPublish,
    remoteFileUrl,
  } = publishData;

  // Handle scenario where we have a claim that has the same name as a channel we are publishing with.
  const myClaimForUriEditing = myClaimForUri && myClaimForUri.name === name ? myClaimForUri : null;

@@ -290,7 +310,6 @@ export const doPublish = (success: Function, fail: Function) => (
    description?: string,
    channel_id?: string,
    file_path?: string,

    license_url?: string,
    license?: string,
    thumbnail_url?: string,

@@ -301,6 +320,9 @@ export const doPublish = (success: Function, fail: Function) => (
    tags: Array<string>,
    locations?: Array<any>,
    blocking: boolean,
    optimize_file?: boolean,
    preview?: boolean,
    remote_url?: string,
  } = {
    name,
    title,

@@ -311,10 +333,14 @@ export const doPublish = (success: Function, fail: Function) => (
    tags: tags && tags.map(tag => tag.name),
    thumbnail_url: thumbnail,
    blocking: true,
    preview: false,
  };
  // Temporary solution to keep the same publish flow with the new tags api
  // Eventually we will allow users to enter their own tags on publish
  // `nsfw` will probably be removed
  if (remoteFileUrl) {
    publishPayload.remote_url = remoteFileUrl;
  }

  if (publishingLicense) {
    publishPayload.license = publishingLicense;

@@ -328,8 +354,14 @@ export const doPublish = (success: Function, fail: Function) => (
    publishPayload.thumbnail_url = thumbnail;
  }

  if (useLBRYUploader) {
    publishPayload.tags.push('lbry-first');
  }

  // Set release time to current date. On edits, keep original release/transaction time as release_time
  if (myClaimForUriEditing && myClaimForUriEditing.value.release_time) {
  if (releaseTimeEdited) {
    publishPayload.release_time = releaseTimeEdited;
  } else if (myClaimForUriEditing && myClaimForUriEditing.value.release_time) {
    publishPayload.release_time = Number(myClaimForUri.value.release_time);
  } else if (myClaimForUriEditing && myClaimForUriEditing.timestamp) {
    publishPayload.release_time = Number(myClaimForUriEditing.timestamp);

@@ -350,55 +382,113 @@ export const doPublish = (success: Function, fail: Function) => (
    publishPayload.fee_amount = creditsToString(fee.amount);
  }

  // Only pass file on new uploads, not metadata only edits.
  // The sdk will figure it out
  if (filePath) publishPayload.file_path = filePath;

  return Lbry.publish(publishPayload).then(success, fail);
};

// Calls claim_list_mine until any pending publishes are confirmed
export const doCheckPendingPublishes = (onConfirmed: Function) => (
  dispatch: Dispatch,
  getState: GetState
) => {
  const state = getState();
  const pendingById = selectPendingById(state);

  if (!Object.keys(pendingById).length) {
    return;
  if (optimize) {
    publishPayload.optimize_file = true;
  }

  let publishCheckInterval;
  // Only pass file on new uploads, not metadata only edits.
  // The sdk will figure it out
  if (filePath && !isLivestreamPublish) publishPayload.file_path = filePath;

  const checkFileList = () => {
    Lbry.stream_list({ page: 1, page_size: 10 }).then(result => {
      const claims = result.items;
  if (preview) {
    publishPayload.preview = true;
    publishPayload.optimize_file = false;

      claims.forEach(claim => {
        // If it's confirmed, check if it was pending previously
        if (claim.confirmations > 0 && pendingById[claim.claim_id]) {
          delete pendingById[claim.claim_id];
          if (onConfirmed) {
            onConfirmed(claim);
          }
        }
    return Lbry.publish(publishPayload).then((previewResponse: PublishResponse) => {
      return preview(previewResponse);
    }, fail);
  }

  return Lbry.publish(publishPayload).then((response: PublishResponse) => {
    if (!useLBRYUploader) {
      return success(response);
    }

    // $FlowFixMe
    publishPayload.permanent_url = response.outputs[0].permanent_url;

    return LbryFirst.upload(publishPayload)
      .then(() => {
        // Return original publish response so app treats it like a normal publish
        return success(response);
      })
      .catch(error => {
        return success(response, error);
      });

      dispatch({
        type: ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED,
        data: {
          claims,
        },
      });

      if (!Object.keys(pendingById).length) {
        clearInterval(publishCheckInterval);
      }
    });
  };

  publishCheckInterval = setInterval(() => {
    checkFileList();
  }, 30000);
  }, fail);
};

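A usage sketch of the updated doPublish signature from a consumer; the callbacks and wiring here are illustrative and assume redux-thunk is installed, they are not part of this diff:

// Illustrative callbacks.
const onSuccess = response => console.log('published', response.outputs[0].permanent_url);
const onFail = err => console.error('publish failed', err);
const onPreview = previewResponse => console.log('preview', previewResponse);

// Regular publish: dispatches PUBLISH_START and passes the file to the SDK.
dispatch(doPublish(onSuccess, onFail));

// Dry run: with a preview callback the payload gets preview: true and
// optimize_file: false, and the SDK's preview response is handed back instead.
dispatch(doPublish(onSuccess, onFail, onPreview));
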
// Calls file_list until any reflecting files are done
export const doCheckReflectingFiles = () => (dispatch: Dispatch, getState: GetState) => {
  const state = getState();
  const { checkingReflector } = state.claims;
  let reflectorCheckInterval;

  const checkFileList = async () => {
    const state = getState();
    const reflectingById = selectReflectingById(state);
    const ids = Object.keys(reflectingById);

    const newReflectingById = {};
    const promises = [];
    // TODO: just use file_list({claim_id: Array<claimId>})
    if (Object.keys(reflectingById).length) {
      ids.forEach(claimId => {
        promises.push(Lbry.file_list({ claim_id: claimId }));
      });

      Promise.all(promises)
        .then(results => {
          results.forEach(res => {
            if (res.items[0]) {
              const fileListItem = res.items[0];
              const fileClaimId = fileListItem.claim_id;
              const {
                is_fully_reflected: done,
                uploading_to_reflector: uploading,
                reflector_progress: progress,
              } = fileListItem;
              if (uploading) {
                newReflectingById[fileClaimId] = {
                  fileListItem: fileListItem,
                  progress,
                  stalled: !done && !uploading,
                };
              }
            }
          });
        })
        .then(() => {
          dispatch({
            type: ACTIONS.UPDATE_FILES_REFLECTING,
            data: newReflectingById,
          });
          if (!Object.keys(newReflectingById).length) {
            dispatch({
              type: ACTIONS.TOGGLE_CHECKING_REFLECTING,
              data: false,
            });
            clearInterval(reflectorCheckInterval);
          }
        });
    } else {
      dispatch({
        type: ACTIONS.TOGGLE_CHECKING_REFLECTING,
        data: false,
      });
      clearInterval(reflectorCheckInterval);
    }
  };
  // do it once...
  checkFileList();
  // then start the interval if it's not already started
  if (!checkingReflector) {
    dispatch({
      type: ACTIONS.TOGGLE_CHECKING_REFLECTING,
      data: true,
    });
    reflectorCheckInterval = setInterval(() => {
      checkFileList();
    }, 5000);
  }
};

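doCheckPendingPublishes (stream_list every 30s) and doCheckReflectingFiles (file_list every 5s) share the same poll-until-done shape. A generic sketch of that pattern, with illustrative names not taken from the diff:

// Run a check immediately, then on an interval, and clear the interval once
// the check reports there is nothing left to watch.
function pollUntilDone(check, intervalMs) {
  const intervalId = setInterval(async () => {
    if (await check()) clearInterval(intervalId);
  }, intervalMs);
  // mirror the "do it once..." call above
  Promise.resolve(check()).then(done => {
    if (done) clearInterval(intervalId);
  });
  return intervalId;
}

// e.g. the reflector check is roughly pollUntilDone(checkFileList, 5000),
// and the pending-publish check is roughly pollUntilDone(checkFileList, 30000).
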
@ -1,278 +0,0 @@
|
|||
// @flow
|
||||
import * as ACTIONS from 'constants/action_types';
|
||||
import { buildURI } from 'lbryURI';
|
||||
import { doResolveUri } from 'redux/actions/claims';
|
||||
import {
|
||||
makeSelectSearchUris,
|
||||
makeSelectResolvedSearchResults,
|
||||
selectSuggestions,
|
||||
makeSelectQueryWithOptions,
|
||||
selectSearchValue,
|
||||
} from 'redux/selectors/search';
|
||||
import { batchActions } from 'util/batch-actions';
|
||||
import debounce from 'util/debounce';
|
||||
import handleFetchResponse from 'util/handle-fetch';
|
||||
|
||||
const DEBOUNCED_SEARCH_SUGGESTION_MS = 300;
|
||||
type Dispatch = (action: any) => any;
|
||||
type GetState = () => { search: SearchState };
|
||||
|
||||
type SearchOptions = {
|
||||
size?: number,
|
||||
from?: number,
|
||||
related_to?: string,
|
||||
nsfw?: boolean,
|
||||
isBackgroundSearch?: boolean,
|
||||
resolveResults?: boolean,
|
||||
};
|
||||
|
||||
// We can't use env's because they aren't passed into node_modules
|
||||
let CONNECTION_STRING = 'https://lighthouse.lbry.com/';
|
||||
|
||||
export const setSearchApi = (endpoint: string) => {
|
||||
CONNECTION_STRING = endpoint.replace(/\/*$/, '/'); // exactly one slash at the end;
|
||||
};
|
||||
|
||||
export const getSearchSuggestions = (value: string) => (dispatch: Dispatch, getState: GetState) => {
|
||||
const query = value.trim();
|
||||
|
||||
// strip out any basic stuff for more accurate search results
|
||||
let searchValue = query.replace(/lbry:\/\//g, '').replace(/-/g, ' ');
|
||||
if (searchValue.includes('#')) {
|
||||
// This should probably be more robust, but I think it's fine for now
|
||||
// Remove everything after # to get rid of the claim id
|
||||
searchValue = searchValue.substring(0, searchValue.indexOf('#'));
|
||||
}
|
||||
|
||||
const suggestions = selectSuggestions(getState());
|
||||
if (suggestions[searchValue]) {
|
||||
return;
|
||||
}
|
||||
|
||||
fetch(`${CONNECTION_STRING}autocomplete?s=${searchValue}`)
|
||||
.then(handleFetchResponse)
|
||||
.then(apiSuggestions => {
|
||||
dispatch({
|
||||
type: ACTIONS.UPDATE_SEARCH_SUGGESTIONS,
|
||||
data: {
|
||||
query: searchValue,
|
||||
suggestions: apiSuggestions,
|
||||
},
|
||||
});
|
||||
})
|
||||
.catch(() => {
|
||||
// If the fetch fails, do nothing
|
||||
// Basic search suggestions are already populated at this point
|
||||
});
|
||||
};
|
||||
|
||||
const throttledSearchSuggestions = debounce((dispatch, query) => {
|
||||
dispatch(getSearchSuggestions(query));
|
||||
}, DEBOUNCED_SEARCH_SUGGESTION_MS);
|
||||
|
||||
export const doUpdateSearchQuery = (query: string, shouldSkipSuggestions: ?boolean) => (
|
||||
dispatch: Dispatch
|
||||
) => {
|
||||
dispatch({
|
||||
type: ACTIONS.UPDATE_SEARCH_QUERY,
|
||||
data: { query },
|
||||
});
|
||||
|
||||
// Don't fetch new suggestions if the user just added a space
|
||||
if (!query.endsWith(' ') || !shouldSkipSuggestions) {
|
||||
throttledSearchSuggestions(dispatch, query);
|
||||
}
|
||||
};
|
||||
|
||||
export const doSearch = (rawQuery: string, searchOptions: SearchOptions) => (
|
||||
dispatch: Dispatch,
|
||||
getState: GetState
|
||||
) => {
|
||||
const query = rawQuery.replace(/^lbry:\/\//i, '').replace(/\//, ' ');
|
||||
const resolveResults = searchOptions && searchOptions.resolveResults;
|
||||
const isBackgroundSearch = (searchOptions && searchOptions.isBackgroundSearch) || false;
|
||||
|
||||
if (!query) {
|
||||
dispatch({
|
||||
type: ACTIONS.SEARCH_FAIL,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const state = getState();
|
||||
|
||||
let queryWithOptions = makeSelectQueryWithOptions(query, searchOptions)(state);
|
||||
|
||||
// If we have already searched for something, we don't need to do anything
|
||||
const urisForQuery = makeSelectSearchUris(queryWithOptions)(state);
|
||||
if (urisForQuery && !!urisForQuery.length) {
|
||||
return;
|
||||
}
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.SEARCH_START,
|
||||
});
|
||||
|
||||
// If the user is on the file page with a pre-populated uri and they select
|
||||
// the search option without typing anything, searchQuery will be empty
|
||||
// We need to populate it so the input is filled on the search page
|
||||
// isBackgroundSearch means the search is happening in the background, don't update the search query
|
||||
if (!state.search.searchQuery && !isBackgroundSearch) {
|
||||
dispatch(doUpdateSearchQuery(query));
|
||||
}
|
||||
|
||||
fetch(`${CONNECTION_STRING}search?${queryWithOptions}`)
|
||||
.then(handleFetchResponse)
|
||||
.then((data: Array<{ name: string, claimId: string }>) => {
|
||||
const uris = [];
|
||||
const actions = [];
|
||||
|
||||
data.forEach(result => {
|
||||
if (result) {
|
||||
const { name, claimId } = result;
|
||||
const urlObj: LbryUrlObj = {};
|
||||
|
||||
if (name.startsWith('@')) {
|
||||
urlObj.channelName = name;
|
||||
urlObj.channelClaimId = claimId;
|
||||
} else {
|
||||
urlObj.streamName = name;
|
||||
urlObj.streamClaimId = claimId;
|
||||
}
|
||||
|
||||
const url = buildURI(urlObj);
|
||||
if (resolveResults) {
|
||||
actions.push(doResolveUri(url));
|
||||
}
|
||||
uris.push(url);
|
||||
}
|
||||
});
|
||||
|
||||
actions.push({
|
||||
type: ACTIONS.SEARCH_SUCCESS,
|
||||
data: {
|
||||
query: queryWithOptions,
|
||||
uris,
|
||||
},
|
||||
});
|
||||
dispatch(batchActions(...actions));
|
||||
})
|
||||
.catch(e => {
|
||||
dispatch({
|
||||
type: ACTIONS.SEARCH_FAIL,
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
export const doResolvedSearch = (
|
||||
rawQuery: string,
|
||||
size: ?number, // only pass in if you don't want to use the users setting (ex: related content)
|
||||
from: ?number,
|
||||
isBackgroundSearch: boolean = false,
|
||||
options: {
|
||||
related_to?: string,
|
||||
} = {},
|
||||
nsfw: boolean
|
||||
) => (dispatch: Dispatch, getState: GetState) => {
|
||||
const query = rawQuery.replace(/^lbry:\/\//i, '').replace(/\//, ' ');
|
||||
|
||||
if (!query) {
|
||||
dispatch({
|
||||
type: ACTIONS.RESOLVED_SEARCH_FAIL,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const optionsWithFrom: SearchOptions = {
|
||||
size,
|
||||
from,
|
||||
isBackgroundSearch,
|
||||
...options,
|
||||
};
|
||||
|
||||
const optionsWithoutFrom: SearchOptions = {
|
||||
size,
|
||||
isBackgroundSearch,
|
||||
...options,
|
||||
};
|
||||
|
||||
const state = getState();
|
||||
|
||||
let queryWithOptions = makeSelectQueryWithOptions(query, optionsWithFrom)(state);
|
||||
|
||||
// make from null so that we can maintain a reference to the same query for multiple pages and simply append the found results
|
||||
let queryWithoutFrom = makeSelectQueryWithOptions(query, optionsWithoutFrom)(state);
|
||||
|
||||
// If we have already searched for something, we don't need to do anything
|
||||
// TODO: Tweak this check for multiple page results
|
||||
/* const resultsForQuery = makeSelectResolvedSearchResults(queryWithOptions)(state);
|
||||
if (resultsForQuery && resultsForQuery.length && resultsForQuery.length > (from * size)) {
|
||||
return;
|
||||
} */
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.RESOLVED_SEARCH_START,
|
||||
});
|
||||
|
||||
if (!state.search.searchQuery && !isBackgroundSearch) {
|
||||
dispatch(doUpdateSearchQuery(query));
|
||||
}
|
||||
|
||||
const fetchUrl = nsfw
|
||||
? `${CONNECTION_STRING}search?resolve=true&${queryWithOptions}`
|
||||
: `${CONNECTION_STRING}search?resolve=true&nsfw=false&${queryWithOptions}`;
|
||||
fetch(fetchUrl)
|
||||
.then(handleFetchResponse)
|
||||
.then((data: Array<ResolvedSearchResult>) => {
|
||||
const results = [];
|
||||
|
||||
data.forEach(result => {
|
||||
if (result) {
|
||||
results.push(result);
|
||||
}
|
||||
});
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.RESOLVED_SEARCH_SUCCESS,
|
||||
data: {
|
||||
query: queryWithoutFrom,
|
||||
results,
|
||||
pageSize: size,
|
||||
append: parseInt(from, 10) > parseInt(size, 10) - 1,
|
||||
},
|
||||
});
|
||||
})
|
||||
.catch(e => {
|
||||
dispatch({
|
||||
type: ACTIONS.RESOLVED_SEARCH_FAIL,
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
export const doFocusSearchInput = () => (dispatch: Dispatch) =>
|
||||
dispatch({
|
||||
type: ACTIONS.SEARCH_FOCUS,
|
||||
});
|
||||
|
||||
export const doBlurSearchInput = () => (dispatch: Dispatch) =>
|
||||
dispatch({
|
||||
type: ACTIONS.SEARCH_BLUR,
|
||||
});
|
||||
|
||||
export const doUpdateSearchOptions = (
|
||||
newOptions: SearchOptions,
|
||||
additionalOptions: SearchOptions
|
||||
) => (dispatch: Dispatch, getState: GetState) => {
|
||||
const state = getState();
|
||||
const searchValue = selectSearchValue(state);
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.UPDATE_SEARCH_OPTIONS,
|
||||
data: newOptions,
|
||||
});
|
||||
|
||||
if (searchValue) {
|
||||
// After updating, perform a search with the new options
|
||||
dispatch(doSearch(searchValue, additionalOptions));
|
||||
}
|
||||
};
|
|
@@ -6,27 +6,50 @@ type SharedData = {
  version: '0.1',
  value: {
    subscriptions?: Array<string>,
    following?: Array<{ uri: string, notificationsDisabled: boolean }>,
    tags?: Array<string>,
    blocked?: Array<string>,
    coin_swap_codes?: Array<string>,
    settings?: any,
    app_welcome_version?: number,
    tv_welcome_version?: number,
    sharing_3P?: boolean,
    unpublishedCollections: CollectionGroup,
    editedCollections: CollectionGroup,
    builtinCollections: CollectionGroup,
    savedCollections: Array<string>,
  },
};

function extractUserState(rawObj: SharedData) {
  if (rawObj && rawObj.version === '0.1' && rawObj.value) {
    const { subscriptions, tags, blocked, settings, app_welcome_version, tv_welcome_version, sharing_3P } = rawObj.value;
    const {
      subscriptions,
      following,
      tags,
      blocked,
      coin_swap_codes,
      settings,
      app_welcome_version,
      sharing_3P,
      unpublishedCollections,
      editedCollections,
      builtinCollections,
      savedCollections,
    } = rawObj.value;

    return {
      ...(subscriptions ? { subscriptions } : {}),
      ...(following ? { following } : {}),
      ...(tags ? { tags } : {}),
      ...(blocked ? { blocked } : {}),
      ...(coin_swap_codes ? { coin_swap_codes } : {}),
      ...(settings ? { settings } : {}),
      ...(app_welcome_version ? { app_welcome_version } : {}),
      ...(tv_welcome_version ? { tv_welcome_version } : {}),
      ...(sharing_3P ? { sharing_3P} : {}),
      ...(sharing_3P ? { sharing_3P } : {}),
      ...(unpublishedCollections ? { unpublishedCollections } : {}),
      ...(editedCollections ? { editedCollections } : {}),
      ...(builtinCollections ? { builtinCollections } : {}),
      ...(savedCollections ? { savedCollections } : {}),
    };
  }

@@ -35,10 +58,36 @@ function extractUserState(rawObj: SharedData) {

export function doPopulateSharedUserState(sharedSettings: any) {
  return (dispatch: Dispatch) => {
    const { subscriptions, tags, blocked, settings, app_welcome_version, tv_welcome_version, sharing_3P } = extractUserState(sharedSettings);
    const {
      subscriptions,
      following,
      tags,
      blocked,
      coin_swap_codes,
      settings,
      app_welcome_version,
      sharing_3P,
      unpublishedCollections,
      editedCollections,
      builtinCollections,
      savedCollections,
    } = extractUserState(sharedSettings);
    dispatch({
      type: ACTIONS.USER_STATE_POPULATE,
      data: { subscriptions, tags, blocked, settings, app_welcome_version, tv_welcome_version, sharing_3P },
      data: {
        subscriptions,
        following,
        tags,
        blocked,
        coinSwapCodes: coin_swap_codes,
        settings,
        welcomeVersion: app_welcome_version,
        allowAnalytics: sharing_3P,
        unpublishedCollections,
        editedCollections,
        builtinCollections,
        savedCollections,
      },
    });
  };
}

@@ -50,45 +99,61 @@ export function doPreferenceSet(
  success: Function,
  fail: Function
) {
  const preference = {
    type: typeof value,
    version,
    value,
  };
  return (dispatch: Dispatch) => {
    const preference = {
      type: typeof value,
      version,
      value,
    };

  const options = {
    key,
    value: JSON.stringify(preference),
  };
    const options = {
      key,
      value: JSON.stringify(preference),
    };

  Lbry.preference_set(options)
    .then(() => {
      success(preference);
    })
    .catch(() => {
      if (fail) {
        fail();
      }
    });
    Lbry.preference_set(options)
      .then(() => {
        if (success) {
          success(preference);
        }
      })
      .catch(err => {
        dispatch({
          type: ACTIONS.SYNC_FATAL_ERROR,
          error: err,
        });

        if (fail) {
          fail();
        }
      });
  };
}

export function doPreferenceGet(key: string, success: Function, fail?: Function) {
  const options = {
    key,
  return (dispatch: Dispatch) => {
    const options = {
      key,
    };

    return Lbry.preference_get(options)
      .then(result => {
        if (result) {
          const preference = result[key];
          return success(preference);
        }

        return success(null);
      })
      .catch(err => {
        dispatch({
          type: ACTIONS.SYNC_FATAL_ERROR,
          error: err,
        });

        if (fail) {
          fail(err);
        }
      });
  };

  Lbry.preference_get(options)
    .then(result => {
      if (result) {
        const preference = result[key];
        return success(preference);
      }

      return success(null);
    })
    .catch(err => {
      if (fail) {
        fail(err);
      }
    });
}

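A usage sketch of the thunk versions of doPreferenceSet and doPreferenceGet above, dispatched with redux-thunk. The leading (key, value, version) argument order for doPreferenceSet is an assumption inferred from the preference object it builds; the key and payload shown are illustrative:

// Persist a preference blob, then read it back.
dispatch(
  doPreferenceSet('shared', { following: [] }, '0.1', preference =>
    console.log('saved', preference)
  )
);

dispatch(
  doPreferenceGet(
    'shared',
    preference => console.log('loaded', preference), // success
    err => console.error('preference_get failed', err) // fail
  )
);
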
@@ -1,24 +0,0 @@
// @flow
import * as ACTIONS from 'constants/action_types';
import Lbry from 'lbry';

export const doToggleTagFollow = (name: string) => ({
  type: ACTIONS.TOGGLE_TAG_FOLLOW,
  data: {
    name,
  },
});

export const doAddTag = (name: string) => ({
  type: ACTIONS.TAG_ADD,
  data: {
    name,
  },
});

export const doDeleteTag = (name: string) => ({
  type: ACTIONS.TAG_DELETE,
  data: {
    name,
  },
});

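This whole module is removed by the compare. While it existed, these were plain payload-only action creators; given a store's dispatch, usage looked like the following (the tag name is illustrative):

dispatch(doAddTag('science'));
dispatch(doToggleTagFollow('science'));
dispatch(doDeleteTag('science'));
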
@ -1,11 +1,21 @@
|
|||
import * as ACTIONS from 'constants/action_types';
|
||||
import Lbry from 'lbry';
|
||||
import { doToast } from 'redux/actions/notifications';
|
||||
import { selectBalance } from 'redux/selectors/wallet';
|
||||
import {
|
||||
selectBalance,
|
||||
selectPendingSupportTransactions,
|
||||
selectTxoPageParams,
|
||||
selectPendingOtherTransactions,
|
||||
selectPendingConsolidateTxid,
|
||||
selectPendingMassClaimTxid,
|
||||
} from 'redux/selectors/wallet';
|
||||
import { creditsToString } from 'util/format-credits';
|
||||
import { selectMyClaimsRaw } from 'redux/selectors/claims';
|
||||
import { selectMyClaimsRaw, selectClaimsById } from 'redux/selectors/claims';
|
||||
import { doFetchChannelListMine, doFetchClaimListMine, doClaimSearch } from 'redux/actions/claims';
|
||||
|
||||
const FIFTEEN_SECONDS = 15000;
|
||||
let walletBalancePromise = null;
|
||||
|
||||
export function doUpdateBalance() {
|
||||
return (dispatch, getState) => {
|
||||
const {
|
||||
|
@ -47,30 +57,102 @@ export function doUpdateBalance() {
|
|||
export function doBalanceSubscribe() {
|
||||
return dispatch => {
|
||||
dispatch(doUpdateBalance());
|
||||
setInterval(() => dispatch(doUpdateBalance()), 5000);
|
||||
setInterval(() => dispatch(doUpdateBalance()), 10000);
|
||||
};
|
||||
}
|
||||
|
||||
export function doFetchTransactions(page = 1, pageSize = 99999) {
|
||||
export function doFetchTransactions(page = 1, pageSize = 999999) {
|
||||
return dispatch => {
|
||||
dispatch(doFetchSupports());
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_TRANSACTIONS_STARTED,
|
||||
});
|
||||
|
||||
Lbry.utxo_release()
|
||||
.then(() => Lbry.transaction_list({ page, page_size: pageSize }))
|
||||
.then(result => {
|
||||
Lbry.transaction_list({ page, page_size: pageSize }).then(result => {
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_TRANSACTIONS_COMPLETED,
|
||||
data: {
|
||||
transactions: result.items,
|
||||
},
|
||||
});
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
export function doFetchTxoPage() {
|
||||
return (dispatch, getState) => {
|
||||
const fetchId = Math.random()
|
||||
.toString(36)
|
||||
.substr(2, 9);
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_TXO_PAGE_STARTED,
|
||||
data: fetchId,
|
||||
});
|
||||
|
||||
const state = getState();
|
||||
const queryParams = selectTxoPageParams(state);
|
||||
|
||||
Lbry.txo_list(queryParams)
|
||||
.then(res => {
|
||||
const items = res.items || [];
|
||||
const claimsById = selectClaimsById(state);
|
||||
|
||||
const channelIds = items.reduce((acc, cur) => {
|
||||
if (
|
||||
cur.type === 'support' &&
|
||||
cur.signing_channel &&
|
||||
!claimsById[cur.signing_channel.channel_id]
|
||||
) {
|
||||
acc.push(cur.signing_channel.channel_id);
|
||||
}
|
||||
return acc;
|
||||
}, []);
|
||||
|
||||
if (channelIds.length) {
|
||||
const searchParams = {
|
||||
page_size: 9999,
|
||||
page: 1,
|
||||
no_totals: true,
|
||||
claim_ids: channelIds,
|
||||
};
|
||||
// make sure redux has these channels resolved
|
||||
dispatch(doClaimSearch(searchParams));
|
||||
}
|
||||
|
||||
return res;
|
||||
})
|
||||
.then(res => {
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_TRANSACTIONS_COMPLETED,
|
||||
type: ACTIONS.FETCH_TXO_PAGE_COMPLETED,
|
||||
data: {
|
||||
transactions: result.items,
|
||||
result: res,
|
||||
fetchId: fetchId,
|
||||
},
|
||||
});
|
||||
})
|
||||
.catch(e => {
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_TXO_PAGE_COMPLETED,
|
||||
data: {
|
||||
error: e.message,
|
||||
fetchId: fetchId,
|
||||
},
|
||||
});
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
export function doUpdateTxoPageParams(params) {
|
||||
return dispatch => {
|
||||
dispatch({
|
||||
type: ACTIONS.UPDATE_TXO_FETCH_PARAMS,
|
||||
data: params,
|
||||
});
|
||||
|
||||
dispatch(doFetchTxoPage());
|
||||
};
|
||||
}
|
||||
|
||||
export function doFetchSupports(page = 1, pageSize = 99999) {
|
||||
return dispatch => {
|
||||
dispatch({
|
||||
|
@ -88,6 +170,74 @@ export function doFetchSupports(page = 1, pageSize = 99999) {
|
|||
};
|
||||
}
|
||||
|
||||
export function doFetchUtxoCounts() {
|
||||
return async dispatch => {
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_UTXO_COUNT_STARTED,
|
||||
});
|
||||
|
||||
let resultSets = await Promise.all([
|
||||
Lbry.txo_list({ type: 'other', is_not_spent: true, page: 1, page_size: 1 }),
|
||||
Lbry.txo_list({ type: 'support', is_not_spent: true, page: 1, page_size: 1 }),
|
||||
]);
|
||||
const counts = {};
|
||||
const paymentCount = resultSets[0]['total_items'];
|
||||
const supportCount = resultSets[1]['total_items'];
|
||||
counts['other'] = typeof paymentCount === 'number' ? paymentCount : 0;
|
||||
counts['support'] = typeof supportCount === 'number' ? supportCount : 0;
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_UTXO_COUNT_COMPLETED,
|
||||
data: counts,
|
||||
debug: { resultSets },
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
export function doUtxoConsolidate() {
|
||||
return async dispatch => {
|
||||
dispatch({
|
||||
type: ACTIONS.DO_UTXO_CONSOLIDATE_STARTED,
|
||||
});
|
||||
|
||||
const results = await Lbry.txo_spend({ type: 'other' });
|
||||
const result = results[0];
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.PENDING_CONSOLIDATED_TXOS_UPDATED,
|
||||
data: { txids: [result.txid] },
|
||||
});
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.DO_UTXO_CONSOLIDATE_COMPLETED,
|
||||
data: { txid: result.txid },
|
||||
});
|
||||
dispatch(doCheckPendingTxs());
|
||||
};
|
||||
}
|
||||
|
||||
export function doTipClaimMass() {
|
||||
return async dispatch => {
|
||||
dispatch({
|
||||
type: ACTIONS.TIP_CLAIM_MASS_STARTED,
|
||||
});
|
||||
|
||||
const results = await Lbry.txo_spend({ type: 'support', is_not_my_input: true });
|
||||
const result = results[0];
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.PENDING_CONSOLIDATED_TXOS_UPDATED,
|
||||
data: { txids: [result.txid] },
|
||||
});
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.TIP_CLAIM_MASS_COMPLETED,
|
||||
data: { txid: result.txid },
|
||||
});
|
||||
dispatch(doCheckPendingTxs());
|
||||
};
|
||||
}
|
||||
|
||||
export function doGetNewAddress() {
|
||||
return dispatch => {
|
||||
dispatch({
|
||||
|
@ -127,8 +277,8 @@ export function doSendDraftTransaction(address, amount) {
|
|||
if (balance - amount <= 0) {
|
||||
dispatch(
|
||||
doToast({
|
||||
title: 'Insufficient credits',
|
||||
message: 'Insufficient credits',
|
||||
title: __('Insufficient credits'),
|
||||
message: __('Insufficient credits'),
|
||||
})
|
||||
);
|
||||
return;
|
||||
|
@ -145,8 +295,8 @@ export function doSendDraftTransaction(address, amount) {
|
|||
});
|
||||
dispatch(
|
||||
doToast({
|
||||
message: `You sent ${amount} LBC`,
|
||||
linkText: 'History',
|
||||
message: __('You sent %amount% LBRY Credits', { amount: amount }),
|
||||
linkText: __('History'),
|
||||
linkTarget: '/wallet',
|
||||
})
|
||||
);
|
||||
|
@ -157,7 +307,7 @@ export function doSendDraftTransaction(address, amount) {
|
|||
});
|
||||
dispatch(
|
||||
doToast({
|
||||
message: 'Transaction failed',
|
||||
message: __('Transaction failed'),
|
||||
isError: true,
|
||||
})
|
||||
);
|
||||
|
@ -171,7 +321,7 @@ export function doSendDraftTransaction(address, amount) {
|
|||
});
|
||||
dispatch(
|
||||
doToast({
|
||||
message: 'Transaction failed',
|
||||
message: __('Transaction failed'),
|
||||
isError: true,
|
||||
})
|
||||
);
|
||||
|
@ -198,16 +348,16 @@ export function doSetDraftTransactionAddress(address) {
|
|||
};
|
||||
}
|
||||
|
||||
export function doSendTip(amount, claimId, isSupport, successCallback, errorCallback) {
|
||||
export function doSendTip(params, isSupport, successCallback, errorCallback, shouldNotify = true) {
|
||||
return (dispatch, getState) => {
|
||||
const state = getState();
|
||||
const balance = selectBalance(state);
|
||||
const myClaims = selectMyClaimsRaw(state);
|
||||
|
||||
const shouldSupport =
|
||||
isSupport || (myClaims ? myClaims.find(claim => claim.claim_id === claimId) : false);
|
||||
isSupport || (myClaims ? myClaims.find(claim => claim.claim_id === params.claim_id) : false);
|
||||
|
||||
if (balance - amount <= 0) {
|
||||
if (balance - params.amount <= 0) {
|
||||
dispatch(
|
||||
doToast({
|
||||
message: __('Insufficient credits'),
|
||||
|
@ -217,23 +367,25 @@ export function doSendTip(amount, claimId, isSupport, successCallback, errorCall
|
|||
return;
|
||||
}
|
||||
|
||||
const success = () => {
|
||||
dispatch(
|
||||
doToast({
|
||||
message: shouldSupport
|
||||
? __('You deposited %amount% LBC as a support!', { amount })
|
||||
: __('You sent %amount% LBC as a tip, Mahalo!', { amount }),
|
||||
linkText: __('History'),
|
||||
linkTarget: __('/wallet'),
|
||||
})
|
||||
);
|
||||
const success = response => {
|
||||
if (shouldNotify) {
|
||||
dispatch(
|
||||
doToast({
|
||||
message: shouldSupport
|
||||
? __('You deposited %amount% LBRY Credits as a support!', { amount: params.amount })
|
||||
: __('You sent %amount% LBRY Credits as a tip, Mahalo!', { amount: params.amount }),
|
||||
linkText: __('History'),
|
||||
linkTarget: '/wallet',
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.SUPPORT_TRANSACTION_COMPLETED,
|
||||
});
|
||||
|
||||
if (successCallback) {
|
||||
successCallback();
|
||||
successCallback(response);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -262,10 +414,10 @@ export function doSendTip(amount, claimId, isSupport, successCallback, errorCall
|
|||
});
|
||||
|
||||
Lbry.support_create({
|
||||
claim_id: claimId,
|
||||
amount: creditsToString(amount),
|
||||
...params,
|
||||
tip: !shouldSupport,
|
||||
blocking: true,
|
||||
amount: creditsToString(params.amount),
|
||||
}).then(success, error);
|
||||
};
|
||||
}
|
||||
|
@ -342,20 +494,71 @@ export function doWalletLock() {
|
|||
};
|
||||
}
|
||||
|
||||
// Collect all tips for a claim
|
||||
export function doSupportAbandonForClaim(claimId, claimType, keep, preview) {
|
||||
return dispatch => {
|
||||
if (preview) {
|
||||
dispatch({
|
||||
type: ACTIONS.ABANDON_CLAIM_SUPPORT_PREVIEW,
|
||||
});
|
||||
} else {
|
||||
dispatch({
|
||||
type: ACTIONS.ABANDON_CLAIM_SUPPORT_STARTED,
|
||||
});
|
||||
}
|
||||
|
||||
const params = { claim_id: claimId };
|
||||
if (preview) params['preview'] = true;
|
||||
if (keep) params['keep'] = keep;
|
||||
return Lbry.support_abandon(params)
|
||||
.then(res => {
|
||||
if (!preview) {
|
||||
dispatch({
|
||||
type: ACTIONS.ABANDON_CLAIM_SUPPORT_COMPLETED,
|
||||
data: { claimId, txid: res.txid, effective: res.outputs[0].amount, type: claimType },
|
||||
});
|
||||
dispatch(doCheckPendingTxs());
|
||||
}
|
||||
return res;
|
||||
})
|
||||
.catch(e => {
|
||||
dispatch({
|
||||
type: ACTIONS.ABANDON_CLAIM_SUPPORT_FAILED,
|
||||
data: e.message,
|
||||
});
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
export function doWalletReconnect() {
|
||||
return dispatch => {
|
||||
dispatch({
|
||||
type: ACTIONS.WALLET_RESTART,
|
||||
});
|
||||
let failed = false;
|
||||
// this basically returns null when it's done. :(
|
||||
// might be good to dispatch ACTIONS.WALLET_RESTARTED
|
||||
Lbry.wallet_reconnect().then(() =>
|
||||
const walletTimeout = setTimeout(() => {
|
||||
failed = true;
|
||||
dispatch({
|
||||
type: ACTIONS.WALLET_RESTART_COMPLETED,
|
||||
})
|
||||
);
|
||||
});
|
||||
dispatch(
|
||||
doToast({
|
||||
message: __(
|
||||
'Your servers were not available. Check your url and port, or switch back to defaults.'
|
||||
),
|
||||
isError: true,
|
||||
})
|
||||
);
|
||||
}, FIFTEEN_SECONDS);
|
||||
Lbry.wallet_reconnect().then(() => {
|
||||
clearTimeout(walletTimeout);
|
||||
if (!failed) dispatch({ type: ACTIONS.WALLET_RESTART_COMPLETED });
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
export function doWalletDecrypt() {
|
||||
return dispatch => {
|
||||
dispatch({
|
||||
|
@ -413,3 +616,95 @@ export function doUpdateBlockHeight() {
|
|||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Calls transaction_show on txes until any pending txes are confirmed
|
||||
export const doCheckPendingTxs = () => (dispatch, getState) => {
|
||||
const state = getState();
|
||||
const pendingTxsById = selectPendingSupportTransactions(state); // {}
|
||||
const pendingOtherTxes = selectPendingOtherTransactions(state);
|
||||
|
||||
if (!Object.keys(pendingTxsById).length && !pendingOtherTxes.length) {
|
||||
return;
|
||||
}
|
||||
let txCheckInterval;
|
||||
const checkTxList = () => {
|
||||
const state = getState();
|
||||
const pendingSupportTxs = selectPendingSupportTransactions(state); // {}
|
||||
const pendingConsolidateTxes = selectPendingOtherTransactions(state);
|
||||
const pendingConsTxid = selectPendingConsolidateTxid(state);
|
||||
const pendingMassCLaimTxid = selectPendingMassClaimTxid(state);
|
||||
|
||||
const promises = [];
|
||||
const newPendingTxes = {};
|
||||
const noLongerPendingConsolidate = [];
|
||||
const types = new Set([]);
|
||||
// { claimId: {txid: 123, amount 12.3}, }
|
||||
const entries = Object.entries(pendingSupportTxs);
|
||||
entries.forEach(([claim, data]) => {
|
||||
promises.push(Lbry.transaction_show({ txid: data.txid }));
|
||||
types.add(data.type);
|
||||
});
|
||||
if (pendingConsolidateTxes.length) {
|
||||
pendingConsolidateTxes.forEach(txid => promises.push(Lbry.transaction_show({ txid })));
|
||||
}
|
||||
|
||||
Promise.all(promises).then(txShows => {
|
||||
let changed = false;
|
||||
txShows.forEach(result => {
|
||||
if (pendingConsolidateTxes.includes(result.txid)) {
|
||||
if (result.height > 0) {
|
||||
noLongerPendingConsolidate.push(result.txid);
|
||||
}
|
||||
} else {
|
||||
if (result.height <= 0) {
|
||||
const match = entries.find(entry => entry[1].txid === result.txid);
|
||||
newPendingTxes[match[0]] = match[1];
|
||||
} else {
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (changed) {
|
||||
dispatch({
|
||||
type: ACTIONS.PENDING_SUPPORTS_UPDATED,
|
||||
data: newPendingTxes,
|
||||
});
|
||||
if (types.has('channel')) {
|
||||
dispatch(doFetchChannelListMine());
|
||||
}
|
||||
if (types.has('stream')) {
|
||||
dispatch(doFetchClaimListMine());
|
||||
}
|
||||
}
|
||||
if (noLongerPendingConsolidate.length) {
|
||||
if (noLongerPendingConsolidate.includes(pendingConsTxid)) {
|
||||
dispatch(
|
||||
doToast({
|
||||
message: __('Your wallet is finished consolidating'),
|
||||
})
|
||||
);
|
||||
}
|
||||
if (noLongerPendingConsolidate.includes(pendingMassCLaimTxid)) {
|
||||
dispatch(
|
||||
doToast({
|
||||
message: __('Your tips have been collected'),
|
||||
})
|
||||
);
|
||||
}
|
||||
dispatch({
|
||||
type: ACTIONS.PENDING_CONSOLIDATED_TXOS_UPDATED,
|
||||
data: { txids: noLongerPendingConsolidate, remove: true },
|
||||
});
|
||||
}
|
||||
|
||||
if (!Object.keys(pendingTxsById).length && !pendingOtherTxes.length) {
|
||||
clearInterval(txCheckInterval);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
txCheckInterval = setInterval(() => {
|
||||
checkTxList();
|
||||
}, 30000);
|
||||
};
|
||||
|
|
|
@@ -2,50 +2,64 @@
import isEqual from 'util/deep-equal';
import { doPreferenceSet } from 'redux/actions/sync';

const SHARED_PREFERENCE_KEY = 'shared';
const RUN_PREFERENCES_DELAY_MS = 2000;
const SHARED_PREFERENCE_VERSION = '0.1';
let oldShared = {};

let timeout;
export const buildSharedStateMiddleware = (
  actions: Array<string>,
  sharedStateFilters: {},
  sharedStateCb?: any => void
) => ({ getState, dispatch }: { getState: () => {}, dispatch: any => void }) => (
  next: ({}) => void
) => (action: { type: string, data: any }) => {
) => ({
  getState,
  dispatch,
}: {
  getState: () => { user: any, settings: any },
  dispatch: any => void,
}) => (next: ({}) => void) => (action: { type: string, data: any }) => {
  const currentState = getState();

  // We don't care if sync is disabled here, we always want to backup preferences to the wallet
  if (!actions.includes(action.type)) {
  if (!actions.includes(action.type) || typeof action === 'function') {
    return next(action);
  }

  clearTimeout(timeout);
  const actionResult = next(action);
  // Call `getState` after calling `next` to ensure the state has updated in response to the action
  const nextState = getState();
  const shared = {};
  function runPreferences() {
    const nextState: { user: any, settings: any } = getState();
    const syncEnabled =
      nextState.settings &&
      nextState.settings.clientSettings &&
      nextState.settings.clientSettings.enable_sync;
    const hasVerifiedEmail =
      nextState.user && nextState.user.user && nextState.user.user.has_verified_email;
    const preferenceKey = syncEnabled && hasVerifiedEmail ? 'shared' : 'local';
    const shared = {};

  Object.keys(sharedStateFilters).forEach(key => {
    const filter = sharedStateFilters[key];
    const { source, property, transform } = filter;
    let value = nextState[source][property];
    if (transform) {
      value = transform(value);
    Object.keys(sharedStateFilters).forEach(key => {
      const filter = sharedStateFilters[key];
      const { source, property, transform } = filter;
      let value = nextState[source][property];
      if (transform) {
        value = transform(value);
      }

      shared[key] = value;
    });

    if (!isEqual(oldShared, shared)) {
      // only update if the preference changed from last call in the same session
      oldShared = shared;
      dispatch(doPreferenceSet(preferenceKey, shared, SHARED_PREFERENCE_VERSION));
    }

    shared[key] = value;
  });

  if (!isEqual(oldShared, shared)) {
    // only update if the preference changed from last call in the same session
    oldShared = shared;
    doPreferenceSet(SHARED_PREFERENCE_KEY, shared, SHARED_PREFERENCE_VERSION);
    if (sharedStateCb) {
      // Pass dispatch to the callback so consumers can dispatch actions in response to preference set
      sharedStateCb({ dispatch, getState });
    }
    clearTimeout(timeout);
    return actionResult;
  }

  if (sharedStateCb) {
    // Pass dispatch to the callback so consumers can dispatch actions in response to preference set
    sharedStateCb({ dispatch, getState });
  }

  return actionResult;
  timeout = setTimeout(runPreferences, RUN_PREFERENCES_DELAY_MS);
};

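A sketch of how buildSharedStateMiddleware could be wired into a store. The import paths, reducer slices, filters, and action list below are illustrative assumptions, not the app's real configuration; only the middleware's (actions, sharedStateFilters, sharedStateCb) signature and the { source, property, transform } filter shape come from the code above:

import { createStore, applyMiddleware, combineReducers } from 'redux';
import thunk from 'redux-thunk';
// Illustrative paths; adjust to wherever the middleware and constants actually live.
import { buildSharedStateMiddleware } from 'redux/middleware/shared-state';
import * as ACTIONS from 'constants/action_types';

// Minimal placeholder reducers so the filters below have something to read.
const rootReducer = combineReducers({
  tags: (state = { followedTags: [] }) => state,
  subscriptions: (state = { following: [] }) => state,
});

// Which slices of state get persisted, keyed by the name they get in the preference blob.
const sharedStateFilters = {
  tags: { source: 'tags', property: 'followedTags' },
  following: {
    source: 'subscriptions',
    property: 'following',
    transform: value => value.map(({ uri }) => uri),
  },
};

// Only these action types trigger the debounced preference_set above.
const sharedStateActions = [ACTIONS.TOGGLE_TAG_FOLLOW, ACTIONS.TAG_ADD, ACTIONS.TAG_DELETE];

const store = createStore(
  rootReducer,
  applyMiddleware(buildSharedStateMiddleware(sharedStateActions, sharedStateFilters), thunk)
);
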
@ -9,25 +9,35 @@
|
|||
// - Sean
|
||||
|
||||
import * as ACTIONS from 'constants/action_types';
|
||||
import { buildURI, parseURI } from 'lbryURI';
|
||||
import { concatClaims } from 'util/claim';
|
||||
import mergeClaim from 'util/merge-claim';
|
||||
|
||||
type State = {
|
||||
createChannelError: ?string,
|
||||
createCollectionError: ?string,
|
||||
channelClaimCounts: { [string]: number },
|
||||
claimsByUri: { [string]: string },
|
||||
byId: { [string]: Claim },
|
||||
pendingById: { [string]: Claim }, // keep pending claims
|
||||
resolvingUris: Array<string>,
|
||||
pendingById: { [string]: Claim },
|
||||
myClaims: ?Array<Claim>,
|
||||
myChannelClaims: ?Set<string>,
|
||||
reflectingById: { [string]: ReflectingUpdate },
|
||||
myClaims: ?Array<string>,
|
||||
myChannelClaims: ?Array<string>,
|
||||
myCollectionClaims: ?Array<string>,
|
||||
abandoningById: { [string]: boolean },
|
||||
fetchingChannelClaims: { [string]: number },
|
||||
fetchingMyChannels: boolean,
|
||||
fetchingMyCollections: boolean,
|
||||
fetchingClaimSearchByQuery: { [string]: boolean },
|
||||
purchaseUriSuccess: boolean,
|
||||
myPurchases: ?Array<string>,
|
||||
myPurchasesPageNumber: ?number,
|
||||
myPurchasesPageTotalResults: ?number,
|
||||
fetchingMyPurchases: boolean,
|
||||
fetchingMyPurchasesError: ?string,
|
||||
claimSearchByQuery: { [string]: Array<string> },
|
||||
claimSearchByQueryLastPageReached: { [string]: Array<boolean> },
|
||||
creatingChannel: boolean,
|
||||
creatingCollection: boolean,
|
||||
paginatedClaimsByChannel: {
|
||||
[string]: {
|
||||
all: Array<string>,
|
||||
|
@ -36,11 +46,21 @@ type State = {
|
|||
[number]: Array<string>,
|
||||
},
|
||||
},
|
||||
updateChannelError: string,
|
||||
updateChannelError: ?string,
|
||||
updateCollectionError: ?string,
|
||||
updatingChannel: boolean,
|
||||
updatingCollection: boolean,
|
||||
pendingChannelImport: string | boolean,
|
||||
repostLoading: boolean,
|
||||
repostError: ?string,
|
||||
fetchingClaimListMinePageError: ?string,
|
||||
myClaimsPageResults: Array<string>,
|
||||
myClaimsPageNumber: ?number,
|
||||
myClaimsPageTotalResults: ?number,
|
||||
isFetchingClaimListMine: boolean,
|
||||
isCheckingNameForPublish: boolean,
|
||||
checkingPending: boolean,
|
||||
checkingReflecting: boolean,
|
||||
};
|
||||
|
||||
const reducers = {};
|
||||
|
@ -51,51 +71,67 @@ const defaultState = {
|
|||
channelClaimCounts: {},
|
||||
fetchingChannelClaims: {},
|
||||
resolvingUris: [],
|
||||
// This should not be a Set
|
||||
// Storing sets in reducers can cause issues
|
||||
myChannelClaims: undefined,
|
||||
myCollectionClaims: [],
|
||||
myClaims: undefined,
|
||||
myPurchases: undefined,
|
||||
myPurchasesPageNumber: undefined,
|
||||
myPurchasesPageTotalResults: undefined,
|
||||
purchaseUriSuccess: false,
|
||||
fetchingMyPurchases: false,
|
||||
fetchingMyPurchasesError: undefined,
|
||||
fetchingMyChannels: false,
|
||||
fetchingMyCollections: false,
|
||||
abandoningById: {},
|
||||
pendingById: {},
|
||||
reflectingById: {},
|
||||
claimSearchError: false,
|
||||
claimSearchByQuery: {},
|
||||
claimSearchByQueryLastPageReached: {},
|
||||
fetchingClaimSearchByQuery: {},
|
||||
updateChannelError: '',
|
||||
updateCollectionError: '',
|
||||
updatingChannel: false,
|
||||
creatingChannel: false,
|
||||
createChannelError: undefined,
|
||||
updatingCollection: false,
|
||||
creatingCollection: false,
|
||||
createCollectionError: undefined,
|
||||
pendingChannelImport: false,
|
||||
repostLoading: false,
|
||||
repostError: undefined,
|
||||
fetchingClaimListMinePageError: undefined,
|
||||
myClaimsPageResults: [],
|
||||
myClaimsPageNumber: undefined,
|
||||
myClaimsPageTotalResults: undefined,
|
||||
isFetchingClaimListMine: false,
|
||||
isFetchingMyPurchases: false,
|
||||
isCheckingNameForPublish: false,
|
||||
checkingPending: false,
|
||||
checkingReflecting: false,
|
||||
};
|
||||
|
||||
function handleClaimAction(state: State, action: any): State {
|
||||
const {
|
||||
resolveInfo,
|
||||
}: {
|
||||
[string]: {
|
||||
stream: ?StreamClaim,
|
||||
channel: ?ChannelClaim,
|
||||
claimsInChannel: ?number,
|
||||
},
|
||||
} = action.data;
|
||||
const { resolveInfo }: ClaimActionResolveInfo = action.data;
|
||||
|
||||
const byUri = Object.assign({}, state.claimsByUri);
|
||||
const byId = Object.assign({}, state.byId);
|
||||
const channelClaimCounts = Object.assign({}, state.channelClaimCounts);
|
||||
const pendingById = state.pendingById;
|
||||
let newResolvingUrls = new Set(state.resolvingUris);
|
||||
let myClaimIds = new Set(state.myClaims);
|
||||
|
||||
Object.entries(resolveInfo).forEach(([url: string, resolveResponse: ResolveResponse]) => {
|
||||
// $FlowFixMe
|
||||
const { claimsInChannel, stream, channel } = resolveResponse;
|
||||
if (claimsInChannel) {
|
||||
channelClaimCounts[url] = claimsInChannel;
|
||||
channelClaimCounts[channel.canonical_url] = claimsInChannel;
|
||||
}
|
||||
const { claimsInChannel, stream, channel: channelFromResolve, collection } = resolveResponse;
|
||||
const channel = channelFromResolve || (stream && stream.signing_channel);
|
||||
|
||||
if (stream) {
|
||||
byId[stream.claim_id] = stream;
|
||||
if (pendingById[stream.claim_id]) {
|
||||
byId[stream.claim_id] = mergeClaim(stream, byId[stream.claim_id]);
|
||||
} else {
|
||||
byId[stream.claim_id] = stream;
|
||||
}
|
||||
byUri[url] = stream.claim_id;
|
||||
|
||||
// If url isn't a canonical_url, make sure that is added too
|
||||
|
@ -105,23 +141,53 @@ function handleClaimAction(state: State, action: any): State {
|
|||
byUri[stream.permanent_url] = stream.claim_id;
|
||||
newResolvingUrls.delete(stream.canonical_url);
|
||||
newResolvingUrls.delete(stream.permanent_url);
|
||||
|
||||
if (stream.is_my_output) {
|
||||
myClaimIds.add(stream.claim_id);
|
||||
}
|
||||
}
|
||||
|
||||
if (channel) {
|
||||
if (channel && channel.claim_id) {
|
||||
if (!stream) {
|
||||
byUri[url] = channel.claim_id;
|
||||
}
|
||||
|
||||
byId[channel.claim_id] = channel;
|
||||
// Also add the permanent_url here until lighthouse returns canonical_url for search results
|
||||
if (claimsInChannel) {
|
||||
channelClaimCounts[url] = claimsInChannel;
|
||||
channelClaimCounts[channel.canonical_url] = claimsInChannel;
|
||||
}
|
||||
|
||||
if (pendingById[channel.claim_id]) {
|
||||
byId[channel.claim_id] = mergeClaim(channel, byId[channel.claim_id]);
|
||||
} else {
|
||||
byId[channel.claim_id] = channel;
|
||||
}
|
||||
|
||||
byUri[channel.permanent_url] = channel.claim_id;
|
||||
byUri[channel.canonical_url] = channel.claim_id;
|
||||
newResolvingUrls.delete(channel.canonical_url);
|
||||
newResolvingUrls.delete(channel.permanent_url);
|
||||
}
|
||||
|
||||
if (collection) {
|
||||
if (pendingById[collection.claim_id]) {
|
||||
byId[collection.claim_id] = mergeClaim(collection, byId[collection.claim_id]);
|
||||
} else {
|
||||
byId[collection.claim_id] = collection;
|
||||
}
|
||||
byUri[url] = collection.claim_id;
|
||||
byUri[collection.canonical_url] = collection.claim_id;
|
||||
byUri[collection.permanent_url] = collection.claim_id;
|
||||
newResolvingUrls.delete(collection.canonical_url);
|
||||
newResolvingUrls.delete(collection.permanent_url);
|
||||
|
||||
if (collection.is_my_output) {
|
||||
myClaimIds.add(collection.claim_id);
|
||||
}
|
||||
}
|
||||
|
||||
newResolvingUrls.delete(url);
|
||||
if (!stream && !channel) {
|
||||
if (!stream && !channel && !collection && !pendingById[byUri[url]]) {
|
||||
byUri[url] = null;
|
||||
}
|
||||
});
|
||||
|
@ -131,6 +197,7 @@ function handleClaimAction(state: State, action: any): State {
|
|||
claimsByUri: byUri,
|
||||
channelClaimCounts,
|
||||
resolvingUris: Array.from(newResolvingUrls),
|
||||
myClaims: Array.from(myClaimIds),
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -163,42 +230,46 @@ reducers[ACTIONS.FETCH_CLAIM_LIST_MINE_STARTED] = (state: State): State =>
|
|||
});
|
||||
|
||||
reducers[ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED] = (state: State, action: any): State => {
|
||||
const { claims }: { claims: Array<Claim> } = action.data;
|
||||
const { result }: { result: ClaimListResponse } = action.data;
|
||||
const claims = result.items;
|
||||
const page = result.page;
|
||||
const totalItems = result.total_items;
|
||||
|
||||
const byId = Object.assign({}, state.byId);
|
||||
const byUri = Object.assign({}, state.claimsByUri);
|
||||
const pendingById: { [string]: Claim } = Object.assign({}, state.pendingById);
|
||||
const myClaims = state.myClaims ? state.myClaims.slice() : [];
|
||||
const pendingById = Object.assign({}, state.pendingById);
|
||||
let myClaimIds = new Set(state.myClaims);
|
||||
let urlsForCurrentPage = [];
|
||||
|
||||
claims.forEach((claim: Claim) => {
|
||||
const uri = buildURI({ streamName: claim.name, streamClaimId: claim.claim_id });
|
||||
|
||||
const { permanent_url: permanentUri, claim_id: claimId, canonical_url: canonicalUri } = claim;
|
||||
if (claim.type && claim.type.match(/claim|update/)) {
|
||||
urlsForCurrentPage.push(permanentUri);
|
||||
if (claim.confirmations < 1) {
|
||||
pendingById[claim.claim_id] = claim;
|
||||
delete byId[claim.claim_id];
|
||||
delete byUri[claim.claim_id];
|
||||
pendingById[claimId] = claim;
|
||||
if (byId[claimId]) {
|
||||
byId[claimId] = mergeClaim(claim, byId[claimId]);
|
||||
} else {
|
||||
byId[claimId] = claim;
|
||||
}
|
||||
} else {
|
||||
byId[claim.claim_id] = claim;
|
||||
byUri[uri] = claim.claim_id;
|
||||
byId[claimId] = claim;
|
||||
}
|
||||
byUri[permanentUri] = claimId;
|
||||
byUri[canonicalUri] = claimId;
|
||||
myClaimIds.add(claimId);
|
||||
}
|
||||
});
|
||||
|
||||
// Remove old pending publishes
|
||||
Object.values(pendingById)
|
||||
// $FlowFixMe
|
||||
.filter(pendingClaim => byId[pendingClaim.claim_id])
|
||||
.forEach(pendingClaim => {
|
||||
// $FlowFixMe
|
||||
delete pendingById[pendingClaim.claim_id];
|
||||
});
|
||||
|
||||
return Object.assign({}, state, {
|
||||
isFetchingClaimListMine: false,
|
||||
myClaims: concatClaims(myClaims, claims),
|
||||
myClaims: Array.from(myClaimIds),
|
||||
byId,
|
||||
claimsByUri: byUri,
|
||||
pendingById,
|
||||
claimsByUri: byUri,
|
||||
myClaimsPageResults: urlsForCurrentPage,
|
||||
myClaimsPageNumber: page,
|
||||
myClaimsPageTotalResults: totalItems,
|
||||
});
|
||||
};
|
||||
|
||||
|
@ -207,9 +278,8 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_STARTED] = (state: State): State =>
|
|||
|
||||
reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): State => {
|
||||
const { claims }: { claims: Array<ChannelClaim> } = action.data;
|
||||
const myClaims = state.myClaims || [];
|
||||
const pendingById = Object.assign(state.pendingById);
|
||||
|
||||
let myClaimIds = new Set(state.myClaims);
|
||||
const pendingById = Object.assign({}, state.pendingById);
|
||||
let myChannelClaims;
|
||||
const byId = Object.assign({}, state.byId);
|
||||
const byUri = Object.assign({}, state.claimsByUri);
|
||||
|
@ -223,7 +293,12 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): St
|
|||
claims.forEach(claim => {
|
||||
const { meta } = claim;
|
||||
const { claims_in_channel: claimsInChannel } = claim.meta;
|
||||
const { canonical_url: canonicalUrl, permanent_url: permanentUrl, claim_id: claimId } = claim;
|
||||
const {
|
||||
canonical_url: canonicalUrl,
|
||||
permanent_url: permanentUrl,
|
||||
claim_id: claimId,
|
||||
confirmations,
|
||||
} = claim;
|
||||
|
||||
byUri[canonicalUrl] = claimId;
|
||||
byUri[permanentUrl] = claimId;
|
||||
|
@ -232,26 +307,97 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): St
|
|||
|
||||
// $FlowFixMe
|
||||
myChannelClaims.add(claimId);
|
||||
if (!byId[claimId]) {
|
||||
if (confirmations < 1) {
|
||||
pendingById[claimId] = claim;
|
||||
if (byId[claimId]) {
|
||||
byId[claimId] = mergeClaim(claim, byId[claimId]);
|
||||
} else {
|
||||
byId[claimId] = claim;
|
||||
}
|
||||
} else {
|
||||
byId[claimId] = claim;
|
||||
}
|
||||
|
||||
if (pendingById[claimId] && claim.confirmations > 0) {
|
||||
delete pendingById[claimId];
|
||||
}
|
||||
myClaimIds.add(claimId);
|
||||
});
|
||||
}
|
||||
|
||||
return Object.assign({}, state, {
|
||||
byId,
|
||||
pendingById,
|
||||
claimsByUri: byUri,
|
||||
channelClaimCounts,
|
||||
fetchingMyChannels: false,
|
||||
myChannelClaims,
|
||||
myClaims: concatClaims(myClaims, claims),
|
||||
myChannelClaims: myChannelClaims ? Array.from(myChannelClaims) : null,
|
||||
myClaims: myClaimIds ? Array.from(myClaimIds) : null,
|
||||
});
|
||||
};
|
||||
|
||||
reducers[ACTIONS.FETCH_CHANNEL_LIST_FAILED] = (state: State, action: any): State => {
|
||||
return Object.assign({}, state, {
|
||||
fetchingMyChannels: false,
|
||||
});
|
||||
};
|
||||
|
||||
reducers[ACTIONS.FETCH_COLLECTION_LIST_STARTED] = (state: State): State => ({
|
||||
...state,
|
||||
fetchingMyCollections: true,
|
||||
});
|
||||
|
||||
reducers[ACTIONS.FETCH_COLLECTION_LIST_COMPLETED] = (state: State, action: any): State => {
|
||||
const { claims }: { claims: Array<CollectionClaim> } = action.data;
|
||||
const myClaims = state.myClaims || [];
|
||||
let myClaimIds = new Set(myClaims);
|
||||
const pendingById = Object.assign({}, state.pendingById);
|
||||
let myCollectionClaimsSet = new Set([]);
|
||||
const byId = Object.assign({}, state.byId);
|
||||
const byUri = Object.assign({}, state.claimsByUri);
|
||||
|
||||
if (claims.length) {
|
||||
myCollectionClaimsSet = new Set(state.myCollectionClaims);
|
||||
claims.forEach(claim => {
|
||||
const { meta } = claim;
|
||||
const {
|
||||
canonical_url: canonicalUrl,
|
||||
permanent_url: permanentUrl,
|
||||
claim_id: claimId,
|
||||
confirmations,
|
||||
} = claim;
|
||||
|
||||
byUri[canonicalUrl] = claimId;
|
||||
byUri[permanentUrl] = claimId;
|
||||
|
||||
// $FlowFixMe
|
||||
myCollectionClaimsSet.add(claimId);
|
||||
// we don't want to overwrite a pending result with a resolve
|
||||
if (confirmations < 1) {
|
||||
pendingById[claimId] = claim;
|
||||
if (byId[claimId]) {
|
||||
byId[claimId] = mergeClaim(claim, byId[claimId]);
|
||||
} else {
|
||||
byId[claimId] = claim;
|
||||
}
|
||||
} else {
|
||||
byId[claimId] = claim;
|
||||
}
|
||||
myClaimIds.add(claimId);
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
...state,
|
||||
byId,
|
||||
pendingById,
|
||||
claimsByUri: byUri,
|
||||
fetchingMyCollections: false,
|
||||
myCollectionClaims: Array.from(myCollectionClaimsSet),
|
||||
myClaims: myClaimIds ? Array.from(myClaimIds) : null,
|
||||
};
|
||||
};
|
||||
|
||||
reducers[ACTIONS.FETCH_COLLECTION_LIST_FAILED] = (state: State): State => {
|
||||
return { ...state, fetchingMyCollections: false };
|
||||
};
|
||||
|
||||
reducers[ACTIONS.FETCH_CHANNEL_CLAIMS_STARTED] = (state: State, action: any): State => {
|
||||
const { uri, page } = action.data;
|
||||
const fetchingChannelClaims = Object.assign({}, state.fetchingChannelClaims);
|
||||
|
@ -330,27 +476,104 @@ reducers[ACTIONS.ABANDON_CLAIM_STARTED] = (state: State, action: any): State =>
|
|||
});
|
||||
};
|
||||
|
||||
reducers[ACTIONS.UPDATE_PENDING_CLAIMS] = (state: State, action: any): State => {
|
||||
const { claims: pendingClaims }: { claims: Array<Claim> } = action.data;
|
||||
const byId = Object.assign({}, state.byId);
|
||||
const pendingById = Object.assign({}, state.pendingById);
|
||||
const byUri = Object.assign({}, state.claimsByUri);
|
||||
let myClaimIds = new Set(state.myClaims);
|
||||
const myChannelClaims = new Set(state.myChannelClaims);
|
||||
|
||||
// $FlowFixMe
|
||||
pendingClaims.forEach((claim: Claim) => {
|
||||
let newClaim;
|
||||
const { permanent_url: uri, claim_id: claimId, type, value_type: valueType } = claim;
|
||||
pendingById[claimId] = claim; // make sure we don't need to merge?
|
||||
const oldClaim = byId[claimId];
|
||||
if (oldClaim && oldClaim.canonical_url) {
|
||||
newClaim = mergeClaim(oldClaim, claim);
|
||||
} else {
|
||||
newClaim = claim;
|
||||
}
|
||||
if (valueType === 'channel') {
|
||||
myChannelClaims.add(claimId);
|
||||
}
|
||||
|
||||
if (type && type.match(/claim|update/)) {
|
||||
byId[claimId] = newClaim;
|
||||
byUri[uri] = claimId;
|
||||
}
|
||||
myClaimIds.add(claimId);
|
||||
});
|
||||
return Object.assign({}, state, {
|
||||
myClaims: Array.from(myClaimIds),
|
||||
byId,
|
||||
pendingById,
|
||||
myChannelClaims: Array.from(myChannelClaims),
|
||||
claimsByUri: byUri,
|
||||
});
|
||||
};
|
||||
|
||||
reducers[ACTIONS.UPDATE_CONFIRMED_CLAIMS] = (state: State, action: any): State => {
|
||||
const {
|
||||
claims: confirmedClaims,
|
||||
pending: pendingClaims,
|
||||
}: { claims: Array<Claim>, pending: { [string]: Claim } } = action.data;
|
||||
const byId = Object.assign({}, state.byId);
|
||||
const byUri = Object.assign({}, state.claimsByUri);
|
||||
//
|
||||
confirmedClaims.forEach((claim: GenericClaim) => {
|
||||
const { claim_id: claimId, type } = claim;
|
||||
let newClaim = claim;
|
||||
const oldClaim = byId[claimId];
|
||||
if (oldClaim && oldClaim.canonical_url) {
|
||||
newClaim = mergeClaim(oldClaim, claim);
|
||||
}
|
||||
if (type && type.match(/claim|update|channel/)) {
|
||||
byId[claimId] = newClaim;
|
||||
}
|
||||
});
|
||||
return Object.assign({}, state, {
|
||||
pendingById: pendingClaims,
|
||||
byId,
|
||||
claimsByUri: byUri,
|
||||
});
|
||||
};
|
||||
|
||||
reducers[ACTIONS.ABANDON_CLAIM_SUCCEEDED] = (state: State, action: any): State => {
|
||||
const { claimId }: { claimId: string } = action.data;
|
||||
const byId = Object.assign({}, state.byId);
|
||||
const newMyClaims = state.myClaims ? state.myClaims.slice() : [];
|
||||
const newMyChannelClaims = state.myChannelClaims ? state.myChannelClaims.slice() : [];
|
||||
const claimsByUri = Object.assign({}, state.claimsByUri);
|
||||
const newMyCollectionClaims = state.myCollectionClaims ? state.myCollectionClaims.slice() : [];
|
||||
|
||||
Object.keys(claimsByUri).forEach(uri => {
|
||||
if (claimsByUri[uri] === claimId) {
|
||||
delete claimsByUri[uri];
|
||||
}
|
||||
});
|
||||
const myClaims = newMyClaims.filter(i => i.claim_id && i.claim_id !== claimId);
|
||||
const myClaims = newMyClaims.filter(i => i !== claimId);
|
||||
const myChannelClaims = newMyChannelClaims.filter(i => i !== claimId);
|
||||
const myCollectionClaims = newMyCollectionClaims.filter(i => i !== claimId);
|
||||
|
||||
delete byId[claimId];
|
||||
|
||||
return Object.assign({}, state, {
|
||||
myClaims,
|
||||
myChannelClaims,
|
||||
myCollectionClaims,
|
||||
byId,
|
||||
claimsByUri,
|
||||
});
|
||||
};
|
||||
|
||||
reducers[ACTIONS.CLEAR_CHANNEL_ERRORS] = (state: State): State => ({
|
||||
...state,
|
||||
createChannelError: null,
|
||||
updateChannelError: null,
|
||||
});
|
||||
|
||||
reducers[ACTIONS.CREATE_CHANNEL_STARTED] = (state: State): State => ({
|
||||
...state,
|
||||
creatingChannel: true,
|
||||
|
@ -358,19 +581,7 @@ reducers[ACTIONS.CREATE_CHANNEL_STARTED] = (state: State): State => ({
|
|||
});
|
||||
|
||||
reducers[ACTIONS.CREATE_CHANNEL_COMPLETED] = (state: State, action: any): State => {
|
||||
const channelClaim: ChannelClaim = action.data.channelClaim;
|
||||
const byId = Object.assign({}, state.byId);
|
||||
const pendingById = Object.assign({}, state.pendingById);
|
||||
const myChannelClaims = new Set(state.myChannelClaims);
|
||||
|
||||
byId[channelClaim.claim_id] = channelClaim;
|
||||
pendingById[channelClaim.claim_id] = channelClaim;
|
||||
myChannelClaims.add(channelClaim.claim_id);
|
||||
|
||||
return Object.assign({}, state, {
|
||||
byId,
|
||||
pendingById,
|
||||
myChannelClaims,
|
||||
creatingChannel: false,
|
||||
});
|
||||
};
|
||||
|
@ -390,13 +601,7 @@ reducers[ACTIONS.UPDATE_CHANNEL_STARTED] = (state: State, action: any): State =>
|
|||
};
|
||||
|
||||
reducers[ACTIONS.UPDATE_CHANNEL_COMPLETED] = (state: State, action: any): State => {
|
||||
const channelClaim: ChannelClaim = action.data.channelClaim;
|
||||
const byId = Object.assign({}, state.byId);
|
||||
|
||||
byId[channelClaim.claim_id] = channelClaim;
|
||||
|
||||
return Object.assign({}, state, {
|
||||
byId,
|
||||
updateChannelError: '',
|
||||
updatingChannel: false,
|
||||
});
|
||||
|
@@ -409,6 +614,61 @@ reducers[ACTIONS.UPDATE_CHANNEL_FAILED] = (state: State, action: any): State =>
  });
};

reducers[ACTIONS.CLEAR_COLLECTION_ERRORS] = (state: State): State => ({
  ...state,
  createCollectionError: null,
  updateCollectionError: null,
});

reducers[ACTIONS.COLLECTION_PUBLISH_STARTED] = (state: State): State => ({
  ...state,
  creatingCollection: true,
  createCollectionError: null,
});

reducers[ACTIONS.COLLECTION_PUBLISH_COMPLETED] = (state: State, action: any): State => {
  const myCollections = state.myCollectionClaims || [];
  const myClaims = state.myClaims || [];
  const { claimId } = action.data;
  let myClaimIds = new Set(myClaims);
  let myCollectionClaimsSet = new Set(myCollections);
  myClaimIds.add(claimId);
  myCollectionClaimsSet.add(claimId);
  return Object.assign({}, state, {
    creatingCollection: false,
    myClaims: Array.from(myClaimIds),
    myCollectionClaims: Array.from(myCollectionClaimsSet),
  });
};

reducers[ACTIONS.COLLECTION_PUBLISH_FAILED] = (state: State, action: any): State => {
  return Object.assign({}, state, {
    creatingCollection: false,
    createCollectionError: action.data.error,
  });
};

reducers[ACTIONS.COLLECTION_PUBLISH_UPDATE_STARTED] = (state: State, action: any): State => {
  return Object.assign({}, state, {
    updateCollectionError: '',
    updatingCollection: true,
  });
};

reducers[ACTIONS.COLLECTION_PUBLISH_UPDATE_COMPLETED] = (state: State, action: any): State => {
  return Object.assign({}, state, {
    updateCollectionError: '',
    updatingCollection: false,
  });
};

reducers[ACTIONS.COLLECTION_PUBLISH_UPDATE_FAILED] = (state: State, action: any): State => {
  return Object.assign({}, state, {
    updateCollectionError: action.data.error,
    updatingCollection: false,
  });
};

reducers[ACTIONS.IMPORT_CHANNEL_STARTED] = (state: State): State =>
  Object.assign({}, state, { pendingChannelImports: true });

@@ -457,11 +717,26 @@ reducers[ACTIONS.CLAIM_SEARCH_COMPLETED] = (state: State, action: any): State =>
};
|
||||
|
||||
reducers[ACTIONS.CLAIM_SEARCH_FAILED] = (state: State, action: any): State => {
|
||||
const { query } = action.data;
|
||||
const claimSearchByQuery = Object.assign({}, state.claimSearchByQuery);
|
||||
const fetchingClaimSearchByQuery = Object.assign({}, state.fetchingClaimSearchByQuery);
|
||||
delete fetchingClaimSearchByQuery[action.data.query];
|
||||
const claimSearchByQueryLastPageReached = Object.assign(
|
||||
{},
|
||||
state.claimSearchByQueryLastPageReached
|
||||
);
|
||||
|
||||
delete fetchingClaimSearchByQuery[query];
|
||||
|
||||
if (claimSearchByQuery[query] && claimSearchByQuery[query].length !== 0) {
|
||||
claimSearchByQueryLastPageReached[query] = true;
|
||||
} else {
|
||||
claimSearchByQuery[query] = null;
|
||||
}
|
||||
|
||||
return Object.assign({}, state, {
|
||||
fetchingClaimSearchByQuery,
|
||||
claimSearchByQuery,
|
||||
claimSearchByQueryLastPageReached,
|
||||
});
|
||||
};
|
||||
|
||||
|
@ -505,6 +780,133 @@ reducers[ACTIONS.CLEAR_REPOST_ERROR] = (state: State): State => {
|
|||
repostError: null,
|
||||
};
|
||||
};
|
||||
reducers[ACTIONS.ADD_FILES_REFLECTING] = (state: State, action): State => {
|
||||
const pendingClaim = action.data;
|
||||
const { reflectingById } = state;
|
||||
const claimId = pendingClaim && pendingClaim.claim_id;
|
||||
|
||||
reflectingById[claimId] = { fileListItem: pendingClaim, progress: 0, stalled: false };
|
||||
|
||||
return Object.assign({}, state, {
|
||||
...state,
|
||||
reflectingById: reflectingById,
|
||||
});
|
||||
};
|
||||
reducers[ACTIONS.UPDATE_FILES_REFLECTING] = (state: State, action): State => {
|
||||
const newReflectingById = action.data;
|
||||
|
||||
return Object.assign({}, state, {
|
||||
...state,
|
||||
reflectingById: newReflectingById,
|
||||
});
|
||||
};
|
||||
reducers[ACTIONS.TOGGLE_CHECKING_REFLECTING] = (state: State, action): State => {
|
||||
const checkingReflecting = action.data;
|
||||
|
||||
return Object.assign({}, state, {
|
||||
...state,
|
||||
checkingReflecting,
|
||||
});
|
||||
};
|
||||
reducers[ACTIONS.TOGGLE_CHECKING_PENDING] = (state: State, action): State => {
|
||||
const checking = action.data;
|
||||
|
||||
return Object.assign({}, state, {
|
||||
...state,
|
||||
checkingPending: checking,
|
||||
});
|
||||
};
|
||||
|
||||
reducers[ACTIONS.PURCHASE_LIST_STARTED] = (state: State): State => {
|
||||
return {
|
||||
...state,
|
||||
fetchingMyPurchases: true,
|
||||
fetchingMyPurchasesError: null,
|
||||
};
|
||||
};
|
||||
|
||||
reducers[ACTIONS.PURCHASE_LIST_COMPLETED] = (state: State, action: any): State => {
|
||||
const { result }: { result: PurchaseListResponse, resolve: boolean } = action.data;
|
||||
const page = result.page;
|
||||
const totalItems = result.total_items;
|
||||
|
||||
let byId = Object.assign({}, state.byId);
|
||||
let byUri = Object.assign({}, state.claimsByUri);
|
||||
let urlsForCurrentPage = [];
|
||||
|
||||
result.items.forEach(item => {
|
||||
if (!item.claim) {
|
||||
// Abandoned claim
|
||||
return;
|
||||
}
|
||||
|
||||
const { claim, ...purchaseInfo } = item;
|
||||
claim.purchase_receipt = purchaseInfo;
|
||||
const claimId = claim.claim_id;
|
||||
const uri = claim.canonical_url;
|
||||
|
||||
byId[claimId] = claim;
|
||||
byUri[uri] = claimId;
|
||||
urlsForCurrentPage.push(uri);
|
||||
});
|
||||
|
||||
return Object.assign({}, state, {
|
||||
byId,
|
||||
claimsByUri: byUri,
|
||||
myPurchases: urlsForCurrentPage,
|
||||
myPurchasesPageNumber: page,
|
||||
myPurchasesPageTotalResults: totalItems,
|
||||
fetchingMyPurchases: false,
|
||||
});
|
||||
};
|
||||
|
||||
reducers[ACTIONS.PURCHASE_LIST_FAILED] = (state: State, action: any): State => {
|
||||
const { error } = action.data;
|
||||
|
||||
return {
|
||||
...state,
|
||||
fetchingMyPurchases: false,
|
||||
fetchingMyPurchasesError: error,
|
||||
};
|
||||
};
|
||||
|
||||
reducers[ACTIONS.PURCHASE_URI_COMPLETED] = (state: State, action: any): State => {
|
||||
const { uri, purchaseReceipt } = action.data;
|
||||
|
||||
let byId = Object.assign({}, state.byId);
|
||||
let byUri = Object.assign({}, state.claimsByUri);
|
||||
let myPurchases = state.myPurchases ? state.myPurchases.slice() : [];
|
||||
let urlsForCurrentPage = [];
|
||||
|
||||
const claimId = byUri[uri];
|
||||
if (claimId) {
|
||||
let claim = byId[claimId];
|
||||
claim.purchase_receipt = purchaseReceipt;
|
||||
}
|
||||
|
||||
myPurchases.push(uri);
|
||||
|
||||
return {
|
||||
...state,
|
||||
byId,
|
||||
myPurchases,
|
||||
purchaseUriSuccess: true,
|
||||
};
|
||||
};
|
||||
|
||||
reducers[ACTIONS.PURCHASE_URI_FAILED] = (state: State): State => {
|
||||
return {
|
||||
...state,
|
||||
purchaseUriSuccess: false,
|
||||
};
|
||||
};
|
||||
|
||||
reducers[ACTIONS.CLEAR_PURCHASED_URI_SUCCESS] = (state: State): State => {
|
||||
return {
|
||||
...state,
|
||||
purchaseUriSuccess: false,
|
||||
};
|
||||
};
|
||||
|
||||
export function claimsReducer(state: State = defaultState, action: any) {
|
||||
const handler = reducers[action.type];
|
||||
|
|
239  src/redux/reducers/collections.js  Normal file
@@ -0,0 +1,239 @@
// @flow
import { handleActions } from 'util/redux-utils';
import * as ACTIONS from 'constants/action_types';
import * as COLS from 'constants/collections';

const getTimestamp = () => {
  return Math.floor(Date.now() / 1000);
};

const defaultState: CollectionState = {
  builtin: {
    watchlater: {
      items: [],
      id: COLS.WATCH_LATER_ID,
      name: 'Watch Later',
      updatedAt: getTimestamp(),
      type: COLS.COL_TYPE_PLAYLIST,
    },
    favorites: {
      items: [],
      id: COLS.FAVORITES_ID,
      name: 'Favorites',
      type: COLS.COL_TYPE_PLAYLIST,
      updatedAt: getTimestamp(),
    },
  },
  resolved: {},
  unpublished: {}, // sync
  edited: {},
  pending: {},
  saved: [],
  isResolvingCollectionById: {},
  error: null,
};

const collectionsReducer = handleActions(
  {
    [ACTIONS.COLLECTION_NEW]: (state, action) => {
      const { entry: params } = action.data; // { id:, items: Array<string>}
      // entry
      const newListTemplate = {
        id: params.id,
        name: params.name,
        items: [],
        updatedAt: getTimestamp(),
        type: params.type,
      };

      const newList = Object.assign({}, newListTemplate, { ...params });
      const { unpublished: lists } = state;
      const newLists = Object.assign({}, lists, { [params.id]: newList });

      return {
        ...state,
        unpublished: newLists,
      };
    },

    [ACTIONS.COLLECTION_DELETE]: (state, action) => {
      const { id, collectionKey } = action.data;
      const { edited: editList, unpublished: unpublishedList, pending: pendingList } = state;
      const newEditList = Object.assign({}, editList);
      const newUnpublishedList = Object.assign({}, unpublishedList);

      const newPendingList = Object.assign({}, pendingList);

      if (collectionKey && state[collectionKey] && state[collectionKey][id]) {
        const newList = Object.assign({}, state[collectionKey]);
        delete newList[id];
        return {
          ...state,
          [collectionKey]: newList,
        };
      } else {
        if (newEditList[id]) {
          delete newEditList[id];
        } else if (newUnpublishedList[id]) {
          delete newUnpublishedList[id];
        } else if (newPendingList[id]) {
          delete newPendingList[id];
        }
      }
      return {
        ...state,
        edited: newEditList,
        unpublished: newUnpublishedList,
        pending: newPendingList,
      };
    },

    [ACTIONS.COLLECTION_PENDING]: (state, action) => {
      const { localId, claimId } = action.data;
      const {
        resolved: resolvedList,
        edited: editList,
        unpublished: unpublishedList,
        pending: pendingList,
      } = state;

      const newEditList = Object.assign({}, editList);
      const newResolvedList = Object.assign({}, resolvedList);
      const newUnpublishedList = Object.assign({}, unpublishedList);
      const newPendingList = Object.assign({}, pendingList);

      if (localId) {
        // new publish
        newPendingList[claimId] = Object.assign({}, newUnpublishedList[localId] || {});
        delete newUnpublishedList[localId];
      } else {
        // edit update
        newPendingList[claimId] = Object.assign(
          {},
          newEditList[claimId] || newResolvedList[claimId]
        );
        delete newEditList[claimId];
      }

      return {
        ...state,
        edited: newEditList,
        unpublished: newUnpublishedList,
        pending: newPendingList,
      };
    },

    [ACTIONS.COLLECTION_EDIT]: (state, action) => {
      const { id, collectionKey, collection } = action.data;

      if (COLS.BUILTIN_LISTS.includes(id)) {
        const { builtin: lists } = state;
        return {
          ...state,
          [collectionKey]: { ...lists, [id]: collection },
        };
      }

      if (collectionKey === 'edited') {
        const { edited: lists } = state;
        return {
          ...state,
          edited: { ...lists, [id]: collection },
        };
      }
      const { unpublished: lists } = state;
      return {
        ...state,
        unpublished: { ...lists, [id]: collection },
      };
    },

    [ACTIONS.COLLECTION_ERROR]: (state, action) => {
      return Object.assign({}, state, {
        error: action.data.message,
      });
    },

    [ACTIONS.COLLECTION_ITEMS_RESOLVE_STARTED]: (state, action) => {
      const { ids } = action.data;
      const { isResolvingCollectionById } = state;
      const newResolving = Object.assign({}, isResolvingCollectionById);
      ids.forEach(id => {
        newResolving[id] = true;
      });
      return Object.assign({}, state, {
        ...state,
        error: '',
        isResolvingCollectionById: newResolving,
      });
    },
    [ACTIONS.USER_STATE_POPULATE]: (state, action) => {
      const {
        builtinCollections,
        savedCollections,
        unpublishedCollections,
        editedCollections,
      } = action.data;
      return {
        ...state,
        edited: editedCollections || state.edited,
        unpublished: unpublishedCollections || state.unpublished,
        builtin: builtinCollections || state.builtin,
        saved: savedCollections || state.saved,
      };
    },
    [ACTIONS.COLLECTION_ITEMS_RESOLVE_COMPLETED]: (state, action) => {
      const { resolvedCollections, failedCollectionIds } = action.data;
      const { pending, edited, isResolvingCollectionById, resolved } = state;
      const newPending = Object.assign({}, pending);
      const newEdited = Object.assign({}, edited);
      const newResolved = Object.assign({}, resolved, resolvedCollections);

      const resolvedIds = Object.keys(resolvedCollections);
      const newResolving = Object.assign({}, isResolvingCollectionById);
      if (resolvedCollections && Object.keys(resolvedCollections).length) {
        resolvedIds.forEach(resolvedId => {
          if (newEdited[resolvedId]) {
            if (newEdited[resolvedId]['updatedAt'] < resolvedCollections[resolvedId]['updatedAt']) {
              delete newEdited[resolvedId];
            }
          }
          delete newResolving[resolvedId];
          if (newPending[resolvedId]) {
            delete newPending[resolvedId];
          }
        });
      }

      if (failedCollectionIds && Object.keys(failedCollectionIds).length) {
        failedCollectionIds.forEach(failedId => {
          delete newResolving[failedId];
        });
      }

      return Object.assign({}, state, {
        ...state,
        pending: newPending,
        resolved: newResolved,
        edited: newEdited,
        isResolvingCollectionById: newResolving,
      });
    },
    [ACTIONS.COLLECTION_ITEMS_RESOLVE_FAILED]: (state, action) => {
      const { ids } = action.data;
      const { isResolvingCollectionById } = state;
      const newResolving = Object.assign({}, isResolvingCollectionById);
      ids.forEach(id => {
        delete newResolving[id];
      });
      return Object.assign({}, state, {
        ...state,
        isResolvingCollectionById: newResolving,
        error: action.data.message,
      });
    },
  },
  defaultState
);

export { collectionsReducer };
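For reference, a minimal sketch of how the new collections reducer might be wired up and exercised. The `combineReducers` call and the `collections` state key are illustrative assumptions; only the action shape (`data.entry` with `id`, `name`, `items`, `type`) comes from the reducer above, and `local-123` is a made-up local id.

// Hypothetical wiring -- the real root reducer and state key may differ.
import { createStore, combineReducers } from 'redux';
import * as ACTIONS from 'constants/action_types';
import * as COLS from 'constants/collections';
import { collectionsReducer } from 'redux/reducers/collections';

const store = createStore(combineReducers({ collections: collectionsReducer }));

// COLLECTION_NEW expects the new list under action.data.entry.
store.dispatch({
  type: ACTIONS.COLLECTION_NEW,
  data: {
    entry: { id: 'local-123', name: 'My playlist', items: [], type: COLS.COL_TYPE_PLAYLIST },
  },
});

// The list stays under `unpublished`, keyed by its local id, until it is published
// (COLLECTION_PENDING then moves it under the real claim id).
console.log(store.getState().collections.unpublished['local-123'].name); // 'My playlist'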
@@ -1,153 +0,0 @@
// @flow
|
||||
import * as ACTIONS from 'constants/action_types';
|
||||
import { handleActions } from 'util/redux-utils';
|
||||
|
||||
const defaultState: CommentsState = {
|
||||
commentById: {}, // commentId -> Comment
|
||||
byId: {}, // ClaimID -> list of comments
|
||||
commentsByUri: {}, // URI -> claimId
|
||||
isLoading: false,
|
||||
myComments: undefined,
|
||||
};
|
||||
|
||||
export const commentReducer = handleActions(
|
||||
{
|
||||
[ACTIONS.COMMENT_CREATE_STARTED]: (state: CommentsState, action: any): CommentsState => ({
|
||||
...state,
|
||||
isLoading: true,
|
||||
}),
|
||||
|
||||
[ACTIONS.COMMENT_CREATE_FAILED]: (state: CommentsState, action: any) => ({
|
||||
...state,
|
||||
isLoading: false,
|
||||
}),
|
||||
|
||||
[ACTIONS.COMMENT_CREATE_COMPLETED]: (state: CommentsState, action: any): CommentsState => {
|
||||
const { comment, claimId }: { comment: Comment, claimId: string } = action.data;
|
||||
const commentById = Object.assign({}, state.commentById);
|
||||
const byId = Object.assign({}, state.byId);
|
||||
const comments = byId[claimId];
|
||||
const newCommentIds = comments.slice();
|
||||
|
||||
// add the comment by its ID
|
||||
commentById[comment.comment_id] = comment;
|
||||
|
||||
// push the comment_id to the top of ID list
|
||||
newCommentIds.unshift(comment.comment_id);
|
||||
byId[claimId] = newCommentIds;
|
||||
|
||||
return {
|
||||
...state,
|
||||
commentById,
|
||||
byId,
|
||||
isLoading: false,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.COMMENT_LIST_STARTED]: state => ({ ...state, isLoading: true }),
|
||||
|
||||
[ACTIONS.COMMENT_LIST_COMPLETED]: (state: CommentsState, action: any) => {
|
||||
const { comments, claimId, uri } = action.data;
|
||||
|
||||
const commentById = Object.assign({}, state.commentById);
|
||||
const byId = Object.assign({}, state.byId);
|
||||
const commentsByUri = Object.assign({}, state.commentsByUri);
|
||||
|
||||
if (comments) {
|
||||
// we use an Array to preserve order of listing
|
||||
// in reality this doesn't matter and we can just
|
||||
// sort comments by their timestamp
|
||||
const commentIds = Array(comments.length);
|
||||
|
||||
// map the comment_ids to the new comments
|
||||
for (let i = 0; i < comments.length; i++) {
|
||||
commentIds[i] = comments[i].comment_id;
|
||||
commentById[commentIds[i]] = comments[i];
|
||||
}
|
||||
|
||||
byId[claimId] = commentIds;
|
||||
commentsByUri[uri] = claimId;
|
||||
}
|
||||
return {
|
||||
...state,
|
||||
byId,
|
||||
commentById,
|
||||
commentsByUri,
|
||||
isLoading: false,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.COMMENT_LIST_FAILED]: (state: CommentsState, action: any) => ({
|
||||
...state,
|
||||
isLoading: false,
|
||||
}),
|
||||
[ACTIONS.COMMENT_ABANDON_STARTED]: (state: CommentsState, action: any) => ({
|
||||
...state,
|
||||
isLoading: true,
|
||||
}),
|
||||
[ACTIONS.COMMENT_ABANDON_COMPLETED]: (state: CommentsState, action: any) => {
|
||||
const { comment_id } = action.data;
|
||||
const commentById = Object.assign({}, state.commentById);
|
||||
const byId = Object.assign({}, state.byId);
|
||||
|
||||
// to remove the comment and its references
|
||||
const claimId = commentById[comment_id].claim_id;
|
||||
for (let i = 0; i < byId[claimId].length; i++) {
|
||||
if (byId[claimId][i] === comment_id) {
|
||||
byId[claimId].splice(i, 1);
|
||||
break;
|
||||
}
|
||||
}
|
||||
delete commentById[comment_id];
|
||||
|
||||
return {
|
||||
...state,
|
||||
commentById,
|
||||
byId,
|
||||
isLoading: false,
|
||||
};
|
||||
},
|
||||
// do nothing
|
||||
[ACTIONS.COMMENT_ABANDON_FAILED]: (state: CommentsState, action: any) => ({
|
||||
...state,
|
||||
isLoading: false,
|
||||
}),
|
||||
// do nothing
|
||||
[ACTIONS.COMMENT_UPDATE_STARTED]: (state: CommentsState, action: any) => ({
|
||||
...state,
|
||||
isLoading: true,
|
||||
}),
|
||||
// replace existing comment with comment returned here under its comment_id
|
||||
[ACTIONS.COMMENT_UPDATE_COMPLETED]: (state: CommentsState, action: any) => {
|
||||
const { comment } = action.data;
|
||||
const commentById = Object.assign({}, state.commentById);
|
||||
commentById[comment.comment_id] = comment;
|
||||
|
||||
return {
|
||||
...state,
|
||||
commentById,
|
||||
isLoading: false,
|
||||
};
|
||||
},
|
||||
// nothing can be done here
|
||||
[ACTIONS.COMMENT_UPDATE_FAILED]: (state: CommentsState, action: any) => ({
|
||||
...state,
|
||||
isLoading: false,
|
||||
}),
|
||||
// nothing can really be done here
|
||||
[ACTIONS.COMMENT_HIDE_STARTED]: (state: CommentsState, action: any) => ({
|
||||
...state,
|
||||
isLoading: true,
|
||||
}),
|
||||
[ACTIONS.COMMENT_HIDE_COMPLETED]: (state: CommentsState, action: any) => ({
|
||||
...state, // todo: add HiddenComments state & create selectors
|
||||
isLoading: false,
|
||||
}),
|
||||
// nothing can be done here
|
||||
[ACTIONS.COMMENT_HIDE_FAILED]: (state: CommentsState, action: any) => ({
|
||||
...state,
|
||||
isLoading: false,
|
||||
}),
|
||||
},
|
||||
defaultState
|
||||
);
|
|
@@ -1,89 +0,0 @@
// @flow
|
||||
import * as ACTIONS from 'constants/action_types';
|
||||
|
||||
const reducers = {};
|
||||
const defaultState = {
|
||||
failedPurchaseUris: [],
|
||||
purchasedUris: [],
|
||||
purchaseUriErrorMessage: '',
|
||||
};
|
||||
|
||||
reducers[ACTIONS.PURCHASE_URI_STARTED] = (
|
||||
state: FileState,
|
||||
action: PurchaseUriStarted
|
||||
): FileState => {
|
||||
const { uri } = action.data;
|
||||
const newFailedPurchaseUris = state.failedPurchaseUris.slice();
|
||||
if (newFailedPurchaseUris.includes(uri)) {
|
||||
newFailedPurchaseUris.splice(newFailedPurchaseUris.indexOf(uri), 1);
|
||||
}
|
||||
|
||||
return {
|
||||
...state,
|
||||
failedPurchaseUris: newFailedPurchaseUris,
|
||||
purchaseUriErrorMessage: '',
|
||||
};
|
||||
};
|
||||
|
||||
reducers[ACTIONS.PURCHASE_URI_COMPLETED] = (
|
||||
state: FileState,
|
||||
action: PurchaseUriCompleted
|
||||
): FileState => {
|
||||
const { uri } = action.data;
|
||||
const newPurchasedUris = state.purchasedUris.slice();
|
||||
const newFailedPurchaseUris = state.failedPurchaseUris.slice();
|
||||
|
||||
if (!newPurchasedUris.includes(uri)) {
|
||||
newPurchasedUris.push(uri);
|
||||
}
|
||||
if (newFailedPurchaseUris.includes(uri)) {
|
||||
newFailedPurchaseUris.splice(newFailedPurchaseUris.indexOf(uri), 1);
|
||||
}
|
||||
|
||||
return {
|
||||
...state,
|
||||
failedPurchaseUris: newFailedPurchaseUris,
|
||||
purchasedUris: newPurchasedUris,
|
||||
purchaseUriErrorMessage: '',
|
||||
};
|
||||
};
|
||||
|
||||
reducers[ACTIONS.PURCHASE_URI_FAILED] = (
|
||||
state: FileState,
|
||||
action: PurchaseUriFailed
|
||||
): FileState => {
|
||||
const { uri, error } = action.data;
|
||||
const newFailedPurchaseUris = state.failedPurchaseUris.slice();
|
||||
|
||||
if (!newFailedPurchaseUris.includes(uri)) {
|
||||
newFailedPurchaseUris.push(uri);
|
||||
}
|
||||
|
||||
return {
|
||||
...state,
|
||||
failedPurchaseUris: newFailedPurchaseUris,
|
||||
purchaseUriErrorMessage: error,
|
||||
};
|
||||
};
|
||||
|
||||
reducers[ACTIONS.DELETE_PURCHASED_URI] = (
|
||||
state: FileState,
|
||||
action: DeletePurchasedUri
|
||||
): FileState => {
|
||||
const { uri } = action.data;
|
||||
const newPurchasedUris = state.purchasedUris.slice();
|
||||
if (newPurchasedUris.includes(uri)) {
|
||||
newPurchasedUris.splice(newPurchasedUris.indexOf(uri), 1);
|
||||
}
|
||||
|
||||
return {
|
||||
...state,
|
||||
purchasedUris: newPurchasedUris,
|
||||
};
|
||||
};
|
||||
|
||||
export function fileReducer(state: FileState = defaultState, action: any) {
|
||||
const handler = reducers[action.type];
|
||||
if (handler) return handler(state, action);
|
||||
return state;
|
||||
}
|
|
@@ -7,8 +7,13 @@ import { CHANNEL_ANONYMOUS } from 'constants/claim';
|
||||
type PublishState = {
|
||||
editingURI: ?string,
|
||||
fileText: ?string,
|
||||
filePath: ?string,
|
||||
remoteFileUrl: ?string,
|
||||
contentIsFree: boolean,
|
||||
fileDur: number,
|
||||
fileSize: number,
|
||||
fileVid: boolean,
|
||||
fee: {
|
||||
amount: number,
|
||||
currency: string,
|
||||
|
@ -17,8 +22,11 @@ type PublishState = {
|
|||
thumbnail_url: string,
|
||||
thumbnailPath: string,
|
||||
uploadThumbnailStatus: string,
|
||||
thumbnailError: ?boolean,
|
||||
description: string,
|
||||
language: string,
|
||||
releaseTime: ?number,
|
||||
releaseTimeEdited: ?number,
|
||||
channel: string,
|
||||
channelId: ?string,
|
||||
name: string,
|
||||
|
@ -28,11 +36,18 @@ type PublishState = {
|
|||
otherLicenseDescription: string,
|
||||
licenseUrl: string,
|
||||
tags: Array<string>,
|
||||
optimize: boolean,
|
||||
useLBRYUploader: boolean,
|
||||
};
|
||||
|
||||
const defaultState: PublishState = {
|
||||
editingURI: undefined,
|
||||
fileText: '',
|
||||
filePath: undefined,
|
||||
fileDur: 0,
|
||||
fileSize: 0,
|
||||
fileVid: false,
|
||||
remoteFileUrl: undefined,
|
||||
contentIsFree: true,
|
||||
fee: {
|
||||
amount: 1,
|
||||
|
@ -42,14 +57,17 @@ const defaultState: PublishState = {
|
|||
thumbnail_url: '',
|
||||
thumbnailPath: '',
|
||||
uploadThumbnailStatus: THUMBNAIL_STATUSES.API_DOWN,
|
||||
thumbnailError: undefined,
|
||||
description: '',
|
||||
language: '',
|
||||
releaseTime: undefined,
|
||||
releaseTimeEdited: undefined,
|
||||
nsfw: false,
|
||||
channel: CHANNEL_ANONYMOUS,
|
||||
channelId: '',
|
||||
name: '',
|
||||
nameError: undefined,
|
||||
bid: 0.1,
|
||||
bid: 0.01,
|
||||
bidError: undefined,
|
||||
licenseType: 'None',
|
||||
otherLicenseDescription: 'All rights reserved',
|
||||
|
@ -58,6 +76,8 @@ const defaultState: PublishState = {
|
|||
publishing: false,
|
||||
publishSuccess: false,
|
||||
publishError: undefined,
|
||||
optimize: false,
|
||||
useLBRYUploader: false,
|
||||
};
|
||||
|
||||
export const publishReducer = handleActions(
|
||||
|
@ -69,8 +89,13 @@ export const publishReducer = handleActions(
|
|||
...data,
|
||||
};
|
||||
},
|
||||
[ACTIONS.CLEAR_PUBLISH]: (): PublishState => ({
|
||||
[ACTIONS.CLEAR_PUBLISH]: (state: PublishState): PublishState => ({
|
||||
...defaultState,
|
||||
uri: undefined,
|
||||
channel: state.channel,
|
||||
bid: state.bid,
|
||||
optimize: state.optimize,
|
||||
language: state.language,
|
||||
}),
|
||||
[ACTIONS.PUBLISH_START]: (state: PublishState): PublishState => ({
|
||||
...state,
|
||||
|
|
|
@@ -1,137 +0,0 @@
// @flow
|
||||
import * as ACTIONS from 'constants/action_types';
|
||||
import { handleActions } from 'util/redux-utils';
|
||||
import { SEARCH_OPTIONS } from 'constants/search';
|
||||
|
||||
const defaultState = {
|
||||
isActive: false, // does the user have any typed text in the search input
|
||||
focused: false, // is the search input focused
|
||||
searchQuery: '', // needs to be an empty string for input focusing
|
||||
options: {
|
||||
[SEARCH_OPTIONS.RESULT_COUNT]: 30,
|
||||
[SEARCH_OPTIONS.CLAIM_TYPE]: SEARCH_OPTIONS.INCLUDE_FILES_AND_CHANNELS,
|
||||
[SEARCH_OPTIONS.MEDIA_AUDIO]: true,
|
||||
[SEARCH_OPTIONS.MEDIA_VIDEO]: true,
|
||||
[SEARCH_OPTIONS.MEDIA_TEXT]: true,
|
||||
[SEARCH_OPTIONS.MEDIA_IMAGE]: true,
|
||||
[SEARCH_OPTIONS.MEDIA_APPLICATION]: true,
|
||||
},
|
||||
suggestions: {},
|
||||
urisByQuery: {},
|
||||
resolvedResultsByQuery: {},
|
||||
resolvedResultsByQueryLastPageReached: {},
|
||||
};
|
||||
|
||||
export const searchReducer = handleActions(
|
||||
{
|
||||
[ACTIONS.SEARCH_START]: (state: SearchState): SearchState => ({
|
||||
...state,
|
||||
searching: true,
|
||||
}),
|
||||
[ACTIONS.SEARCH_SUCCESS]: (state: SearchState, action: SearchSuccess): SearchState => {
|
||||
const { query, uris } = action.data;
|
||||
|
||||
return {
|
||||
...state,
|
||||
searching: false,
|
||||
urisByQuery: Object.assign({}, state.urisByQuery, { [query]: uris }),
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.SEARCH_FAIL]: (state: SearchState): SearchState => ({
|
||||
...state,
|
||||
searching: false,
|
||||
}),
|
||||
|
||||
[ACTIONS.RESOLVED_SEARCH_START]: (state: SearchState): SearchState => ({
|
||||
...state,
|
||||
searching: true,
|
||||
}),
|
||||
[ACTIONS.RESOLVED_SEARCH_SUCCESS]: (
|
||||
state: SearchState,
|
||||
action: ResolvedSearchSuccess
|
||||
): SearchState => {
|
||||
const resolvedResultsByQuery = Object.assign({}, state.resolvedResultsByQuery);
|
||||
const resolvedResultsByQueryLastPageReached = Object.assign(
|
||||
{},
|
||||
state.resolvedResultsByQueryLastPageReached
|
||||
);
|
||||
const { append, query, results, pageSize } = action.data;
|
||||
|
||||
if (append) {
|
||||
// todo: check for duplicates when concatenating?
|
||||
resolvedResultsByQuery[query] =
|
||||
resolvedResultsByQuery[query] && resolvedResultsByQuery[query].length
|
||||
? resolvedResultsByQuery[query].concat(results)
|
||||
: results;
|
||||
} else {
|
||||
resolvedResultsByQuery[query] = results;
|
||||
}
|
||||
|
||||
// the returned number of urls is less than the page size, so we're on the last page
|
||||
resolvedResultsByQueryLastPageReached[query] = results.length < pageSize;
|
||||
|
||||
return {
|
||||
...state,
|
||||
searching: false,
|
||||
resolvedResultsByQuery,
|
||||
resolvedResultsByQueryLastPageReached,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.RESOLVED_SEARCH_FAIL]: (state: SearchState): SearchState => ({
|
||||
...state,
|
||||
searching: false,
|
||||
}),
|
||||
|
||||
[ACTIONS.UPDATE_SEARCH_QUERY]: (
|
||||
state: SearchState,
|
||||
action: UpdateSearchQuery
|
||||
): SearchState => ({
|
||||
...state,
|
||||
searchQuery: action.data.query,
|
||||
isActive: true,
|
||||
}),
|
||||
|
||||
[ACTIONS.UPDATE_SEARCH_SUGGESTIONS]: (
|
||||
state: SearchState,
|
||||
action: UpdateSearchSuggestions
|
||||
): SearchState => ({
|
||||
...state,
|
||||
suggestions: {
|
||||
...state.suggestions,
|
||||
[action.data.query]: action.data.suggestions,
|
||||
},
|
||||
}),
|
||||
|
||||
// sets isActive to false so the uri will be populated correctly if the
|
||||
// user is on a file page. The search query will still be present on any
|
||||
// other page
|
||||
[ACTIONS.DISMISS_NOTIFICATION]: (state: SearchState): SearchState => ({
|
||||
...state,
|
||||
isActive: false,
|
||||
}),
|
||||
|
||||
[ACTIONS.SEARCH_FOCUS]: (state: SearchState): SearchState => ({
|
||||
...state,
|
||||
focused: true,
|
||||
}),
|
||||
[ACTIONS.SEARCH_BLUR]: (state: SearchState): SearchState => ({
|
||||
...state,
|
||||
focused: false,
|
||||
}),
|
||||
[ACTIONS.UPDATE_SEARCH_OPTIONS]: (
|
||||
state: SearchState,
|
||||
action: UpdateSearchOptions
|
||||
): SearchState => {
|
||||
const { options: oldOptions } = state;
|
||||
const newOptions = action.data;
|
||||
const options = { ...oldOptions, ...newOptions };
|
||||
return {
|
||||
...state,
|
||||
options,
|
||||
};
|
||||
},
|
||||
},
|
||||
defaultState
|
||||
);
|
|
@@ -1,80 +0,0 @@
// @flow
|
||||
import * as ACTIONS from 'constants/action_types';
|
||||
import { handleActions } from 'util/redux-utils';
|
||||
import { DEFAULT_KNOWN_TAGS, DEFAULT_FOLLOWED_TAGS } from 'constants/tags';
|
||||
|
||||
function getDefaultKnownTags() {
|
||||
return DEFAULT_FOLLOWED_TAGS.concat(DEFAULT_KNOWN_TAGS).reduce(
|
||||
(tagsMap, tag) => ({
|
||||
...tagsMap,
|
||||
[tag]: { name: tag },
|
||||
}),
|
||||
{}
|
||||
);
|
||||
}
|
||||
|
||||
const defaultState: TagState = {
|
||||
followedTags: DEFAULT_FOLLOWED_TAGS,
|
||||
knownTags: getDefaultKnownTags(),
|
||||
};
|
||||
|
||||
export const tagsReducer = handleActions(
|
||||
{
|
||||
[ACTIONS.TOGGLE_TAG_FOLLOW]: (state: TagState, action: TagAction): TagState => {
|
||||
const { followedTags } = state;
|
||||
const { name } = action.data;
|
||||
|
||||
let newFollowedTags = followedTags.slice();
|
||||
|
||||
if (newFollowedTags.includes(name)) {
|
||||
newFollowedTags = newFollowedTags.filter(tag => tag !== name);
|
||||
} else {
|
||||
newFollowedTags.push(name);
|
||||
}
|
||||
|
||||
return {
|
||||
...state,
|
||||
followedTags: newFollowedTags,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.TAG_ADD]: (state: TagState, action: TagAction) => {
|
||||
const { knownTags } = state;
|
||||
const { name } = action.data;
|
||||
|
||||
let newKnownTags = { ...knownTags };
|
||||
newKnownTags[name] = { name };
|
||||
|
||||
return {
|
||||
...state,
|
||||
knownTags: newKnownTags,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.TAG_DELETE]: (state: TagState, action: TagAction) => {
|
||||
const { knownTags, followedTags } = state;
|
||||
const { name } = action.data;
|
||||
|
||||
let newKnownTags = { ...knownTags };
|
||||
delete newKnownTags[name];
|
||||
const newFollowedTags = followedTags.filter(tag => tag !== name);
|
||||
|
||||
return {
|
||||
...state,
|
||||
knownTags: newKnownTags,
|
||||
followedTags: newFollowedTags,
|
||||
};
|
||||
},
|
||||
[ACTIONS.USER_STATE_POPULATE]: (
|
||||
state: TagState,
|
||||
action: { data: { tags: ?Array<string> } }
|
||||
) => {
|
||||
const { tags } = action.data;
|
||||
return {
|
||||
...state,
|
||||
followedTags: tags && tags.length ? tags : DEFAULT_FOLLOWED_TAGS,
|
||||
};
|
||||
},
|
||||
},
|
||||
defaultState
|
||||
);
|
|
@@ -26,6 +26,7 @@ type WalletState = {
supports: { [string]: Support },
|
||||
abandoningSupportsByOutpoint: { [string]: boolean },
|
||||
fetchingTransactions: boolean,
|
||||
fetchingTransactionsError: string,
|
||||
gettingNewAddress: boolean,
|
||||
draftTransaction: any,
|
||||
sendingSupport: boolean,
|
||||
|
@ -43,6 +44,19 @@ type WalletState = {
|
|||
walletLockSucceded: ?boolean,
|
||||
walletLockResult: ?boolean,
|
||||
walletReconnecting: boolean,
|
||||
txoFetchParams: {},
|
||||
utxoCounts: {},
|
||||
txoPage: any,
|
||||
fetchId: string,
|
||||
fetchingTxos: boolean,
|
||||
fetchingTxosError?: string,
|
||||
consolidatingUtxos: boolean,
|
||||
pendingConsolidateTxid?: string,
|
||||
massClaimingTips: boolean,
|
||||
pendingMassClaimTxid?: string,
|
||||
pendingSupportTransactions: {}, // { claimId: {txid: 123, amount 12.3}, }
|
||||
pendingTxos: Array<string>,
|
||||
abandonClaimSupportError?: string,
|
||||
};
|
||||
|
||||
const defaultState = {
|
||||
|
@ -55,6 +69,7 @@ const defaultState = {
|
|||
latestBlock: undefined,
|
||||
transactions: {},
|
||||
fetchingTransactions: false,
|
||||
fetchingTransactionsError: undefined,
|
||||
supports: {},
|
||||
fetchingSupports: false,
|
||||
abandoningSupportsByOutpoint: {},
|
||||
|
@ -76,6 +91,22 @@ const defaultState = {
|
|||
walletLockResult: null,
|
||||
transactionListFilter: 'all',
|
||||
walletReconnecting: false,
|
||||
txoFetchParams: {},
|
||||
utxoCounts: {},
|
||||
fetchingUtxoCounts: false,
|
||||
fetchingUtxoError: undefined,
|
||||
consolidatingUtxos: false,
|
||||
pendingConsolidateTxid: null,
|
||||
massClaimingTips: false,
|
||||
pendingMassClaimTxid: null,
|
||||
txoPage: {},
|
||||
fetchId: '',
|
||||
fetchingTxos: false,
|
||||
fetchingTxosError: undefined,
|
||||
pendingSupportTransactions: {},
|
||||
pendingTxos: [],
|
||||
|
||||
abandonClaimSupportError: undefined,
|
||||
};
|
||||
|
||||
export const walletReducer = handleActions(
|
||||
|
@ -100,6 +131,140 @@ export const walletReducer = handleActions(
|
|||
};
|
||||
},
|
||||
|
||||
[ACTIONS.FETCH_TXO_PAGE_STARTED]: (state: WalletState, action) => {
|
||||
return {
|
||||
...state,
|
||||
fetchId: action.data,
|
||||
fetchingTxos: true,
|
||||
fetchingTxosError: undefined,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.FETCH_TXO_PAGE_COMPLETED]: (state: WalletState, action) => {
|
||||
if (state.fetchId !== action.data.fetchId) {
|
||||
// Leave 'state' and 'fetchingTxos' alone. The latter ensures the
// spinner keeps spinning for the latest transaction.
|
||||
return { ...state };
|
||||
}
|
||||
|
||||
return {
|
||||
...state,
|
||||
txoPage: action.data.result,
|
||||
fetchId: '',
|
||||
fetchingTxos: false,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.FETCH_TXO_PAGE_FAILED]: (state: WalletState, action) => {
|
||||
return {
|
||||
...state,
|
||||
txoPage: {},
|
||||
fetchId: '',
|
||||
fetchingTxos: false,
|
||||
fetchingTxosError: action.data,
|
||||
};
|
||||
},
|
||||
[ACTIONS.FETCH_UTXO_COUNT_STARTED]: (state: WalletState) => {
|
||||
return {
|
||||
...state,
|
||||
fetchingUtxoCounts: true,
|
||||
fetchingUtxoError: undefined,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.FETCH_UTXO_COUNT_COMPLETED]: (state: WalletState, action) => {
|
||||
return {
|
||||
...state,
|
||||
utxoCounts: action.data,
|
||||
fetchingUtxoCounts: false,
|
||||
};
|
||||
},
|
||||
[ACTIONS.FETCH_UTXO_COUNT_FAILED]: (state: WalletState, action) => {
|
||||
return {
|
||||
...state,
|
||||
utxoCounts: {},
|
||||
fetchingUtxoCounts: false,
|
||||
fetchingUtxoError: action.data,
|
||||
};
|
||||
},
|
||||
[ACTIONS.DO_UTXO_CONSOLIDATE_STARTED]: (state: WalletState) => {
|
||||
return {
|
||||
...state,
|
||||
consolidatingUtxos: true,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.DO_UTXO_CONSOLIDATE_COMPLETED]: (state: WalletState, action) => {
|
||||
const { txid } = action.data;
|
||||
return {
|
||||
...state,
|
||||
consolidatingUtxos: false,
|
||||
pendingConsolidateTxid: txid,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.DO_UTXO_CONSOLIDATE_FAILED]: (state: WalletState, action) => {
|
||||
return {
|
||||
...state,
|
||||
consolidatingUtxos: false,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.TIP_CLAIM_MASS_STARTED]: (state: WalletState) => {
|
||||
return {
|
||||
...state,
|
||||
massClaimingTips: true,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.TIP_CLAIM_MASS_COMPLETED]: (state: WalletState, action) => {
|
||||
const { txid } = action.data;
|
||||
return {
|
||||
...state,
|
||||
massClaimingTips: false,
|
||||
pendingMassClaimTxid: txid,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.TIP_CLAIM_MASS_FAILED]: (state: WalletState, action) => {
|
||||
return {
|
||||
...state,
|
||||
massClaimingTips: false,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.PENDING_CONSOLIDATED_TXOS_UPDATED]: (state: WalletState, action) => {
|
||||
const { pendingTxos, pendingMassClaimTxid, pendingConsolidateTxid } = state;
|
||||
|
||||
const { txids, remove } = action.data;
|
||||
|
||||
if (remove) {
|
||||
const newTxos = pendingTxos.filter(txo => !txids.includes(txo));
|
||||
const newPendingMassClaimTxid = txids.includes(pendingMassClaimTxid)
|
||||
? undefined
|
||||
: pendingMassClaimTxid;
|
||||
const newPendingConsolidateTxid = txids.includes(pendingConsolidateTxid)
|
||||
? undefined
|
||||
: pendingConsolidateTxid;
|
||||
return {
|
||||
...state,
|
||||
pendingTxos: newTxos,
|
||||
pendingMassClaimTxid: newPendingMassClaimTxid,
|
||||
pendingConsolidateTxid: newPendingConsolidateTxid,
|
||||
};
|
||||
} else {
|
||||
const newPendingSet = new Set([...pendingTxos, ...txids]);
|
||||
return { ...state, pendingTxos: Array.from(newPendingSet) };
|
||||
}
|
||||
},
|
||||
|
||||
[ACTIONS.UPDATE_TXO_FETCH_PARAMS]: (state: WalletState, action) => {
|
||||
return {
|
||||
...state,
|
||||
txoFetchParams: action.data,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.FETCH_SUPPORTS_STARTED]: (state: WalletState) => ({
|
||||
...state,
|
||||
fetchingSupports: true,
|
||||
|
@ -140,7 +305,53 @@ export const walletReducer = handleActions(
|
|||
return {
|
||||
...state,
|
||||
supports: byOutpoint,
|
||||
abandoningSupportsById: currentlyAbandoning,
|
||||
abandoningSupportsByOutpoint: currentlyAbandoning,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.ABANDON_CLAIM_SUPPORT_STARTED]: (state: WalletState, action: any): WalletState => {
|
||||
return {
|
||||
...state,
|
||||
abandonClaimSupportError: undefined,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.ABANDON_CLAIM_SUPPORT_PREVIEW]: (state: WalletState, action: any): WalletState => {
|
||||
return {
|
||||
...state,
|
||||
abandonClaimSupportError: undefined,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.ABANDON_CLAIM_SUPPORT_COMPLETED]: (state: WalletState, action: any): WalletState => {
|
||||
const {
|
||||
claimId,
|
||||
type,
|
||||
txid,
|
||||
effective,
|
||||
}: { claimId: string, type: string, txid: string, effective: string } = action.data;
|
||||
const pendingtxs = Object.assign({}, state.pendingSupportTransactions);
|
||||
|
||||
pendingtxs[claimId] = { txid, type, effective };
|
||||
|
||||
return {
|
||||
...state,
|
||||
pendingSupportTransactions: pendingtxs,
|
||||
abandonClaimSupportError: undefined,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.ABANDON_CLAIM_SUPPORT_FAILED]: (state: WalletState, action: any): WalletState => {
|
||||
return {
|
||||
...state,
|
||||
abandonClaimSupportError: action.data,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.PENDING_SUPPORTS_UPDATED]: (state: WalletState, action: any): WalletState => {
|
||||
return {
|
||||
...state,
|
||||
pendingSupportTransactions: action.data,
|
||||
};
|
||||
},
|
||||
|
||||
|
|
|
@@ -1,21 +1,35 @@
// @flow
|
||||
import { normalizeURI, buildURI, parseURI } from 'lbryURI';
|
||||
import {
|
||||
selectResolvedSearchResultsByQuery,
|
||||
selectSearchUrisByQuery,
|
||||
} from 'redux/selectors/search';
|
||||
import { normalizeURI, parseURI } from 'lbryURI';
|
||||
import { selectSupportsByOutpoint } from 'redux/selectors/wallet';
|
||||
import { createSelector } from 'reselect';
|
||||
import { isClaimNsfw, createNormalizedClaimSearchKey } from 'util/claim';
|
||||
import { getSearchQueryString } from 'util/query-params';
|
||||
import { PAGE_SIZE } from 'constants/claim';
|
||||
import { isClaimNsfw, filterClaims } from 'util/claim';
|
||||
import * as CLAIM from 'constants/claim';
|
||||
|
||||
const selectState = state => state.claims || {};
|
||||
|
||||
export const selectClaimsById = createSelector(
|
||||
export const selectById = createSelector(
|
||||
selectState,
|
||||
state => state.byId || {}
|
||||
);
|
||||
|
||||
export const selectPendingClaimsById = createSelector(
|
||||
selectState,
|
||||
state => state.pendingById || {}
|
||||
);
|
||||
|
||||
export const selectClaimsById = createSelector(
|
||||
selectById,
|
||||
selectPendingClaimsById,
|
||||
(byId, pendingById) => {
|
||||
return Object.assign(byId, pendingById); // do I need merged to keep metadata?
|
||||
}
|
||||
);
|
||||
|
||||
export const selectClaimIdsByUri = createSelector(
|
||||
selectState,
|
||||
state => state.claimsByUri || {}
|
||||
);
|
||||
|
||||
export const selectCurrentChannelPage = createSelector(
|
||||
selectState,
|
||||
state => state.currentChannelPage || 1
|
||||
|
@ -42,10 +56,9 @@ export const selectRepostError = createSelector(
|
|||
);
|
||||
|
||||
export const selectClaimsByUri = createSelector(
|
||||
selectState,
|
||||
selectClaimIdsByUri,
|
||||
selectClaimsById,
|
||||
(state, byId) => {
|
||||
const byUri = state.claimsByUri || {};
|
||||
(byUri, byId) => {
|
||||
const claims = {};
|
||||
|
||||
Object.keys(byUri).forEach(uri => {
|
||||
|
@ -70,82 +83,91 @@ export const selectAllClaimsByChannel = createSelector(
|
|||
state => state.paginatedClaimsByChannel || {}
|
||||
);
|
||||
|
||||
export const selectPendingById = createSelector(
|
||||
export const selectPendingIds = createSelector(
|
||||
selectState,
|
||||
state => state.pendingById || {}
|
||||
state => Object.keys(state.pendingById) || []
|
||||
);
|
||||
|
||||
export const selectPendingClaims = createSelector(
|
||||
selectState,
|
||||
state => Object.values(state.pendingById || [])
|
||||
selectPendingClaimsById,
|
||||
pendingById => Object.values(pendingById)
|
||||
);
|
||||
|
||||
export const makeSelectClaimIsPending = (uri: string) =>
|
||||
createSelector(
|
||||
selectPendingById,
|
||||
pendingById => {
|
||||
let claimId;
|
||||
|
||||
try {
|
||||
const { isChannel, channelClaimId, streamClaimId } = parseURI(uri);
|
||||
claimId = isChannel ? channelClaimId : streamClaimId;
|
||||
} catch (e) {}
|
||||
selectClaimIdsByUri,
|
||||
selectPendingClaimsById,
|
||||
(idsByUri, pendingById) => {
|
||||
const claimId = idsByUri[normalizeURI(uri)];
|
||||
|
||||
if (claimId) {
|
||||
return Boolean(pendingById[claimId]);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectPendingByUri = (uri: string) =>
|
||||
export const makeSelectClaimIdIsPending = (claimId: string) =>
|
||||
createSelector(
|
||||
selectPendingById,
|
||||
selectPendingClaimsById,
|
||||
pendingById => {
|
||||
const { isChannel, channelClaimId, streamClaimId } = parseURI(uri);
|
||||
const claimId = isChannel ? channelClaimId : streamClaimId;
|
||||
return pendingById[claimId];
|
||||
return Boolean(pendingById[claimId]);
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectClaimIdForUri = (uri: string) =>
|
||||
createSelector(
|
||||
selectClaimIdsByUri,
|
||||
claimIds => claimIds[uri]
|
||||
);
|
||||
|
||||
export const selectReflectingById = createSelector(
|
||||
selectState,
|
||||
state => state.reflectingById
|
||||
);
|
||||
|
||||
export const makeSelectClaimForClaimId = (claimId: string) =>
|
||||
createSelector(
|
||||
selectClaimsById,
|
||||
byId => byId[claimId]
|
||||
);
|
||||
|
||||
export const makeSelectClaimForUri = (uri: string, returnRepost: boolean = true) =>
|
||||
createSelector(
|
||||
selectClaimsByUri,
|
||||
selectPendingById,
|
||||
(byUri, pendingById) => {
|
||||
// Check if a claim is pending first
|
||||
// It won't be in claimsByUri because resolving it will return nothing
|
||||
|
||||
let valid;
|
||||
selectClaimIdsByUri,
|
||||
selectClaimsById,
|
||||
(byUri, byId) => {
|
||||
let validUri;
|
||||
let channelClaimId;
|
||||
let streamClaimId;
|
||||
let isChannel;
|
||||
try {
|
||||
({ isChannel, channelClaimId, streamClaimId } = parseURI(uri));
|
||||
valid = true;
|
||||
validUri = true;
|
||||
} catch (e) {}
|
||||
|
||||
if (valid && byUri) {
|
||||
const claimId = isChannel ? channelClaimId : streamClaimId;
|
||||
const pendingClaim = pendingById[claimId];
|
||||
if (validUri && byUri) {
|
||||
const claimId = uri && byUri[normalizeURI(uri)];
|
||||
const claim = byId[claimId];
|
||||
|
||||
if (pendingClaim) {
|
||||
return pendingClaim;
|
||||
// Make sure to return the claim as is so apps can check if it's been resolved before (null) or still needs to be resolved (undefined)
|
||||
if (claimId === null) {
|
||||
return null;
|
||||
} else if (claimId === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const claim = byUri[normalizeURI(uri)];
|
||||
if (claim === undefined || claim === null) {
|
||||
// Make sure to return the claim as is so apps can check if it's been resolved before (null) or still needs to be resolved (undefined)
|
||||
return claim;
|
||||
}
|
||||
|
||||
const repostedClaim = claim.reposted_claim;
|
||||
const repostedClaim = claim && claim.reposted_claim;
|
||||
if (repostedClaim && returnRepost) {
|
||||
const channelUrl = claim.signing_channel && claim.signing_channel.canonical_url;
|
||||
const channelUrl =
|
||||
claim.signing_channel &&
|
||||
(claim.signing_channel.canonical_url || claim.signing_channel.permanent_url);
|
||||
|
||||
return {
|
||||
...repostedClaim,
|
||||
repost_url: uri,
|
||||
repost_url: normalizeURI(uri),
|
||||
repost_channel_url: channelUrl,
|
||||
repost_bid_amount: claim && claim.meta && claim.meta.effective_amount,
|
||||
};
|
||||
} else {
|
||||
return claim;
|
||||
|
@ -156,7 +178,22 @@ export const makeSelectClaimForUri = (uri: string, returnRepost: boolean = true)
|
|||
|
||||
export const selectMyClaimsRaw = createSelector(
|
||||
selectState,
|
||||
state => state.myClaims
|
||||
selectClaimsById,
|
||||
(state, byId) => {
|
||||
const ids = state.myClaims;
|
||||
if (!ids) {
|
||||
return ids;
|
||||
}
|
||||
|
||||
const claims = [];
|
||||
ids.forEach(id => {
|
||||
if (byId[id]) {
|
||||
// I'm not sure why this check is necessary, but it ought to be a quick fix for https://github.com/lbryio/lbry-desktop/issues/544
|
||||
claims.push(byId[id]);
|
||||
}
|
||||
});
|
||||
return claims;
|
||||
}
|
||||
);
|
||||
|
||||
export const selectAbandoningIds = createSelector(
|
||||
|
@ -164,6 +201,22 @@ export const selectAbandoningIds = createSelector(
|
|||
state => Object.keys(state.abandoningById || {})
|
||||
);
|
||||
|
||||
export const makeSelectAbandoningClaimById = (claimId: string) =>
|
||||
createSelector(
|
||||
selectAbandoningIds,
|
||||
ids => ids.includes(claimId)
|
||||
);
|
||||
|
||||
export const makeSelectIsAbandoningClaimForUri = (uri: string) =>
|
||||
createSelector(
|
||||
selectClaimIdsByUri,
|
||||
selectAbandoningIds,
|
||||
(claimIdsByUri, abandoningById) => {
|
||||
const claimId = claimIdsByUri[normalizeURI(uri)];
|
||||
return abandoningById.indexOf(claimId) >= 0;
|
||||
}
|
||||
);
|
||||
|
||||
export const selectMyActiveClaims = createSelector(
|
||||
selectMyClaimsRaw,
|
||||
selectAbandoningIds,
|
||||
|
@ -192,11 +245,74 @@ export const makeSelectClaimIsMine = (rawUri: string) => {
|
|||
return false;
|
||||
}
|
||||
|
||||
return claims && claims[uri] && claims[uri].claim_id && myClaims.has(claims[uri].claim_id);
|
||||
return (
|
||||
claims &&
|
||||
claims[uri] &&
|
||||
(claims[uri].is_my_output || (claims[uri].claim_id && myClaims.has(claims[uri].claim_id)))
|
||||
);
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
export const selectMyPurchases = createSelector(
|
||||
selectState,
|
||||
state => state.myPurchases
|
||||
);
|
||||
|
||||
export const selectPurchaseUriSuccess = createSelector(
|
||||
selectState,
|
||||
state => state.purchaseUriSuccess
|
||||
);
|
||||
|
||||
export const selectMyPurchasesCount = createSelector(
|
||||
selectState,
|
||||
state => state.myPurchasesPageTotalResults
|
||||
);
|
||||
|
||||
export const selectIsFetchingMyPurchases = createSelector(
|
||||
selectState,
|
||||
state => state.fetchingMyPurchases
|
||||
);
|
||||
|
||||
export const selectFetchingMyPurchasesError = createSelector(
|
||||
selectState,
|
||||
state => state.fetchingMyPurchasesError
|
||||
);
|
||||
|
||||
export const makeSelectMyPurchasesForPage = (query: ?string, page: number = 1) =>
|
||||
createSelector(
|
||||
selectMyPurchases,
|
||||
selectClaimsByUri,
|
||||
(myPurchases: Array<string>, claimsByUri: { [string]: Claim }) => {
|
||||
if (!myPurchases) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (!query) {
|
||||
// ensure no duplicates from double purchase bugs
|
||||
return [...new Set(myPurchases)];
|
||||
}
|
||||
|
||||
const fileInfos = myPurchases.map(uri => claimsByUri[uri]);
|
||||
const matchingFileInfos = filterClaims(fileInfos, query);
|
||||
const start = (Number(page) - 1) * Number(CLAIM.PAGE_SIZE);
|
||||
const end = Number(page) * Number(CLAIM.PAGE_SIZE);
|
||||
return matchingFileInfos && matchingFileInfos.length
|
||||
? matchingFileInfos
|
||||
.slice(start, end)
|
||||
.map(fileInfo => fileInfo.canonical_url || fileInfo.permanent_url)
|
||||
: [];
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectClaimWasPurchased = (uri: string) =>
|
||||
createSelector(
|
||||
makeSelectClaimForUri(uri),
|
||||
claim => {
|
||||
return claim && claim.purchase_receipt !== undefined;
|
||||
}
|
||||
);
|
||||
|
||||
export const selectAllFetchingChannelClaims = createSelector(
|
||||
selectState,
|
||||
state => state.fetchingChannelClaims || {}
|
||||
|
@ -222,6 +338,7 @@ export const makeSelectClaimsInChannelForPage = (uri: string, page?: number) =>
|
|||
}
|
||||
);
|
||||
|
||||
// THIS IS LEFT OVER FROM ONE TAB CHANNEL_CONTENT
|
||||
export const makeSelectTotalClaimsInChannelSearch = (uri: string) =>
|
||||
createSelector(
|
||||
selectClaimsById,
|
||||
|
@ -232,6 +349,7 @@ export const makeSelectTotalClaimsInChannelSearch = (uri: string) =>
|
|||
}
|
||||
);
|
||||
|
||||
// THIS IS LEFT OVER FROM ONE_TAB CHANNEL CONTENT
|
||||
export const makeSelectTotalPagesInChannelSearch = (uri: string) =>
|
||||
createSelector(
|
||||
selectClaimsById,
|
||||
|
@ -242,21 +360,6 @@ export const makeSelectTotalPagesInChannelSearch = (uri: string) =>
|
|||
}
|
||||
);
|
||||
|
||||
export const makeSelectClaimsInChannelForCurrentPageState = (uri: string) =>
|
||||
createSelector(
|
||||
selectClaimsById,
|
||||
selectAllClaimsByChannel,
|
||||
selectCurrentChannelPage,
|
||||
(byId, allClaims, page) => {
|
||||
const byChannel = allClaims[uri] || {};
|
||||
const claimIds = byChannel[page || 1];
|
||||
|
||||
if (!claimIds) return claimIds;
|
||||
|
||||
return claimIds.map(claimId => byId[claimId]);
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectMetadataForUri = (uri: string) =>
|
||||
createSelector(
|
||||
makeSelectClaimForUri(uri),
|
||||
|
@ -290,8 +393,8 @@ export const makeSelectDateForUri = (uri: string) =>
|
|||
(claim.value.release_time
|
||||
? claim.value.release_time * 1000
|
||||
: claim.meta && claim.meta.creation_timestamp
|
||||
? claim.meta.creation_timestamp * 1000
|
||||
: null);
|
||||
? claim.meta.creation_timestamp * 1000
|
||||
: null);
|
||||
if (!timestamp) {
|
||||
return undefined;
|
||||
}
|
||||
|
@ -308,6 +411,19 @@ export const makeSelectAmountForUri = (uri: string) =>
|
|||
}
|
||||
);
|
||||
|
||||
export const makeSelectEffectiveAmountForUri = (uri: string) =>
|
||||
createSelector(
|
||||
makeSelectClaimForUri(uri, false),
|
||||
claim => {
|
||||
return (
|
||||
claim &&
|
||||
claim.meta &&
|
||||
typeof claim.meta.effective_amount === 'string' &&
|
||||
Number(claim.meta.effective_amount)
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectContentTypeForUri = (uri: string) =>
|
||||
createSelector(
|
||||
makeSelectClaimForUri(uri),
|
||||
|
@ -322,7 +438,9 @@ export const makeSelectThumbnailForUri = (uri: string) =>
|
|||
makeSelectClaimForUri(uri),
|
||||
claim => {
|
||||
const thumbnail = claim && claim.value && claim.value.thumbnail;
|
||||
return thumbnail && thumbnail.url ? thumbnail.url.trim() : undefined;
|
||||
return thumbnail && thumbnail.url
|
||||
? thumbnail.url.trim().replace(/^http:\/\//i, 'https://')
|
||||
: undefined;
|
||||
}
|
||||
);
|
||||
|
||||
|
@ -331,7 +449,7 @@ export const makeSelectCoverForUri = (uri: string) =>
|
|||
makeSelectClaimForUri(uri),
|
||||
claim => {
|
||||
const cover = claim && claim.value && claim.value.cover;
|
||||
return cover && cover.url ? cover.url.trim() : undefined;
|
||||
return cover && cover.url ? cover.url.trim().replace(/^http:\/\//i, 'https://') : undefined;
|
||||
}
|
||||
);
|
||||
|
||||
|
@ -340,12 +458,33 @@ export const selectIsFetchingClaimListMine = createSelector(
|
|||
state => state.isFetchingClaimListMine
|
||||
);
|
||||
|
||||
export const selectMyClaimsPage = createSelector(
|
||||
selectState,
|
||||
state => state.myClaimsPageResults || []
|
||||
);
|
||||
|
||||
export const selectMyClaimsPageNumber = createSelector(
|
||||
selectState,
|
||||
state => (state.claimListMinePage && state.claimListMinePage.items) || [],
|
||||
|
||||
state => (state.txoPage && state.txoPage.page) || 1
|
||||
);
|
||||
|
||||
export const selectMyClaimsPageItemCount = createSelector(
|
||||
selectState,
|
||||
state => state.myClaimsPageTotalResults || 1
|
||||
);
|
||||
|
||||
export const selectFetchingMyClaimsPageError = createSelector(
|
||||
selectState,
|
||||
state => state.fetchingClaimListMinePageError
|
||||
);
|
||||
|
||||
export const selectMyClaims = createSelector(
|
||||
selectMyActiveClaims,
|
||||
selectClaimsById,
|
||||
selectAbandoningIds,
|
||||
selectPendingClaims,
|
||||
(myClaimIds, byId, abandoningIds, pendingClaims) => {
|
||||
(myClaimIds, byId, abandoningIds) => {
|
||||
const claims = [];
|
||||
|
||||
myClaimIds.forEach(id => {
|
||||
|
@ -354,30 +493,35 @@ export const selectMyClaims = createSelector(
|
|||
if (claim && abandoningIds.indexOf(id) === -1) claims.push(claim);
|
||||
});
|
||||
|
||||
return [...claims, ...pendingClaims];
|
||||
return [...claims];
|
||||
}
|
||||
);
|
||||
|
||||
export const selectMyClaimsWithoutChannels = createSelector(
|
||||
selectMyClaims,
|
||||
myClaims =>
|
||||
myClaims.filter(claim => !claim.name.match(/^@/)).sort((a, b) => a.timestamp - b.timestamp)
|
||||
myClaims
|
||||
.filter(claim => claim && !claim.name.match(/^@/))
|
||||
.sort((a, b) => a.timestamp - b.timestamp)
|
||||
);
|
||||
|
||||
export const selectMyClaimUrisWithoutChannels = createSelector(
|
||||
selectMyClaimsWithoutChannels,
|
||||
myClaims =>
|
||||
myClaims
|
||||
myClaims => {
|
||||
return myClaims
|
||||
.sort((a, b) => {
|
||||
if (!a.timestamp) {
|
||||
if (a.height < 1) {
|
||||
return -1;
|
||||
} else if (!b.timestamp) {
|
||||
} else if (b.height < 1) {
|
||||
return 1;
|
||||
} else {
|
||||
return b.timestamp - a.timestamp;
|
||||
}
|
||||
})
|
||||
.map(claim => `lbry://${claim.name}#${claim.claim_id}`)
|
||||
.map(claim => {
|
||||
return claim.canonical_url || claim.permanent_url;
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
export const selectAllMyClaimsByOutpoint = createSelector(
|
||||
|
@ -402,6 +546,11 @@ export const selectFetchingMyChannels = createSelector(
|
|||
state => state.fetchingMyChannels
|
||||
);
|
||||
|
||||
export const selectFetchingMyCollections = createSelector(
|
||||
selectState,
|
||||
state => state.fetchingMyCollections
|
||||
);
|
||||
|
||||
export const selectMyChannelClaims = createSelector(
|
||||
selectState,
|
||||
selectClaimsById,
|
||||
|
@ -423,6 +572,16 @@ export const selectMyChannelClaims = createSelector(
|
|||
}
|
||||
);
|
||||
|
||||
export const selectMyChannelUrls = createSelector(
|
||||
selectMyChannelClaims,
|
||||
claims => (claims ? claims.map(claim => claim.canonical_url || claim.permanent_url) : undefined)
|
||||
);
|
||||
|
||||
export const selectMyCollectionIds = createSelector(
|
||||
selectState,
|
||||
state => state.myCollectionClaims
|
||||
);
|
||||
|
||||
export const selectResolvingUris = createSelector(
|
||||
selectState,
|
||||
state => state.resolvingUris || []
|
||||
|
@ -449,16 +608,35 @@ export const selectChannelClaimCounts = createSelector(
|
|||
state => state.channelClaimCounts || {}
|
||||
);
|
||||
|
||||
export const makeSelectPendingClaimForUri = (uri: string) =>
|
||||
createSelector(
|
||||
selectPendingClaimsById,
|
||||
pendingById => {
|
||||
let uriStreamName;
|
||||
let uriChannelName;
|
||||
try {
|
||||
({ streamName: uriStreamName, channelName: uriChannelName } = parseURI(uri));
|
||||
} catch (e) {
|
||||
return null;
|
||||
}
|
||||
const pendingClaims = (Object.values(pendingById): any);
|
||||
const matchingClaim = pendingClaims.find((claim: GenericClaim) => {
|
||||
return claim.normalized_name === uriChannelName || claim.normalized_name === uriStreamName;
|
||||
});
|
||||
return matchingClaim || null;
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectTotalItemsForChannel = (uri: string) =>
|
||||
createSelector(
|
||||
selectChannelClaimCounts,
|
||||
byUri => byUri && byUri[uri]
|
||||
byUri => byUri && byUri[normalizeURI(uri)]
|
||||
);
|
||||
|
||||
export const makeSelectTotalPagesForChannel = (uri: string, pageSize: number = 10) =>
|
||||
createSelector(
|
||||
selectChannelClaimCounts,
|
||||
byUri => byUri && byUri[uri] && Math.ceil(byUri[uri] / pageSize)
|
||||
byUri => byUri && byUri[uri] && Math.ceil(byUri[normalizeURI(uri)] / pageSize)
|
||||
);
|
||||
|
||||
export const makeSelectNsfwCountFromUris = (uris: Array<string>) =>
|
||||
|
@ -474,27 +652,6 @@ export const makeSelectNsfwCountFromUris = (uris: Array<string>) =>
|
|||
}, 0)
|
||||
);
|
||||
|
||||
export const makeSelectNsfwCountForChannel = (uri: string) =>
|
||||
createSelector(
|
||||
selectClaimsById,
|
||||
selectAllClaimsByChannel,
|
||||
selectCurrentChannelPage,
|
||||
(byId, allClaims, page) => {
|
||||
const byChannel = allClaims[uri] || {};
|
||||
const claimIds = byChannel[page || 1];
|
||||
|
||||
if (!claimIds) return 0;
|
||||
|
||||
return claimIds.reduce((acc, claimId) => {
|
||||
const claim = byId[claimId];
|
||||
if (isClaimNsfw(claim)) {
|
||||
return acc + 1;
|
||||
}
|
||||
return acc;
|
||||
}, 0);
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectOmittedCountForChannel = (uri: string) =>
|
||||
createSelector(
|
||||
makeSelectTotalItemsForChannel(uri),
|
||||
|
@ -522,53 +679,6 @@ export const makeSelectClaimIsNsfw = (uri: string): boolean =>
|
|||
}
|
||||
);
|
||||
|
||||
export const makeSelectRecommendedContentForUri = (uri: string) =>
|
||||
createSelector(
|
||||
makeSelectClaimForUri(uri),
|
||||
selectSearchUrisByQuery,
|
||||
makeSelectClaimIsNsfw(uri),
|
||||
(claim, searchUrisByQuery, isMature) => {
|
||||
const atVanityURI = !uri.includes('#');
|
||||
|
||||
let recommendedContent;
|
||||
if (claim) {
|
||||
// always grab full URL - this can change once search returns canonical
|
||||
const currentUri = buildURI({ streamClaimId: claim.claim_id, streamName: claim.name });
|
||||
|
||||
const { title } = claim.value;
|
||||
|
||||
if (!title) {
|
||||
return;
|
||||
}
|
||||
|
||||
const options: {
|
||||
related_to?: string,
|
||||
nsfw?: boolean,
|
||||
isBackgroundSearch?: boolean,
|
||||
} = { related_to: claim.claim_id, isBackgroundSearch: true };
|
||||
|
||||
if (!isMature) {
|
||||
options['nsfw'] = false;
|
||||
}
|
||||
const searchQuery = getSearchQueryString(title.replace(/\//, ' '), options);
|
||||
|
||||
let searchUris = searchUrisByQuery[searchQuery];
|
||||
if (searchUris) {
|
||||
searchUris = searchUris.filter(searchUri => searchUri !== currentUri);
|
||||
recommendedContent = searchUris;
|
||||
}
|
||||
}
|
||||
|
||||
return recommendedContent;
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectFirstRecommendedFileForUri = (uri: string) =>
|
||||
createSelector(
|
||||
makeSelectRecommendedContentForUri(uri),
|
||||
recommendedContent => (recommendedContent ? recommendedContent[0] : null)
|
||||
);
|
||||
|
||||
// Returns the associated channel uri for a given claim uri
|
||||
// accepts a regular claim uri lbry://something
|
||||
// returns the channel uri that created this claim lbry://@channel
|
||||
|
@ -576,12 +686,40 @@ export const makeSelectChannelForClaimUri = (uri: string, includePrefix: boolean
|
|||
createSelector(
|
||||
makeSelectClaimForUri(uri),
|
||||
(claim: ?Claim) => {
|
||||
if (!claim || !claim.signing_channel || !claim.signing_channel.canonical_url) {
|
||||
if (!claim || !claim.signing_channel || !claim.is_channel_signature_valid) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const { canonical_url: canonicalUrl } = claim.signing_channel;
|
||||
return includePrefix ? canonicalUrl : canonicalUrl.slice('lbry://'.length);
|
||||
const { canonical_url: canonicalUrl, permanent_url: permanentUrl } = claim.signing_channel;
|
||||
|
||||
if (canonicalUrl) {
|
||||
return includePrefix ? canonicalUrl : canonicalUrl.slice('lbry://'.length);
|
||||
} else {
|
||||
return includePrefix ? permanentUrl : permanentUrl.slice('lbry://'.length);
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectChannelPermUrlForClaimUri = (uri: string, includePrefix: boolean = false) =>
|
||||
createSelector(
|
||||
makeSelectClaimForUri(uri),
|
||||
(claim: ?Claim) => {
|
||||
if (claim && claim.value_type === 'channel') {
|
||||
return claim.permanent_url;
|
||||
}
|
||||
if (!claim || !claim.signing_channel || !claim.is_channel_signature_valid) {
|
||||
return null;
|
||||
}
|
||||
return claim.signing_channel.permanent_url;
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectMyChannelPermUrlForName = (name: string) =>
|
||||
createSelector(
|
||||
selectMyChannelClaims,
|
||||
claims => {
|
||||
const matchingClaim = claims && claims.find(claim => claim.name === name);
|
||||
return matchingClaim ? matchingClaim.permanent_url : null;
|
||||
}
|
||||
);
|
||||
|
||||
|
@ -636,7 +774,7 @@ export const makeSelectSupportsForUri = (uri: string) =>
|
|||
selectSupportsByOutpoint,
|
||||
makeSelectClaimForUri(uri),
|
||||
(byOutpoint, claim: ?StreamClaim) => {
|
||||
if (!claim || !claim.is_mine) {
|
||||
if (!claim || !claim.is_my_output) {
|
||||
return null;
|
||||
}
|
||||
|
||||
|
@ -663,12 +801,23 @@ export const selectUpdateChannelError = createSelector(
|
|||
state => state.updateChannelError
|
||||
);
|
||||
|
||||
export const makeSelectReflectingClaimForUri = (uri: string) =>
|
||||
createSelector(
|
||||
selectClaimIdsByUri,
|
||||
selectReflectingById,
|
||||
(claimIdsByUri, reflectingById) => {
|
||||
const claimId = claimIdsByUri[normalizeURI(uri)];
|
||||
return reflectingById[claimId];
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectMyStreamUrlsForPage = (page: number = 1) =>
|
||||
createSelector(
|
||||
selectMyClaimUrisWithoutChannels,
|
||||
urls => {
|
||||
const start = (Number(page) - 1) * Number(PAGE_SIZE);
|
||||
const end = Number(page) * Number(PAGE_SIZE);
|
||||
const start = (Number(page) - 1) * Number(CLAIM.PAGE_SIZE);
|
||||
const end = Number(page) * Number(CLAIM.PAGE_SIZE);
|
||||
|
||||
return urls && urls.length ? urls.slice(start, end) : [];
|
||||
}
|
||||
);
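For orientation, the paging arithmetic above is the standard (page - 1) * pageSize slice. A tiny worked example, assuming CLAIM.PAGE_SIZE is 20 (the real value comes from constants/claim and may differ):

const PAGE_SIZE = 20; // assumed for illustration
const page = 2;
const start = (page - 1) * PAGE_SIZE; // 20
const end = page * PAGE_SIZE; // 40
// urls.slice(20, 40) returns the second page of stream URLs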
|
||||
|
@ -678,42 +827,96 @@ export const selectMyStreamUrlsCount = createSelector(
|
|||
channels => channels.length
|
||||
);
|
||||
|
||||
export const makeSelectResolvedRecommendedContentForUri = (uri: string, size: number) =>
|
||||
export const makeSelectTagInClaimOrChannelForUri = (uri: string, tag: string) =>
|
||||
createSelector(
|
||||
makeSelectClaimForUri(uri),
|
||||
selectResolvedSearchResultsByQuery,
|
||||
makeSelectClaimIsNsfw(uri),
|
||||
(claim, resolvedResultsByQuery, isMature) => {
|
||||
const atVanityURI = !uri.includes('#');
|
||||
|
||||
let recommendedContent;
|
||||
if (claim) {
|
||||
// always grab full URL - this can change once search returns canonical
|
||||
const currentUri = buildURI({ streamClaimId: claim.claim_id, streamName: claim.name });
|
||||
|
||||
const { title } = claim.value;
|
||||
|
||||
if (!title) {
|
||||
return;
|
||||
}
|
||||
|
||||
const options: {
|
||||
related_to?: string,
|
||||
nsfw?: boolean,
|
||||
isBackgroundSearch?: boolean,
|
||||
} = { related_to: claim.claim_id, size, isBackgroundSearch: false };
|
||||
|
||||
const searchQuery = getSearchQueryString(title.replace(/\//, ' '), options);
|
||||
let results = resolvedResultsByQuery[searchQuery];
|
||||
if (results) {
|
||||
results = results.filter(
|
||||
result =>
|
||||
buildURI({ streamClaimId: result.claimId, streamName: result.name }) !== currentUri
|
||||
);
|
||||
recommendedContent = results;
|
||||
}
|
||||
}
|
||||
|
||||
return recommendedContent;
|
||||
claim => {
|
||||
const claimTags = (claim && claim.value && claim.value.tags) || [];
|
||||
const channelTags =
|
||||
(claim &&
|
||||
claim.signing_channel &&
|
||||
claim.signing_channel.value &&
|
||||
claim.signing_channel.value.tags) ||
|
||||
[];
|
||||
return claimTags.includes(tag) || channelTags.includes(tag);
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectClaimHasSource = (uri: string) =>
|
||||
createSelector(
|
||||
makeSelectClaimForUri(uri),
|
||||
claim => {
|
||||
if (!claim) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return Boolean(claim.value.source);
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectClaimIsStreamPlaceholder = (uri: string) =>
|
||||
createSelector(
|
||||
makeSelectClaimForUri(uri),
|
||||
claim => {
|
||||
if (!claim) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return Boolean(claim.value_type === 'stream' && !claim.value.source);
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectTotalStakedAmountForChannelUri = (uri: string) =>
|
||||
createSelector(
|
||||
makeSelectClaimForUri(uri),
|
||||
claim => {
|
||||
if (!claim || !claim.amount || !claim.meta || !claim.meta.support_amount) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return parseFloat(claim.amount) + parseFloat(claim.meta.support_amount) || 0;
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectStakedLevelForChannelUri = (uri: string) =>
|
||||
createSelector(
|
||||
makeSelectTotalStakedAmountForChannelUri(uri),
|
||||
amount => {
|
||||
let level = 1;
|
||||
switch (true) {
|
||||
case amount >= CLAIM.LEVEL_2_STAKED_AMOUNT && amount < CLAIM.LEVEL_3_STAKED_AMOUNT:
|
||||
level = 2;
|
||||
break;
|
||||
case amount >= CLAIM.LEVEL_3_STAKED_AMOUNT && amount < CLAIM.LEVEL_4_STAKED_AMOUNT:
|
||||
level = 3;
|
||||
break;
|
||||
case amount >= CLAIM.LEVEL_4_STAKED_AMOUNT && amount < CLAIM.LEVEL_5_STAKED_AMOUNT:
|
||||
level = 4;
|
||||
break;
|
||||
case amount >= CLAIM.LEVEL_5_STAKED_AMOUNT:
|
||||
level = 5;
|
||||
break;
|
||||
}
|
||||
return level;
|
||||
}
|
||||
);
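As a rough sketch of the level mapping above: the channel's total stake is claim.amount plus claim.meta.support_amount, and the switch assigns levels 1 through 5 against the CLAIM.LEVEL_*_STAKED_AMOUNT thresholds. The threshold values below are placeholders, not the real constants:

// Equivalent cascade with made-up thresholds, for illustration only.
const LEVEL_2 = 1;
const LEVEL_3 = 50;
const LEVEL_4 = 250;
const LEVEL_5 = 1000;

function stakedLevel(amount) {
  if (amount >= LEVEL_5) return 5;
  if (amount >= LEVEL_4) return 4;
  if (amount >= LEVEL_3) return 3;
  if (amount >= LEVEL_2) return 2;
  return 1;
}

stakedLevel(0.5); // 1
stakedLevel(300); // 4 (>= LEVEL_4 and < LEVEL_5)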
|
||||
|
||||
export const selectUpdatingCollection = createSelector(
|
||||
selectState,
|
||||
state => state.updatingCollection
|
||||
);
|
||||
|
||||
export const selectUpdateCollectionError = createSelector(
|
||||
selectState,
|
||||
state => state.updateCollectionError
|
||||
);
|
||||
|
||||
export const selectCreatingCollection = createSelector(
|
||||
selectState,
|
||||
state => state.creatingCollection
|
||||
);
|
||||
|
||||
export const selectCreateCollectionError = createSelector(
|
||||
selectState,
|
||||
state => state.createCollectionError
|
||||
);
|
||||
|
|
src/redux/selectors/collections.js (new file, +311 lines)
@ -0,0 +1,311 @@
|
|||
// @flow
|
||||
import fromEntries from '@ungap/from-entries';
|
||||
import { createSelector } from 'reselect';
|
||||
import {
|
||||
selectMyCollectionIds,
|
||||
makeSelectClaimForUri,
|
||||
selectClaimsByUri,
|
||||
} from 'redux/selectors/claims';
|
||||
import { parseURI } from 'lbryURI';
|
||||
|
||||
const selectState = (state: { collections: CollectionState }) => state.collections;
|
||||
|
||||
export const selectSavedCollectionIds = createSelector(
|
||||
selectState,
|
||||
collectionState => collectionState.saved
|
||||
);
|
||||
|
||||
export const selectBuiltinCollections = createSelector(
|
||||
selectState,
|
||||
state => state.builtin
|
||||
);
|
||||
export const selectResolvedCollections = createSelector(
|
||||
selectState,
|
||||
state => state.resolved
|
||||
);
|
||||
|
||||
export const selectMyUnpublishedCollections = createSelector(
|
||||
selectState,
|
||||
state => state.unpublished
|
||||
);
|
||||
|
||||
export const selectMyEditedCollections = createSelector(
|
||||
selectState,
|
||||
state => state.edited
|
||||
);
|
||||
|
||||
export const selectPendingCollections = createSelector(
|
||||
selectState,
|
||||
state => state.pending
|
||||
);
|
||||
|
||||
export const makeSelectEditedCollectionForId = (id: string) =>
|
||||
createSelector(
|
||||
selectMyEditedCollections,
|
||||
eLists => eLists[id]
|
||||
);
|
||||
|
||||
export const makeSelectPendingCollectionForId = (id: string) =>
|
||||
createSelector(
|
||||
selectPendingCollections,
|
||||
pending => pending[id]
|
||||
);
|
||||
|
||||
export const makeSelectPublishedCollectionForId = (id: string) =>
|
||||
createSelector(
|
||||
selectResolvedCollections,
|
||||
rLists => rLists[id]
|
||||
);
|
||||
|
||||
export const makeSelectUnpublishedCollectionForId = (id: string) =>
|
||||
createSelector(
|
||||
selectMyUnpublishedCollections,
|
||||
rLists => rLists[id]
|
||||
);
|
||||
|
||||
export const makeSelectCollectionIsMine = (id: string) =>
|
||||
createSelector(
|
||||
selectMyCollectionIds,
|
||||
selectMyUnpublishedCollections,
|
||||
selectBuiltinCollections,
|
||||
(publicIds, privateIds, builtinIds) => {
|
||||
return Boolean(publicIds.includes(id) || privateIds[id] || builtinIds[id]);
|
||||
}
|
||||
);
|
||||
|
||||
export const selectMyPublishedCollections = createSelector(
|
||||
selectResolvedCollections,
|
||||
selectPendingCollections,
|
||||
selectMyEditedCollections,
|
||||
selectMyCollectionIds,
|
||||
(resolved, pending, edited, myIds) => {
|
||||
// all resolved in myIds, plus those in pending and edited
|
||||
const myPublishedCollections = fromEntries(
|
||||
Object.entries(pending).concat(
|
||||
Object.entries(resolved).filter(
|
||||
([key, val]) =>
|
||||
myIds.includes(key) &&
|
||||
// $FlowFixMe
|
||||
!pending[key]
|
||||
)
|
||||
)
|
||||
);
|
||||
// now add in edited:
|
||||
Object.entries(edited).forEach(([id, item]) => {
|
||||
myPublishedCollections[id] = item;
|
||||
});
|
||||
return myPublishedCollections;
|
||||
}
|
||||
);
|
||||
|
||||
export const selectMyPublishedMixedCollections = createSelector(
|
||||
selectMyPublishedCollections,
|
||||
published => {
|
||||
const myCollections = fromEntries(
|
||||
// $FlowFixMe
|
||||
Object.entries(published).filter(([key, collection]) => {
|
||||
// $FlowFixMe
|
||||
return collection.type === 'collection';
|
||||
})
|
||||
);
|
||||
return myCollections;
|
||||
}
|
||||
);
|
||||
|
||||
export const selectMyPublishedPlaylistCollections = createSelector(
|
||||
selectMyPublishedCollections,
|
||||
published => {
|
||||
const myCollections = fromEntries(
|
||||
// $FlowFixMe
|
||||
Object.entries(published).filter(([key, collection]) => {
|
||||
// $FlowFixMe
|
||||
return collection.type === 'playlist';
|
||||
})
|
||||
);
|
||||
return myCollections;
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectMyPublishedCollectionForId = (id: string) =>
|
||||
createSelector(
|
||||
selectMyPublishedCollections,
|
||||
myPublishedCollections => myPublishedCollections[id]
|
||||
);
|
||||
|
||||
// export const selectSavedCollections = createSelector(
|
||||
// selectResolvedCollections,
|
||||
// selectSavedCollectionIds,
|
||||
// (resolved, myIds) => {
|
||||
// const mySavedCollections = fromEntries(
|
||||
// Object.entries(resolved).filter(([key, val]) => myIds.includes(key))
|
||||
// );
|
||||
// return mySavedCollections;
|
||||
// }
|
||||
// );
|
||||
|
||||
export const makeSelectIsResolvingCollectionForId = (id: string) =>
|
||||
createSelector(
|
||||
selectState,
|
||||
state => {
|
||||
return state.isResolvingCollectionById[id];
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectCollectionForId = (id: string) =>
|
||||
createSelector(
|
||||
selectBuiltinCollections,
|
||||
selectResolvedCollections,
|
||||
selectMyUnpublishedCollections,
|
||||
selectMyEditedCollections,
|
||||
selectPendingCollections,
|
||||
(bLists, rLists, uLists, eLists, pLists) => {
|
||||
const collection = bLists[id] || uLists[id] || eLists[id] || pLists[id] || rLists[id];
|
||||
return collection;
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectClaimUrlInCollection = (url: string) =>
|
||||
createSelector(
|
||||
selectBuiltinCollections,
|
||||
selectMyPublishedCollections,
|
||||
selectMyUnpublishedCollections,
|
||||
selectMyEditedCollections,
|
||||
selectPendingCollections,
|
||||
(bLists, myRLists, uLists, eLists, pLists) => {
|
||||
const collections = [bLists, uLists, eLists, myRLists, pLists];
|
||||
const itemsInCollections = [];
|
||||
collections.map(list => {
|
||||
Object.entries(list).forEach(([key, value]) => {
|
||||
// $FlowFixMe
|
||||
value.items.map(item => {
|
||||
itemsInCollections.push(item);
|
||||
});
|
||||
});
|
||||
});
|
||||
return itemsInCollections.includes(url);
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectCollectionForIdHasClaimUrl = (id: string, url: string) =>
|
||||
createSelector(
|
||||
makeSelectCollectionForId(id),
|
||||
collection => collection && collection.items.includes(url)
|
||||
);
|
||||
|
||||
export const makeSelectUrlsForCollectionId = (id: string) =>
|
||||
createSelector(
|
||||
makeSelectCollectionForId(id),
|
||||
collection => collection && collection.items
|
||||
);
|
||||
|
||||
export const makeSelectClaimIdsForCollectionId = (id: string) =>
|
||||
createSelector(
|
||||
makeSelectCollectionForId(id),
|
||||
collection => {
|
||||
const items = (collection && collection.items) || [];
|
||||
const ids = items.map(item => {
|
||||
const { claimId } = parseURI(item);
|
||||
return claimId;
|
||||
});
|
||||
return ids;
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectIndexForUrlInCollection = (url: string, id: string) =>
|
||||
createSelector(
|
||||
state => state.content.shuffleList,
|
||||
makeSelectUrlsForCollectionId(id),
|
||||
makeSelectClaimForUri(url),
|
||||
(shuffleState, urls, claim) => {
|
||||
const shuffleUrls = shuffleState && shuffleState.collectionId === id && shuffleState.newUrls;
|
||||
const listUrls = shuffleUrls || urls;
|
||||
|
||||
const index = listUrls && listUrls.findIndex(u => u === url);
|
||||
if (index > -1) {
|
||||
return index;
|
||||
} else if (claim) {
|
||||
const index = listUrls && listUrls.findIndex(u => u === claim.permanent_url);
|
||||
if (index > -1) return index;
|
||||
return claim;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectPreviousUrlForCollectionAndUrl = (id: string, url: string) =>
|
||||
createSelector(
|
||||
state => state.content.shuffleList,
|
||||
state => state.content.loopList,
|
||||
makeSelectIndexForUrlInCollection(url, id),
|
||||
makeSelectUrlsForCollectionId(id),
|
||||
(shuffleState, loopState, index, urls) => {
|
||||
const loopList = loopState && loopState.collectionId === id && loopState.loop;
|
||||
const shuffleUrls = shuffleState && shuffleState.collectionId === id && shuffleState.newUrls;
|
||||
|
||||
if (index > -1) {
|
||||
const listUrls = shuffleUrls || urls;
|
||||
let nextUrl;
|
||||
if (index === 0 && loopList) {
|
||||
nextUrl = listUrls[listUrls.length - 1];
|
||||
} else {
|
||||
nextUrl = listUrls[index - 1];
|
||||
}
|
||||
return nextUrl || null;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectNextUrlForCollectionAndUrl = (id: string, url: string) =>
|
||||
createSelector(
|
||||
state => state.content.shuffleList,
|
||||
state => state.content.loopList,
|
||||
makeSelectIndexForUrlInCollection(url, id),
|
||||
makeSelectUrlsForCollectionId(id),
|
||||
(shuffleState, loopState, index, urls) => {
|
||||
const loopList = loopState && loopState.collectionId === id && loopState.loop;
|
||||
const shuffleUrls = shuffleState && shuffleState.collectionId === id && shuffleState.newUrls;
|
||||
|
||||
if (index > -1) {
|
||||
const listUrls = shuffleUrls || urls;
|
||||
// We'll get the next playable url
|
||||
let remainingUrls = listUrls.slice(index + 1);
|
||||
if (!remainingUrls.length && loopList) {
|
||||
remainingUrls = listUrls.slice(0);
|
||||
}
|
||||
const nextUrl = remainingUrls && remainingUrls[0];
|
||||
return nextUrl || null;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectNameForCollectionId = (id: string) =>
|
||||
createSelector(
|
||||
makeSelectCollectionForId(id),
|
||||
collection => {
|
||||
return (collection && collection.name) || '';
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectCountForCollectionId = (id: string) =>
|
||||
createSelector(
|
||||
makeSelectCollectionForId(id),
|
||||
collection => {
|
||||
if (collection) {
|
||||
if (collection.itemCount !== undefined) {
|
||||
return collection.itemCount;
|
||||
}
|
||||
let itemCount = 0;
|
||||
collection.items.map(item => {
|
||||
if (item) {
|
||||
itemCount += 1;
|
||||
}
|
||||
});
|
||||
return itemCount;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
);
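For illustration, a minimal usage sketch of the new collection selectors against a hand-built state slice (ids, names and URLs below are invented; the playlist-navigation selectors additionally need the claims and content slices, which are omitted here):

// Assuming the selectors above are imported from 'redux/selectors/collections'.
const state = {
  collections: {
    builtin: {},
    resolved: {},
    unpublished: {
      abc123: { name: 'My list', type: 'playlist', items: ['lbry://one#a', 'lbry://two#b'] },
    },
    edited: {},
    pending: {},
    saved: [],
    isResolvingCollectionById: {},
  },
};

makeSelectNameForCollectionId('abc123')(state); // 'My list'
makeSelectUrlsForCollectionId('abc123')(state); // ['lbry://one#a', 'lbry://two#b']
makeSelectCountForCollectionId('abc123')(state); // 2
makeSelectCollectionForIdHasClaimUrl('abc123', 'lbry://one#a')(state); // true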
|
|
@ -1,66 +0,0 @@
|
|||
// @flow
|
||||
import { createSelector } from 'reselect';
|
||||
|
||||
const selectState = state => state.comments || {};
|
||||
|
||||
export const selectCommentsById = createSelector(
|
||||
selectState,
|
||||
state => state.commentById || {}
|
||||
);
|
||||
|
||||
export const selectCommentsByClaimId = createSelector(
|
||||
selectState,
|
||||
selectCommentsById,
|
||||
(state, byId) => {
|
||||
const byClaimId = state.byId || {};
|
||||
const comments = {};
|
||||
|
||||
// replace every comment_id in the list with the actual comment object
|
||||
Object.keys(byClaimId).forEach(claimId => {
|
||||
const commentIds = byClaimId[claimId];
|
||||
|
||||
comments[claimId] = Array(commentIds === null ? 0 : commentIds.length);
|
||||
for (let i = 0; i < commentIds.length; i++) {
|
||||
comments[claimId][i] = byId[commentIds[i]];
|
||||
}
|
||||
});
|
||||
|
||||
return comments;
|
||||
}
|
||||
);
|
||||
|
||||
// previously this used a mapping from claimId -> Array<Comments>
|
||||
/* export const selectCommentsById = createSelector(
|
||||
selectState,
|
||||
state => state.byId || {}
|
||||
); */
|
||||
export const selectCommentsByUri = createSelector(
|
||||
selectState,
|
||||
state => {
|
||||
const byUri = state.commentsByUri || {};
|
||||
const comments = {};
|
||||
Object.keys(byUri).forEach(uri => {
|
||||
const claimId = byUri[uri];
|
||||
if (claimId === null) {
|
||||
comments[uri] = null;
|
||||
} else {
|
||||
comments[uri] = claimId;
|
||||
}
|
||||
});
|
||||
|
||||
return comments;
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectCommentsForUri = (uri: string) =>
|
||||
createSelector(
|
||||
selectCommentsByClaimId,
|
||||
selectCommentsByUri,
|
||||
(byClaimId, byUri) => {
|
||||
const claimId = byUri[uri];
|
||||
return byClaimId && byClaimId[claimId];
|
||||
}
|
||||
);
|
||||
|
||||
// todo: allow SDK to retrieve user comments through comment_list
|
||||
// todo: implement selectors for selecting comments owned by user
|
|
@ -1,36 +0,0 @@
|
|||
// @flow
|
||||
import { createSelector } from 'reselect';
|
||||
import { makeSelectFileInfoForUri } from 'redux/selectors/file_info';
|
||||
|
||||
type State = { file: FileState };
|
||||
|
||||
export const selectState = (state: State): FileState => state.file || {};
|
||||
|
||||
export const selectPurchaseUriErrorMessage: (state: State) => string = createSelector(
|
||||
selectState,
|
||||
state => state.purchaseUriErrorMessage
|
||||
);
|
||||
|
||||
export const selectFailedPurchaseUris: (state: State) => Array<string> = createSelector(
|
||||
selectState,
|
||||
state => state.failedPurchaseUris
|
||||
);
|
||||
|
||||
export const selectPurchasedUris: (state: State) => Array<string> = createSelector(
|
||||
selectState,
|
||||
state => state.purchasedUris
|
||||
);
|
||||
|
||||
export const selectLastPurchasedUri: (state: State) => string = createSelector(
|
||||
selectState,
|
||||
state =>
|
||||
state.purchasedUris.length > 0 ? state.purchasedUris[state.purchasedUris.length - 1] : null
|
||||
);
|
||||
|
||||
export const makeSelectStreamingUrlForUri = (uri: string) =>
|
||||
createSelector(
|
||||
makeSelectFileInfoForUri(uri),
|
||||
fileInfo => {
|
||||
return fileInfo && fileInfo.streaming_url;
|
||||
}
|
||||
);
|
|
@ -212,6 +212,7 @@ function filterFileInfos(fileInfos, query) {
|
|||
const queryMatchRegExp = new RegExp(query, 'i');
|
||||
return fileInfos.filter(fileInfo => {
|
||||
const { metadata } = fileInfo;
|
||||
|
||||
return (
|
||||
(metadata.title && metadata.title.match(queryMatchRegExp)) ||
|
||||
(fileInfo.channel_name && fileInfo.channel_name.match(queryMatchRegExp)) ||
|
||||
|
@ -233,12 +234,12 @@ export const makeSelectSearchDownloadUrlsForPage = (query, page = 1) =>
|
|||
|
||||
return matchingFileInfos && matchingFileInfos.length
|
||||
? matchingFileInfos.slice(start, end).map(fileInfo =>
|
||||
buildURI({
|
||||
streamName: fileInfo.claim_name,
|
||||
channelName: fileInfo.channel_name,
|
||||
channelClaimId: fileInfo.channel_claim_id,
|
||||
})
|
||||
)
|
||||
buildURI({
|
||||
streamName: fileInfo.claim_name,
|
||||
channelName: fileInfo.channel_name,
|
||||
channelClaimId: fileInfo.channel_claim_id,
|
||||
})
|
||||
)
|
||||
: [];
|
||||
}
|
||||
);
|
||||
|
@ -250,3 +251,11 @@ export const makeSelectSearchDownloadUrlsCount = query =>
|
|||
return fileInfos && fileInfos.length ? filterFileInfos(fileInfos, query).length : 0;
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectStreamingUrlForUri = uri =>
|
||||
createSelector(
|
||||
makeSelectFileInfoForUri(uri),
|
||||
fileInfo => {
|
||||
return fileInfo && fileInfo.streaming_url;
|
||||
}
|
||||
);
|
||||
|
|
|
@ -40,17 +40,22 @@ export const selectIsStillEditing = createSelector(
|
|||
|
||||
export const selectPublishFormValues = createSelector(
|
||||
selectState,
|
||||
state => state.settings,
|
||||
selectIsStillEditing,
|
||||
(state, isStillEditing) => {
|
||||
const { pendingPublish, language, languages, ...formValues } = state;
|
||||
(publishState, settingsState, isStillEditing) => {
|
||||
const { languages, ...formValues } = publishState;
|
||||
const language = languages && languages.length && languages[0];
|
||||
const { clientSettings } = settingsState;
|
||||
const { language: languageSet } = clientSettings;
|
||||
|
||||
let actualLanguage;
|
||||
// Sets default if editing a claim with a set language
|
||||
if (!language && isStillEditing && languages && languages[0]) {
|
||||
actualLanguage = languages[0];
|
||||
if (!language && isStillEditing && languageSet) {
|
||||
actualLanguage = languageSet;
|
||||
} else {
|
||||
actualLanguage = language || 'en';
|
||||
actualLanguage = language || languageSet || 'en';
|
||||
}
|
||||
|
||||
return { ...formValues, language: actualLanguage };
|
||||
}
|
||||
);
|
||||
|
|
|
@ -1,193 +0,0 @@
|
|||
// @flow
|
||||
import { SEARCH_TYPES, SEARCH_OPTIONS } from 'constants/search';
|
||||
import { getSearchQueryString } from 'util/query-params';
|
||||
import { normalizeURI, parseURI } from 'lbryURI';
|
||||
import { createSelector } from 'reselect';
|
||||
|
||||
type State = { search: SearchState };
|
||||
|
||||
export const selectState = (state: State): SearchState => state.search;
|
||||
|
||||
export const selectSearchValue: (state: State) => string = createSelector(
|
||||
selectState,
|
||||
state => state.searchQuery
|
||||
);
|
||||
|
||||
export const selectSearchOptions: (state: State) => SearchOptions = createSelector(
|
||||
selectState,
|
||||
state => state.options
|
||||
);
|
||||
|
||||
export const selectSuggestions: (
|
||||
state: State
|
||||
) => { [string]: Array<SearchSuggestion> } = createSelector(
|
||||
selectState,
|
||||
state => state.suggestions
|
||||
);
|
||||
|
||||
export const selectIsSearching: (state: State) => boolean = createSelector(
|
||||
selectState,
|
||||
state => state.searching
|
||||
);
|
||||
|
||||
export const selectSearchUrisByQuery: (
|
||||
state: State
|
||||
) => { [string]: Array<string> } = createSelector(
|
||||
selectState,
|
||||
state => state.urisByQuery
|
||||
);
|
||||
|
||||
export const makeSelectSearchUris = (query: string): ((state: State) => Array<string>) =>
|
||||
// replace statement below is kind of ugly, and repeated in doSearch action
|
||||
createSelector(
|
||||
selectSearchUrisByQuery,
|
||||
byQuery => byQuery[query ? query.replace(/^lbry:\/\//i, '').replace(/\//, ' ') : query]
|
||||
);
|
||||
|
||||
export const selectResolvedSearchResultsByQuery: (
|
||||
state: State
|
||||
) => { [string]: Array<ResolvedSearchResult> } = createSelector(
|
||||
selectState,
|
||||
state => state.resolvedResultsByQuery
|
||||
);
|
||||
|
||||
export const selectResolvedSearchResultsByQueryLastPageReached: (
|
||||
state: State
|
||||
) => { [string]: Array<boolean> } = createSelector(
|
||||
selectState,
|
||||
state => state.resolvedResultsByQueryLastPageReached
|
||||
);
|
||||
|
||||
export const makeSelectResolvedSearchResults = (
|
||||
query: string
|
||||
): ((state: State) => Array<ResolvedSearchResult>) =>
|
||||
// replace statement below is kind of ugly, and repeated in doSearch action
|
||||
createSelector(
|
||||
selectResolvedSearchResultsByQuery,
|
||||
byQuery => byQuery[query ? query.replace(/^lbry:\/\//i, '').replace(/\//, ' ') : query]
|
||||
);
|
||||
|
||||
export const makeSelectResolvedSearchResultsLastPageReached = (
|
||||
query: string
|
||||
): ((state: State) => boolean) =>
|
||||
// replace statement below is kind of ugly, and repeated in doSearch action
|
||||
createSelector(
|
||||
selectResolvedSearchResultsByQueryLastPageReached,
|
||||
byQuery => byQuery[query ? query.replace(/^lbry:\/\//i, '').replace(/\//, ' ') : query]
|
||||
);
|
||||
|
||||
export const selectSearchBarFocused: boolean = createSelector(
|
||||
selectState,
|
||||
state => state.focused
|
||||
);
|
||||
|
||||
export const selectSearchSuggestions: Array<SearchSuggestion> = createSelector(
|
||||
selectSearchValue,
|
||||
selectSuggestions,
|
||||
(query: string, suggestions: { [string]: Array<string> }) => {
|
||||
if (!query) {
|
||||
return [];
|
||||
}
|
||||
const queryIsPrefix =
|
||||
query === 'lbry:' || query === 'lbry:/' || query === 'lbry://' || query === 'lbry://@';
|
||||
|
||||
if (queryIsPrefix) {
|
||||
// If it is a prefix, wait until something else comes to figure out what to do
|
||||
return [];
|
||||
} else if (query.startsWith('lbry://')) {
|
||||
// If it starts with a prefix, don't show any autocomplete results
|
||||
// They are probably typing/pasting in a lbry uri
|
||||
return [
|
||||
{
|
||||
value: query,
|
||||
type: query[7] === '@' ? SEARCH_TYPES.CHANNEL : SEARCH_TYPES.FILE,
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
let searchSuggestions = [];
|
||||
try {
|
||||
const uri = normalizeURI(query);
|
||||
const { channelName, streamName, isChannel } = parseURI(uri);
|
||||
searchSuggestions.push(
|
||||
{
|
||||
value: query,
|
||||
type: SEARCH_TYPES.SEARCH,
|
||||
},
|
||||
{
|
||||
value: uri,
|
||||
shorthand: isChannel ? channelName : streamName,
|
||||
type: isChannel ? SEARCH_TYPES.CHANNEL : SEARCH_TYPES.FILE,
|
||||
}
|
||||
);
|
||||
} catch (e) {
|
||||
searchSuggestions.push({
|
||||
value: query,
|
||||
type: SEARCH_TYPES.SEARCH,
|
||||
});
|
||||
}
|
||||
|
||||
searchSuggestions.push({
|
||||
value: query,
|
||||
type: SEARCH_TYPES.TAG,
|
||||
});
|
||||
|
||||
const apiSuggestions = suggestions[query] || [];
|
||||
if (apiSuggestions.length) {
|
||||
searchSuggestions = searchSuggestions.concat(
|
||||
apiSuggestions
|
||||
.filter(suggestion => suggestion !== query)
|
||||
.map(suggestion => {
|
||||
// determine if it's a channel
|
||||
try {
|
||||
const uri = normalizeURI(suggestion);
|
||||
const { channelName, streamName, isChannel } = parseURI(uri);
|
||||
|
||||
return {
|
||||
value: uri,
|
||||
shorthand: isChannel ? channelName : streamName,
|
||||
type: isChannel ? SEARCH_TYPES.CHANNEL : SEARCH_TYPES.FILE,
|
||||
};
|
||||
} catch (e) {
|
||||
// search result includes some character that isn't valid in claim names
|
||||
return {
|
||||
value: suggestion,
|
||||
type: SEARCH_TYPES.SEARCH,
|
||||
};
|
||||
}
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
return searchSuggestions;
|
||||
}
|
||||
);
|
||||
|
||||
// Creates a query string based on the state in the search reducer
|
||||
// Can be overrided by passing in custom sizes/from values for other areas pagination
|
||||
|
||||
type CustomOptions = {
|
||||
isBackgroundSearch?: boolean,
|
||||
size?: number,
|
||||
from?: number,
|
||||
related_to?: string,
|
||||
nsfw?: boolean,
|
||||
}
|
||||
|
||||
export const makeSelectQueryWithOptions = (
|
||||
customQuery: ?string,
|
||||
options: CustomOptions,
|
||||
) =>
|
||||
createSelector(
|
||||
selectSearchValue,
|
||||
selectSearchOptions,
|
||||
(query, defaultOptions) => {
|
||||
const searchOptions = { ...defaultOptions, ...options };
|
||||
const queryString = getSearchQueryString(
|
||||
customQuery || query,
|
||||
searchOptions,
|
||||
);
|
||||
|
||||
return queryString;
|
||||
}
|
||||
);
|
|
@ -1,47 +0,0 @@
|
|||
// @flow
|
||||
import { createSelector } from 'reselect';
|
||||
|
||||
const selectState = (state: { tags: TagState }) => state.tags || {};
|
||||
|
||||
export const selectKnownTagsByName = createSelector(
|
||||
selectState,
|
||||
(state: TagState): KnownTags => state.knownTags
|
||||
);
|
||||
|
||||
export const selectFollowedTagsList = createSelector(
|
||||
selectState,
|
||||
(state: TagState): Array<string> => state.followedTags.filter(tag => typeof tag === 'string')
|
||||
);
|
||||
|
||||
export const selectFollowedTags = createSelector(
|
||||
selectFollowedTagsList,
|
||||
(followedTags: Array<string>): Array<Tag> =>
|
||||
followedTags
|
||||
.map(tag => ({ name: tag.toLowerCase() }))
|
||||
.sort((a, b) => a.name.localeCompare(b.name))
|
||||
);
|
||||
|
||||
export const selectUnfollowedTags = createSelector(
|
||||
selectKnownTagsByName,
|
||||
selectFollowedTagsList,
|
||||
(tagsByName: KnownTags, followedTags: Array<string>): Array<Tag> => {
|
||||
const followedTagsSet = new Set(followedTags);
|
||||
let tagsToReturn = [];
|
||||
Object.keys(tagsByName).forEach(key => {
|
||||
if (!followedTagsSet.has(key)) {
|
||||
const { name } = tagsByName[key];
|
||||
tagsToReturn.push({ name: name.toLowerCase() });
|
||||
}
|
||||
});
|
||||
|
||||
return tagsToReturn;
|
||||
}
|
||||
);
|
||||
|
||||
export const makeSelectIsFollowingTag = (tag: string) =>
|
||||
createSelector(
|
||||
selectFollowedTags,
|
||||
followedTags => {
|
||||
return followedTags.some(followedTag => followedTag.name === tag.toLowerCase());
|
||||
}
|
||||
);
|
|
@ -1,7 +1,8 @@
|
|||
import { createSelector } from 'reselect';
|
||||
import * as TRANSACTIONS from 'constants/transaction_types';
|
||||
import { PAGE_SIZE, LATEST_PAGE_SIZE } from 'constants/transaction_list';
|
||||
|
||||
import { selectClaimIdsByUri } from 'redux/selectors/claims';
|
||||
import parseData from 'util/parse-data';
|
||||
export const selectState = state => state.wallet || {};
|
||||
|
||||
export const selectWalletState = selectState;
|
||||
|
@ -21,6 +22,33 @@ export const selectWalletEncryptSucceeded = createSelector(
|
|||
state => state.walletEncryptSucceded
|
||||
);
|
||||
|
||||
export const selectPendingSupportTransactions = createSelector(
|
||||
selectState,
|
||||
state => state.pendingSupportTransactions
|
||||
);
|
||||
|
||||
export const selectPendingOtherTransactions = createSelector(
|
||||
selectState,
|
||||
state => state.pendingTxos
|
||||
);
|
||||
|
||||
export const selectAbandonClaimSupportError = createSelector(
|
||||
selectState,
|
||||
state => state.abandonClaimSupportError
|
||||
);
|
||||
|
||||
export const makeSelectPendingAmountByUri = uri =>
|
||||
createSelector(
|
||||
selectClaimIdsByUri,
|
||||
selectPendingSupportTransactions,
|
||||
(claimIdsByUri, pendingSupports) => {
|
||||
const uriEntry = Object.entries(claimIdsByUri).find(([u, cid]) => u === uri);
|
||||
const claimId = uriEntry && uriEntry[1];
|
||||
const pendingSupport = claimId && pendingSupports[claimId];
|
||||
return pendingSupport ? pendingSupport.effective : undefined;
|
||||
}
|
||||
);
|
||||
|
||||
export const selectWalletEncryptResult = createSelector(
|
||||
selectState,
|
||||
state => state.walletEncryptResult
|
||||
|
@ -240,6 +268,27 @@ export const selectIsFetchingTransactions = createSelector(
|
|||
state => state.fetchingTransactions
|
||||
);
|
||||
|
||||
/**
|
||||
* CSV of 'selectTransactionItems'.
|
||||
*/
|
||||
export const selectTransactionsFile = createSelector(
|
||||
selectTransactionItems,
|
||||
transactions => {
|
||||
if (!transactions || transactions.length === 0) {
|
||||
// No data.
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const parsed = parseData(transactions, 'csv');
|
||||
if (!parsed) {
|
||||
// Invalid data, or failed to parse.
|
||||
return null;
|
||||
}
|
||||
|
||||
return parsed;
|
||||
}
|
||||
);
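The selector above distinguishes an empty wallet (undefined) from a serialization failure (null), so a caller can branch on the two cases. A hedged consumer sketch, where saveToDisk is a hypothetical helper:

const csv = selectTransactionsFile(state);
if (csv === undefined) {
  // No transactions in the store yet; nothing to export.
} else if (csv === null) {
  // Transactions exist but parseData() could not serialize them.
} else {
  saveToDisk('transactions.csv', csv); // hypothetical download/save helper
}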
|
||||
|
||||
export const selectIsSendingSupport = createSelector(
|
||||
selectState,
|
||||
state => state.sendingSupport
|
||||
|
@ -300,6 +349,36 @@ export const selectFilteredTransactions = createSelector(
|
|||
}
|
||||
);
|
||||
|
||||
export const selectTxoPageParams = createSelector(
|
||||
selectState,
|
||||
state => state.txoFetchParams
|
||||
);
|
||||
|
||||
export const selectTxoPage = createSelector(
|
||||
selectState,
|
||||
state => (state.txoPage && state.txoPage.items) || []
|
||||
);
|
||||
|
||||
export const selectTxoPageNumber = createSelector(
|
||||
selectState,
|
||||
state => (state.txoPage && state.txoPage.page) || 1
|
||||
);
|
||||
|
||||
export const selectTxoItemCount = createSelector(
|
||||
selectState,
|
||||
state => (state.txoPage && state.txoPage.total_items) || 1
|
||||
);
|
||||
|
||||
export const selectFetchingTxosError = createSelector(
|
||||
selectState,
|
||||
state => state.fetchingTxosError
|
||||
);
|
||||
|
||||
export const selectIsFetchingTxos = createSelector(
|
||||
selectState,
|
||||
state => state.fetchingTxos
|
||||
);
|
||||
|
||||
export const makeSelectFilteredTransactionsForPage = (page = 1) =>
|
||||
createSelector(
|
||||
selectFilteredTransactions,
|
||||
|
@ -328,3 +407,33 @@ export const selectIsWalletReconnecting = createSelector(
|
|||
selectState,
|
||||
state => state.walletReconnecting
|
||||
);
|
||||
|
||||
export const selectIsFetchingUtxoCounts = createSelector(
|
||||
selectState,
|
||||
state => state.fetchingUtxoCounts
|
||||
);
|
||||
|
||||
export const selectIsConsolidatingUtxos = createSelector(
|
||||
selectState,
|
||||
state => state.consolidatingUtxos
|
||||
);
|
||||
|
||||
export const selectIsMassClaimingTips = createSelector(
|
||||
selectState,
|
||||
state => state.massClaimingTips
|
||||
);
|
||||
|
||||
export const selectPendingConsolidateTxid = createSelector(
|
||||
selectState,
|
||||
state => state.pendingConsolidateTxid
|
||||
);
|
||||
|
||||
export const selectPendingMassClaimTxid = createSelector(
|
||||
selectState,
|
||||
state => state.pendingMassClaimTxid
|
||||
);
|
||||
|
||||
export const selectUtxoCounts = createSelector(
|
||||
selectState,
|
||||
state => state.utxoCounts
|
||||
);
|
||||
|
|
|
@ -51,3 +51,20 @@ export function concatClaims(
|
|||
|
||||
return claims;
|
||||
}
|
||||
|
||||
export function filterClaims(claims: Array<Claim>, query: ?string): Array<Claim> {
|
||||
if (query) {
|
||||
const queryMatchRegExp = new RegExp(query, 'i');
|
||||
return claims.filter(claim => {
|
||||
const { value } = claim;
|
||||
|
||||
return (
|
||||
(value.title && value.title.match(queryMatchRegExp)) ||
|
||||
(claim.signing_channel && claim.signing_channel.name.match(queryMatchRegExp)) ||
|
||||
(claim.name && claim.name.match(queryMatchRegExp))
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
return claims;
|
||||
}
|
||||
|
|
src/util/merge-claim.js (new file, +7 lines)
@ -0,0 +1,7 @@
/*
new claim = { ...maybeResolvedClaim, ...pendingClaim, meta: maybeResolvedClaim['meta'] }
*/

export default function mergeClaims(maybeResolved, pending) {
  return { ...maybeResolved, ...pending, meta: maybeResolved.meta };
}
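In other words, the pending claim's fields win over the previously resolved copy, while meta is kept from the resolved claim. A quick illustration with made-up objects:

const resolved = { name: 'video', title: 'Old title', meta: { effective_amount: '12.3' } };
const pending = { name: 'video', title: 'New title', meta: {} };

mergeClaims(resolved, pending);
// => { name: 'video', title: 'New title', meta: { effective_amount: '12.3' } }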
|
src/util/parse-data.js (new file, +61 lines)
@ -0,0 +1,61 @@
// JSON parser
const parseJson = (data, filters = []) => {
  const list = data.map(item => {
    const temp = {};
    // Apply filters
    Object.entries(item).forEach(([key, value]) => {
      if (!filters.includes(key)) temp[key] = value;
    });
    return temp;
  });
  // Beautify JSON
  return JSON.stringify(list, null, '\t');
};

// CSV Parser
// No need for an external module:
// https://gist.github.com/btzr-io/55c3450ea3d709fc57540e762899fb85
const parseCsv = (data, filters = []) => {
  // Get items for header
  const getHeaders = item => {
    const list = [];
    // Apply filters
    Object.entries(item).forEach(([key]) => {
      if (!filters.includes(key)) list.push(key);
    });
    // return headers
    return list.join(',');
  };

  // Get rows content
  const getData = list =>
    list
      .map(item => {
        const row = [];
        // Apply filters
        Object.entries(item).forEach(([key, value]) => {
          if (!filters.includes(key)) row.push(value);
        });
        // return rows
        return row.join(',');
      })
      .join('\n');

  // Return CSV string
  return `${getHeaders(data[0])} \n ${getData(data)}`;
};

const parseData = (data, format, filters = []) => {
  // Check for validation
  const valid = data && data[0] && format;
  // Pick a format
  const formats = {
    csv: list => parseCsv(list, filters),
    json: list => parseJson(list, filters),
  };

  // Return parsed data: JSON || CSV
  return valid && formats[format] ? formats[format](data) : undefined;
};

export default parseData;
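A small usage sketch for parseData; the rows below are invented, while real callers (such as selectTransactionsFile) pass the items from selectTransactionItems:

const rows = [
  { txid: 'abc', amount: 1.5, type: 'tip' },
  { txid: 'def', amount: -0.2, type: 'spend' },
];

parseData(rows, 'csv');
// => 'txid,amount,type \n abc,1.5,tip\ndef,-0.2,spend'

parseData(rows, 'json', ['txid']); // JSON string with the txid column filtered out
parseData([], 'csv'); // undefined (no rows to serialize)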
|
|
@ -1,8 +1,4 @@
|
|||
// @flow
|
||||
import { SEARCH_OPTIONS } from 'constants/search';
|
||||
|
||||
const DEFAULT_SEARCH_RESULT_FROM = 0;
|
||||
const DEFAULT_SEARCH_SIZE = 20;
|
||||
|
||||
export function parseQueryParams(queryString: string) {
|
||||
if (queryString === '') return {};
|
||||
|
@ -32,56 +28,3 @@ export function toQueryString(params: { [string]: string | number }) {
|
|||
|
||||
return parts.join('&');
|
||||
}
|
||||
|
||||
export const getSearchQueryString = (
|
||||
query: string,
|
||||
options: any = {},
|
||||
) => {
|
||||
const encodedQuery = encodeURIComponent(query);
|
||||
const queryParams = [
|
||||
`s=${encodedQuery}`,
|
||||
`size=${options.size || DEFAULT_SEARCH_SIZE}`,
|
||||
`from=${options.from || DEFAULT_SEARCH_RESULT_FROM}`,
|
||||
];
|
||||
const { isBackgroundSearch } = options;
|
||||
const includeUserOptions = typeof isBackgroundSearch === 'undefined' ? false : !isBackgroundSearch;
|
||||
|
||||
if (includeUserOptions) {
|
||||
const claimType = options[SEARCH_OPTIONS.CLAIM_TYPE];
|
||||
if (claimType) {
|
||||
queryParams.push(`claimType=${claimType}`);
|
||||
|
||||
// If they are only searching for channels, strip out the media info
|
||||
if (!claimType.includes(SEARCH_OPTIONS.INCLUDE_CHANNELS)) {
|
||||
queryParams.push(
|
||||
`mediaType=${[
|
||||
SEARCH_OPTIONS.MEDIA_FILE,
|
||||
SEARCH_OPTIONS.MEDIA_AUDIO,
|
||||
SEARCH_OPTIONS.MEDIA_VIDEO,
|
||||
SEARCH_OPTIONS.MEDIA_TEXT,
|
||||
SEARCH_OPTIONS.MEDIA_IMAGE,
|
||||
SEARCH_OPTIONS.MEDIA_APPLICATION,
|
||||
].reduce(
|
||||
(acc, currentOption) => (options[currentOption] ? `${acc}${currentOption},` : acc),
|
||||
''
|
||||
)}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const additionalOptions = {}
|
||||
const { related_to } = options;
|
||||
const { nsfw } = options;
|
||||
if (related_to) additionalOptions['related_to'] = related_to;
|
||||
if (typeof nsfw !== 'undefined') additionalOptions['nsfw'] = nsfw;
|
||||
|
||||
if (additionalOptions) {
|
||||
Object.keys(additionalOptions).forEach(key => {
|
||||
const option = additionalOptions[key];
|
||||
queryParams.push(`${key}=${option}`);
|
||||
});
|
||||
}
|
||||
|
||||
return queryParams.join('&');
|
||||
};
|
||||
|
|
tests/config/jest-transformer.js (new file, +19 lines)
@ -0,0 +1,19 @@
const config = {
  babelrc: false,
  presets: [
    [
      "@babel/env",
      {
        modules: false
      }
    ],
    "@babel/react"
  ],
  plugins: [
    ["@babel/plugin-proposal-decorators", { legacy: true }],
    ["@babel/plugin-proposal-class-properties", { loose: true }],
    "@babel/plugin-transform-flow-strip-types",
    "transform-es2015-modules-commonjs"
  ]
};
module.exports = require("babel-jest").createTransformer(config);
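Presumably this transformer is wired into the Jest config through the transform option; the actual config file is not part of this diff, so the snippet below is only an assumed example:

// jest.config.js (assumed wiring, not shown in this diff)
module.exports = {
  transform: {
    '^.+\\.jsx?$': '<rootDir>/tests/config/jest-transformer.js',
  },
};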
|
tests/parseURI.test.js (new file, +44 lines)
@ -0,0 +1,44 @@
import * as lbryURI from '../src/lbryURI.js';
import { describe, test, expect } from '@jest/globals';

describe('parseURI tests', () => {
  test('Correctly parses channel URI', () => {
    let result = lbryURI.parseURI('lbry://@ChannelName');
    expect(result.isChannel).toBeTruthy();
    expect(result.path).toStrictEqual('@ChannelName');
    expect(result.channelName).toStrictEqual('ChannelName');
    expect(result.claimName).toStrictEqual('@ChannelName');
  });

  test('Correctly parses test case channel/stream lbry URI', () => {
    let result = lbryURI.parseURI('lbry://@CryptoGnome#1/whale-pool-how-to#e');
    expect(result.isChannel).toStrictEqual(false);
    expect(result.path).toStrictEqual('@CryptoGnome#1/whale-pool-how-to#e');
    expect(result.claimId).toStrictEqual('1');
    expect(result.streamClaimId).toStrictEqual('e');
    expect(result.streamName).toStrictEqual('whale-pool-how-to');
    expect(result.channelName).toStrictEqual('CryptoGnome');
    expect(result.contentName).toStrictEqual('whale-pool-how-to');
  });

  test('Correctly parses lbry URI without protocol', () => {
    let result = lbryURI.parseURI('@CryptoGnome#1/whale-pool-how-to#e');
    expect(result.isChannel).toStrictEqual(false);
    expect(result.streamName).toStrictEqual('whale-pool-how-to');
    expect(result.channelName).toStrictEqual('CryptoGnome');
  });

  test('Throws error for http protocol', () => {
    // TODO - this catches the wrong type of error.
    let uri = 'http://@CryptoGnome#1/whale-pool-how-to#e';
    expect(() => lbryURI.parseURI(uri)).toThrowError();
  });

  test('Correctly parses search', () => {
    let result = lbryURI.parseURI('CryptoGn%ome');
    expect(result.isChannel).toStrictEqual(false);
    expect(result.path).toStrictEqual('CryptoGn%ome');
    expect(result.contentName).toStrictEqual('CryptoGn%ome');
  });
});