Compare commits

...

418 commits

Author SHA1 Message Date
zeppi
0f930c4a7b sync-language 2021-10-08 16:12:32 -04:00
zeppi
32b5787071 pass channel_id on list update 2021-09-13 12:05:57 -04:00
Thomas Zarebczan
129b0ea3fa
Merge pull request #431 from saltrafael/list-thumb
Changes for list thumbnail upload
2021-09-13 11:24:58 -04:00
saltrafael
e3bc848263
Add cb to thumbnail upload 2021-09-13 07:38:21 -03:00
zeppi
372e559cae use replace for list updates 2021-09-11 13:18:41 -04:00
saltrafael
49b9db5aae Fix autoplay not saving 2021-09-06 13:17:07 -04:00
Thomas Zarebczan
12a2ffc708
Merge pull request #426 from saltrafael/playback-controls
Playback and List control changes
2021-09-01 14:08:36 -04:00
Thomas Zarebczan
95fa26f836
Merge pull request #428 from lbryio/ip/from.entries.poly
Fix Object.fromEntries crash on some browsers
2021-09-01 12:04:46 -04:00
infiinte-persistence
dc264ec50c
Fix Object.fromEntries crash on some browsers
## Issue
6985 fromentries app crash - fix or add polyfill
2021-09-01 10:20:40 +08:00
saltrafael
aeb1f533b5
Playback and List control changes 2021-08-25 09:01:50 -03:00
zeppi
d016d8057b cleanup 2021-08-23 10:20:22 -04:00
zeppi
0302a2f8d6 fix background collection update 2021-08-23 10:20:22 -04:00
zeppi
8fa92d872d add isBackgroundUpdate to collection update 2021-08-23 10:20:22 -04:00
zeppi
e4d0662100 build 2021-08-06 12:33:10 -04:00
zeppi
036aa59086 fix collection edit 2021-08-06 12:33:10 -04:00
Thomas Zarebczan
c76dfbde27
Merge pull request #423 from lbryio/playlist-fetch-changes
change collection fetch params
2021-08-03 14:18:20 -04:00
zeppi
7cc9923ed9 change collection fetch params 2021-08-03 14:05:03 -04:00
zeppi
60bd918d5e U_S_P edited collection
bugfix

sync edited
2021-08-02 09:45:43 -04:00
saltrafael
9ebfc927d0 Change rLists selector 2021-07-30 11:01:18 -04:00
saltrafael
54ca8c4320 Filter rLists 2021-07-30 11:01:18 -04:00
saltrafael
bee9bf38dd Add claim in collections selector 2021-07-30 11:01:18 -04:00
infiinte-persistence
aabae5ce59
Add custom comments-server settings
## Issue
5459: Add setting for changing your comment server. Visible on desktop (and possibly defaulting to Odysee URL), hidden on odysee.
2021-07-25 20:52:22 +08:00
zeppi
a327385cdf clean 2021-07-15 17:14:28 -04:00
zeppi
64ce7aa99c handle colons maybe 2021-07-15 17:14:28 -04:00
zeppi
34dfd384e4 logging 2021-07-15 17:14:28 -04:00
zeppi
707c60b813 parse either claimId separator 2021-07-15 17:14:28 -04:00
zeppi
8f66a2fe7c fix pending ids selector 2021-07-05 15:57:47 -04:00
zeppi
729a4831ad channel modlist on confirm 2021-07-05 09:42:33 -04:00
zeppi
88370997b4 cleanup 2021-07-05 09:42:33 -04:00
zeppi
b93598b0ff fix bug canceling pending check early 2021-07-05 09:42:33 -04:00
zeppi
4cbb9a35c3 prefer pending and edited collections in selector 2021-07-05 09:42:33 -04:00
zeppi
04ce1df03d collection check pending 2021-07-05 09:42:33 -04:00
zeppi
347fe25e85 cleanup 2021-07-05 09:42:33 -04:00
zeppi
e66698eadc store pendingById 2021-07-05 09:42:33 -04:00
zeppi
0b505fb0f4 selectClaimIdIsPending 2021-06-23 10:47:24 -04:00
Thomas Zarebczan
508e8d36fd
Merge pull request #416 from saltrafael/master
Fix breaking when no languages set
2021-06-14 11:13:06 -04:00
saltrafael
0758827e6d fix breaking when no languages set 2021-06-13 08:51:49 -03:00
Thomas Zarebczan
166c3b2832
Merge pull request #414 from saltrafael/master
Don't reset content language on edit
2021-06-11 11:45:35 -04:00
Thomas Zarebczan
d298c00f24
Merge pull request #415 from lbryio/fix-autoplay
fix autoplay after playlist click and select next playable
2021-06-10 17:20:08 -04:00
zeppi
06b09f5a81 add list reducer key consts 2021-06-10 16:07:41 -04:00
zeppi
4dfc4689c6 fix autoplay after playlist click and select next playable 2021-06-10 14:03:11 -04:00
saltrafael
85ad697e0a Don't reset content language on edit 2021-06-09 11:32:22 -03:00
zeppi
609f13991f bugfix 2021-06-08 11:51:49 -04:00
zeppi
503e18be1b refactor select collection index / next 2021-06-08 11:51:49 -04:00
zeppi
32a85a9ff3 cleanup 2021-06-08 11:51:49 -04:00
zeppi
40fc75320d fix crash on abandoned collection claim 2021-06-08 11:51:49 -04:00
zeppi
e20baa0683 fix sync bringing back unpublished 2021-06-08 11:51:49 -04:00
zeppi
a1cb16400d bugfix 2021-06-08 11:51:49 -04:00
zeppi
9461cf1bee cleanup
cleanup

cleanup
2021-06-08 11:51:49 -04:00
zeppi
7e049487c3 fix sync 2021-06-08 11:51:49 -04:00
zeppi
f7775fd837 finalize collections sync keys 2021-06-08 11:51:49 -04:00
zeppi
06531c6b48 fix tags bug, flow 2021-06-08 11:51:49 -04:00
zeppi
b280d66f5d return new collection on publish 2021-06-08 11:51:49 -04:00
zeppi
bfb50ebeb5 handle collection claim delete 2021-06-08 11:51:49 -04:00
zeppi
06ce8c623c refactor fetch to fix pending 2021-06-08 11:51:49 -04:00
zeppi
f8ff4cfc8f thumb param 2021-06-08 11:51:49 -04:00
zeppi
e34f451025 collections length 2021-06-08 11:51:49 -04:00
zeppi
d3c045b037 prefer title for collection name on resolve 2021-06-08 11:51:49 -04:00
zeppi
63946a0a6d pending, edit fixes, support collectionCount 2021-06-08 11:51:49 -04:00
zeppi
dd697ed70e make edits work 2021-06-08 11:51:49 -04:00
zeppi
fd2551e764 fix pending, support new collection add ui 2021-06-08 11:51:49 -04:00
zeppi
8d0f9c18fd wip
wip

clean

clean

review

wip

wallet sync

wip

collection publishing

build

refactor, publishing, pending, editing

wip

wip

fetch collections on resolve

select collections or playlists

build

return edit success

fix collection claimId selector

small rename

flow type fixes

collection edit params type param and flowtypes
2021-06-08 11:51:49 -04:00
Thomas Zarebczan
757e8c24ec
Merge pull request #413 from lbryio/ip/thumbnail-error-part-2
Clear 'thumbnailError' when uploading new one
2021-05-17 15:46:25 -04:00
infiinte-persistence
ecfcc95beb
Clear 'thumbnailError' when uploading new one
## Issue
"thumbnail is invalid" not reset with new thumbnail upload #6044
https://github.com/lbryio/lbry-desktop/issues/6044

## Change
The previous PR only covered the scenario of changing between NEW and EDIT. This one covers "uploading new".
2021-05-18 01:23:37 +08:00
Thomas Zarebczan
b2ad71fb74
Merge pull request #412 from lbryio/ip/release-time
Change how release_time is edited.
2021-05-14 17:30:25 -04:00
infiinte-persistence
35dd7650fb
Change how release_time is edited.
- `releaseTime` is now a number instead of a string, matching `release_time`. It was getting confusing what the variable units were.

- `releaseTime` will always match `release_time` for an edit. It will be used in the GUI to reset just the date to the original, instead of having to reset the entire form.

- `releaseTimeEdited` will be used by `updatePublishForm` in the GUI to represent the desired new release time. Set to `undefined` if we don't want to change the date.
2021-05-13 07:57:01 +08:00
infiinte-persistence
babfec7d43
Complete rename of 'release_time'
I believe this was missed out in c31161c4
2021-05-13 07:57:00 +08:00
Thomas Zarebczan
6fc11454eb
Merge pull request #411 from lbryio/ip/thumbnail-error
Define default value for 'thumbnailError'
2021-05-12 10:20:33 -04:00
infiinte-persistence
3b853b6ddd
Define default value for 'thumbnailError'
## Issue
"thumbnail is invalid" not reset with new thumbnail upload #6044
https://github.com/lbryio/lbry-desktop/issues/6044

## Change
Defining a default value will cover both CLEAR_PUBLISH and DO_PREPARE_EDIT
2021-05-11 11:02:48 +08:00
Thomas Zarebczan
41ef1117e5
Merge pull request #408 from lbryio/ip/bump-transaction-page-size
doFetchTransactions: bump pageSize to 999999; remove doFetchSupport
2021-04-26 15:11:24 -04:00
zeppi
66c77fc39b delay preference set two seconds 2021-04-26 15:06:00 -04:00
infiinte-persistence
e5c0b5f0a6
doFetchTransactions: bump pageSize to 999999; remove doFetchSupport
## Issue
5899 Re-add ability to export transactions
2021-04-26 12:26:36 +08:00
Sean Yesmunt
7e17344683 remove unused comment types 2021-04-23 14:52:16 -04:00
Sean Yesmunt
b511282c35 superchat support 2021-04-23 14:52:16 -04:00
Thomas Zarebczan
eb37009a98
Merge pull request #405 from lbryio/feat-supportAsyncForDesktop
support claim search async dispatch
2021-04-23 11:12:44 -04:00
zeppi
c2e03fa71d support claim search async dispatch 2021-04-22 22:31:08 -04:00
Thomas Zarebczan
4e37ab6580
Merge pull request #404 from lbryio/ip/export-transactions
Transaction export: move file-creation to background.
2021-04-19 16:44:04 -04:00
infiinte-persistence
a0bfbee958
Transaction export: move file-creation to background. 2021-04-17 22:09:02 +08:00
infiinte-persistence
3ca0c8d204 CoinSwap: handle "receiving/received LBC" 2021-04-12 16:18:51 -04:00
infiinte-persistence
5f3a40a420 CoinSwap: websocket + multi-coin
- For the active swap, switch from polling to websocket. The returned data is now the Charge data from the commerce, so some parsing will be required.

- Only save the 'chargeCode' to the wallet. The other data can be repopulated from this.
2021-04-07 14:35:15 -04:00
infiinte-persistence
8335c9d2de Save CoinSwapInfo instead of just the swap address.
User should be able to retrieve the expected send/receive amount, otherwise they might be sending insufficient amounts.

This change also includes the coin type, as we might be supporting other coins beyond BTC.
2021-04-07 14:35:15 -04:00
infiinte-persistence
9f7902aa0b Persist BTC swap address across devices
## Issue
Used by [Support for swapping into LBC](https://github.com/lbryio/lbry-desktop/pull/5654)
2021-04-07 14:35:15 -04:00
Thomas Zarebczan
9a17013728
Merge pull request #401 from lbryio/feat-remotePublishUrl
add remote publish url
2021-03-29 19:18:00 -04:00
zeppi
35088a6d10 add remote publish url 2021-03-29 17:48:35 -04:00
Thomas Zarebczan
8e74e3137a
bundle me up 2021-03-28 12:36:14 -04:00
Thomas Zarebczan
2cf645ab14
Update tags.js 2021-03-28 12:04:35 -04:00
zeppi
c494c92505 provide selector for placeholder stream 2021-03-25 18:56:48 -04:00
jessopb
e9712dc954 Revert "fix claimHasSource selector"
This reverts commit 5416b6bc42.
2021-03-25 18:37:39 -04:00
zeppi
5416b6bc42 fix claimHasSource selector 2021-03-25 17:50:36 -04:00
Thomas Zarebczan
86c7741d4c
Merge pull request #396 from lbryio/ip-txo-fetch-id
Drop old txo-fetch results
2021-03-23 21:35:10 -04:00
infiinte-persistence
a5a326e73a Drop old txo-fetch results
## Issue
Closes lbry-desktop 4317: `Transaction list shows previously requested data / pages`

## Approach
A naive approach of creating a random transaction ID for each fetch. The latest ID, stored in `state`, will be the expected one -- any other transaction results will be dropped.

The loading spinning will continue to spin until the latest ID's results are fetched.
2021-03-23 21:01:47 -04:00
zeppi
4e2a6c8201 create selectPendingClaims 2021-03-23 20:46:04 -04:00
Sean Yesmunt
629c3273f5 create makeSelectClaimHasSource 2021-03-18 11:33:54 -04:00
Sean Yesmunt
638a78695a add has_source and has_no_source to doClaimSearch options 2021-03-18 11:33:54 -04:00
Sean Yesmunt
d75e7725fe Revert "Revert "connection_status is dead, long live connected""
This reverts commit f449d7916c.
2021-03-15 15:41:14 -04:00
Sean Yesmunt
d91ec1773c update build 2021-03-15 14:20:50 -04:00
infiinte-persistence
e5b79a8400 doFetchClaimListMine: add 4th param to filter out claim_type 2021-03-15 14:20:50 -04:00
Sean Yesmunt
f449d7916c Revert "connection_status is dead, long live connected"
This reverts commit 74ab5bbf84.
2021-03-15 13:56:13 -04:00
infiinte-persistence
87e67aa714 makeSelectClaimForUri: Use either canonical or permanent url for repost data
## Issue
Closes 5673 (lbry-desktop): Reposts are all listed under "Annoymous"
2021-03-15 13:39:41 -04:00
Victor Shyba
74ab5bbf84 connection_status is dead, long live connected 2021-03-15 13:39:17 -04:00
Sean Yesmunt
bf728f8716 update staked amount constants 2021-03-11 12:57:53 -05:00
infiinte-persistence
bf3645df44 Move getChannelLevel to a selector.
## Issue
5636: Disable video previews in comments/posts made by channels below a certain channel staked level
2021-03-11 12:57:53 -05:00
Franco Montenegro
0d2d64aca7 Clear uri when clearing publish 2021-03-09 16:18:34 -05:00
Sean Yesmunt
c31161c41a rename release_time to releaseTime 2021-03-05 15:38:55 -05:00
Franco Montenegro
251c646851 Run yarn build for production 2021-03-05 15:38:55 -05:00
Franco Montenegro
5ea369ee76 Add release time to publish 2021-03-05 15:38:55 -05:00
zeppi
37f17fae0c populate channel claims for txo signing_channels 2021-03-05 15:29:04 -05:00
infiinte-persistence
f8a4264307 Bump copyright year to 2021 2021-02-24 11:58:13 -05:00
Sean Yesmunt
f37fd9bf92 store channel when claim is resolved 2021-02-18 23:30:20 -05:00
Sean Yesmunt
bcaedbcd9c add channel_sign 2021-02-11 10:07:18 -05:00
zeppi
f0849b4ce1 review changes 2021-02-05 16:04:48 -05:00
zeppi
a2f8646f95 better track txids for large wallet operations 2021-02-05 16:04:48 -05:00
zeppi
92acb9a6c9 support mass claiming tips 2021-02-02 21:35:19 -05:00
Sean Yesmunt
5d41b0656c add extra check for claim to prevent crash 2021-02-02 16:18:48 -05:00
Sean Yesmunt
7c926ad8de livestream changes 2021-01-26 16:39:29 -05:00
dependabot[bot]
9e2d80909f Bump node-notifier from 8.0.0 to 8.0.1
Bumps [node-notifier](https://github.com/mikaelbr/node-notifier) from 8.0.0 to 8.0.1.
- [Release notes](https://github.com/mikaelbr/node-notifier/releases)
- [Changelog](https://github.com/mikaelbr/node-notifier/blob/v8.0.1/CHANGELOG.md)
- [Commits](https://github.com/mikaelbr/node-notifier/compare/v8.0.0...v8.0.1)

Signed-off-by: dependabot[bot] <support@github.com>
2021-01-26 16:38:47 -05:00
infiinte-persistence
7bd6ae1824 Make 'playback rate' persistent
This is the accompanying commit for https://github.com/lbryio/lbry-desktop/pull/5310
2021-01-19 10:16:05 -05:00
jessopb
746acef66a
Merge pull request #377 from lbryio/utxoCountPageLimit
remove support utxo count for now, add page limit
2021-01-18 23:33:49 -05:00
zeppi
0274f7e13e remove support utxo count for now, add page limit 2021-01-18 23:25:55 -05:00
jessopb
acc54f157f
Merge pull request #374 from lbryio/utxoCounts
Utxo counts
2021-01-05 12:16:05 -05:00
zeppi
3b523980de cleanup 2021-01-04 17:20:27 -05:00
zeppi
b5cc1f8818 bugfix 2021-01-04 10:54:17 -05:00
zeppi
f80c71a2ab support utxo consolidation 2021-01-02 12:47:17 -05:00
zeppi
ce9f720bbd support repost amount
bugfix

resolve reposted claims inline

refactor resolve reposts

further refactor

bugfix
2020-12-28 13:34:14 -05:00
zeppi
e3c05268e5 bugfix 2020-12-18 10:26:54 -05:00
zeppi
a37f195d73 support repost amount 2020-12-18 10:26:54 -05:00
jessopb
8adf3dada3
Merge pull request #369 from lbryio/selectPendingUrl
Select pending url
2020-12-16 10:19:50 -05:00
zeppi
664df6237d effective amount number 2020-12-16 09:48:49 -05:00
zeppi
4f57992762 pending 2020-12-16 09:48:49 -05:00
zeppi
f683af3b99 add selectEffectiveAmountForUri 2020-12-16 09:48:49 -05:00
zeppi
612acc6e7f allow undefined channel_id in repost flowtype 2020-12-16 09:48:49 -05:00
zeppi
034838a23c support search finding pending claims 2020-12-16 09:48:49 -05:00
Sean Yesmunt
92a4263c90 update Txo type 2020-12-15 15:46:39 -05:00
zeppi
eb40d2c058 persist publish language
fix publish language default
2020-11-19 14:08:54 -05:00
Sean Yesmunt
70c52e42e8 better handling of bad password errors 2020-11-13 14:41:26 -05:00
Sean Yesmunt
88dbef2cd0 pass error to fatal action handler 2020-11-13 14:41:26 -05:00
Sean Yesmunt
8344379bfd add fatal error sync action 2020-11-13 14:41:26 -05:00
infiinte-persistence
8093d69807 claim_search: Don't clear previous page results if subsequent pages timeout.
## Issue
https://github.com/lbryio/lbry-desktop/issues/4609

## Change
- Don't clear existing results on timeout.
- Treat this scenario as "last page reached" by marking `claimSearchByQueryLastPageReached`.
2020-11-13 13:17:46 -05:00
jessop
c86810038c pass languages in channel create 2020-11-09 10:41:15 -05:00
Sean Yesmunt
dc70da1be2 increase wallet balance interval to 10 seconds 2020-11-07 19:40:06 -05:00
Sean Yesmunt
4d11f31914 sync 'following' 2020-10-30 13:03:06 -04:00
jessop
04789190b0 remove tags from redux (to desktop) 2020-10-29 11:41:53 -04:00
jessop
1fc5afa0c4 add makeSelectTagInClaimOrChannelForUri 2020-10-28 10:35:34 -04:00
jessop
3cb3859baf add homepage setting 2020-10-23 14:38:17 -04:00
Sean Yesmunt
823197af37 throw error instead of logging 2020-10-20 11:59:33 -04:00
Sean Yesmunt
fb4bcdb4f5 update uuid import for new version 2020-10-20 00:18:21 -04:00
Sean Yesmunt
15737f9b09 check if 'claims' exists before looping over it 2020-10-19 23:01:34 -04:00
jessopb
77b27fea99 Revert "remove tags from redux (to desktop)"
This reverts commit 22a26be26f.
2020-10-19 17:16:25 -04:00
jessop
16c6ba1a24 support channel language update 2020-10-16 13:21:06 -04:00
jessop
3f14b93cbc add searchInLangage setting 2020-10-16 13:20:16 -04:00
jessop
22a26be26f remove tags from redux (to desktop) 2020-10-16 13:19:59 -04:00
jessop
a13ddadba4 support comment pinning 2020-10-14 16:54:19 -04:00
dependabot[bot]
7fbb87d38f Bump node-fetch from 2.2.0 to 2.6.1
Bumps [node-fetch](https://github.com/bitinn/node-fetch) from 2.2.0 to 2.6.1.
- [Release notes](https://github.com/bitinn/node-fetch/releases)
- [Changelog](https://github.com/node-fetch/node-fetch/blob/master/docs/CHANGELOG.md)
- [Commits](https://github.com/bitinn/node-fetch/compare/v2.2.0...v2.6.1)

Signed-off-by: dependabot[bot] <support@github.com>
2020-10-12 23:26:05 -04:00
Jeremy Kauffman
fee1834bbc
add LBRYFoundationBoardCandidacy tag 2020-10-06 14:33:49 -04:00
jessop
ba5d6b84be flow support for comment reactions 2020-09-29 11:30:14 -04:00
jessop
90012bf47c updates supporting sync fixes
change alt preference key to local

remove enable_sync from sync list

restore to enable_sync in client settings
2020-09-21 12:56:39 -04:00
Sean Yesmunt
3bfdde4629 change default bid to 0.01 lbc 2020-09-18 10:05:56 -04:00
infiinte-persistence
437c54f164 Fix untranslatable string. 2020-09-10 13:59:07 -04:00
Sean Yesmunt
3eee65146b new settings 2020-09-08 14:41:50 -04:00
Sean Yesmunt
01df9522d5 remove comments flow type 2020-09-08 13:52:40 -04:00
Sean Yesmunt
1c02ca2b6b LBC => LBRY Credits 2020-09-02 14:33:12 -04:00
jessop
3df916548f return preference get promise 2020-08-31 14:29:51 -04:00
jessop
316dfcf06a include enable_sync in sync 2020-08-31 14:29:51 -04:00
ioancole
5c1a00b103 Add parseURI tests 2020-08-31 11:24:20 -04:00
Sean Yesmunt
7d90cba8a0 add first run flag 2020-08-27 12:29:44 -04:00
infiinte-persistence
aca15fe3e9 Add SETTINGS.ENABLE_PUBLISH_PREVIEW
This option allows users to bypass the "publish preview" modal.
2020-08-25 10:32:59 -04:00
infiinte-persistence
12ba291c3b doPublish: Add optional parameter for "preview-only" request. 2020-08-25 10:32:59 -04:00
Sean Yesmunt
903d425188 another one 2020-08-23 22:43:00 -04:00
Sean Yesmunt
0dbe6efc75 add new settings constants 2020-08-23 22:38:45 -04:00
Franco Montenegro
210bb80f2c Add to tray when closed setting 2020-08-19 16:00:08 -04:00
infiinte-persistence
05b949d470 Publish: Make 'channel' persistent by not clearing it in CLEAR_PUBLISH
Users are annoyed by the constant reset to 'Anonymous'.
2020-08-14 13:26:34 -04:00
Sean Yesmunt
7df96d4767 update build 2020-08-10 17:31:54 -04:00
Sean Yesmunt
27da80083e better handle thumbnail server being down 2020-08-10 17:31:29 -04:00
Sean Yesmunt
04e3ca8250 remove search code 2020-07-27 16:36:29 -04:00
jessop
a1d5ce7e7e provide remove abandon by uri selector 2020-07-27 10:31:01 -04:00
jessop
8c29c7e912 adds reconnect timeout and stores non-signed in prefs in different key 2020-07-24 16:47:45 -04:00
jessop
3a140c2318 provide settings constant arrays for sync 2020-07-24 16:47:45 -04:00
Sean Yesmunt
5547f53f48 add claim_id to myClaims if resolved claim is mine 2020-07-22 15:32:07 -04:00
Sean Yesmunt
3ce73c6646 remove utxo_release call before txo_list is called 2020-07-22 12:46:40 -04:00
Sean Yesmunt
11a43bae79 update types 2020-07-21 14:55:56 -04:00
Sean Yesmunt
7847224c89 update build 2020-07-14 16:05:59 -04:00
Sean Yesmunt
3dfbb7de0f remove blocked code and add additional check for middleware 2020-07-14 16:05:59 -04:00
Sean Yesmunt
6306639c34 remove comment code 2020-07-14 16:05:59 -04:00
Sean Yesmunt
7a7a1aad32 Revert "Some new translation stringfs"
This reverts commit f21f797ae3.
2020-07-14 11:07:10 -04:00
Sean Yesmunt
c57d5ea664 Revert "Removed console error strings"
This reverts commit 9c68623047.
2020-07-14 11:07:10 -04:00
Sean Yesmunt
83e20d3e6e Revert "Removed console error message from translation"
This reverts commit 10a508aaa5.
2020-07-14 11:07:10 -04:00
TigerxWood
10a508aaa5 Removed console error message from translation 2020-07-13 14:22:02 -04:00
TigerxWood
9c68623047 Removed console error strings 2020-07-13 14:22:02 -04:00
TigerxWood
f21f797ae3 Some new translation stringfs 2020-07-13 14:22:02 -04:00
Sean Yesmunt
a1673ebfa9 add permanent_url to lbryFirst publish payload 2020-07-10 13:39:55 -04:00
Sean Yesmunt
c29123e815 pass authToken to youtube.HasAuth 2020-07-10 12:36:30 -04:00
Sean Yesmunt
0be1b75343
Merge pull request #336 from infinite-persistence/ip4385--publish-status
Publish: Handle reflecting-state not updated correctly.
2020-07-10 10:04:23 -04:00
infiinte-persistence
85077f6f00
Publish: Handle reflecting-state not updated correctly.
Fixes 4385 (lbry-desktop) `Don't show upload progress if no file exists (shows 0% progress)`

An undefined result exception was causing the reflecting-state update code to be missed, so Desktop still thinks the item is still reflecting and continues to show the "Uploading(0%)" status.
2020-07-10 18:27:49 +08:00
Sean Yesmunt
e6d89b0690 send lbryfirst error to desktop for handling 2020-07-09 10:51:50 -04:00
Sean Yesmunt
ff1f95c101
Merge pull request #335 from lbryio/lbry-first 2020-07-08 11:45:02 -04:00
Sean Yesmunt
31647e4ee7 add back 'remove' function 2020-07-08 01:40:43 -04:00
Sean Yesmunt
1523bbd33f add 'lbry-first' tag to lbry-first publishes 2020-07-07 18:06:59 -04:00
Mark Beamer Jr
0f80568acd Initial LBRY First To Allow publishing to youtube channels. 2020-07-07 17:19:50 -04:00
Sean Yesmunt
8eb071d1e4
Merge pull request #334 from lbryio/fix-channelErrors 2020-07-03 11:07:28 -04:00
jessop
906199d866 provide for clearing channel errors 2020-07-02 18:03:55 -04:00
jessopb
0b867cbbdc
Merge pull request #333 from lbryio/fix-noDeletePending
prevent delete from byUri when pending
2020-07-02 16:35:11 -04:00
jessop
c0bfa4d320 prevent delete from byUri when pending 2020-07-02 12:50:54 -04:00
Sean Yesmunt
85f0f574bd update build 2020-06-29 17:43:49 -04:00
Sean Yesmunt
bcf4146e34
Merge pull request #330 from btzr-io/patch-1
Add file text to publish state
2020-06-29 17:43:13 -04:00
Sean Yesmunt
eb47b7e5b6
Merge pull request #331 from lbryio/revert 2020-06-29 17:07:36 -04:00
Sean Yesmunt
cf0b135a15 update build 2020-06-29 17:02:39 -04:00
Sean Yesmunt
cca78e9341 Revert "remove comment/blocked code"
This reverts commit e4c05cebe9.
2020-06-29 17:02:19 -04:00
Sean Yesmunt
60e41e7c9f
Merge pull request #329 from infinite-persistence/4429-unresolved-string
Fix unresolved "You sent ${amount} LBC"
2020-06-29 14:41:53 -04:00
Baltazar Gomez
ebf0d49fb0
add file text to publish state 2020-06-26 23:29:00 -05:00
infiinte-persistence
fc217d58d7
Fix unresolved "You sent ${amount} LBC"
## Fixes:
[lbry-desktop] #-4429: Unresolved string: "You sent ${amount} LBC"

## Changes:
Corrected error in commit c9492522.
2020-06-24 13:31:09 +08:00
Sean Yesmunt
e4c05cebe9 remove comment/blocked code 2020-06-23 14:48:30 -04:00
Sean Yesmunt
5ac2065b61 fix selector to properly return null for abandoned claims 2020-06-23 11:15:17 -04:00
Sean Yesmunt
d915f965b4
Merge pull request #328 from lbryio/channel-resolve-fix
handle resolving vanity channel urls
2020-06-22 14:39:30 -04:00
Sean Yesmunt
0ecf719daa handle resolving vanity channel urls 2020-06-22 14:37:37 -04:00
jessopb
c7ca11f327
Merge pull request #327 from lbryio/pendingChannels2
support pending channels
2020-06-22 13:07:32 -04:00
jessop
e9d0363588 support pending channels
bugfix

bugfix2
2020-06-22 12:38:21 -04:00
jessopb
273090d42f
Merge pull request #326 from lbryio/revert-325-pendingChannels
Revert "support pending channels"
2020-06-19 13:11:44 -04:00
jessopb
2ec145c50f
Revert "support pending channels" 2020-06-19 13:11:31 -04:00
jessopb
d00744a8b5
Merge pull request #325 from lbryio/pendingChannels
support pending channels
2020-06-19 12:15:38 -04:00
jessop
36f36cea8e bugfix 2020-06-18 19:12:29 -04:00
jessop
b32f6d0ddc support pending channels 2020-06-17 21:27:57 -04:00
Sean Yesmunt
72f9d57134
Merge pull request #323 from TigerxWood/patch-1
Update strings for translation
2020-06-17 00:56:27 -04:00
TigerxWood
c949252243
Update string for translation 2020-06-17 01:03:29 +03:00
Sean Yesmunt
f8ac5359d9
Merge pull request #322 from lbryio/signed-supports
signed support functionality
2020-06-10 18:09:23 -04:00
Sean Yesmunt
7216d2befd signed support functionality 2020-06-10 18:06:12 -04:00
Sean Yesmunt
70c2ffc0bd
Merge pull request #321 from michaelmitnick/master 2020-06-05 11:12:07 -04:00
Sean Yesmunt
a883c4f56c separate spansing and english tags 2020-06-05 11:11:03 -04:00
Sean Yesmunt
a9f1f7b61d fix typo 2020-06-04 11:40:52 -04:00
Michael Mitnick
4e0c59ee8d Default tags to spanish 2020-06-02 22:40:42 -04:00
Michael Mitnick
c7de10be2d Default tags to spanish 2020-06-02 22:26:59 -04:00
Sean Yesmunt
b2d49c2755 add case for channel_list failing 2020-06-01 14:26:26 -04:00
Jeremy Kauffman
f379c724bb update bundle 2020-05-31 10:12:48 -04:00
Jeremy Kauffman
7f1fc91b8a
add tag for current events 2020-05-31 09:31:46 -04:00
Sean Yesmunt
09ff7b0b99 remove placeholder return value 2020-05-26 22:10:52 -04:00
jessopb
65346c5977
Merge pull request #319 from lbryio/fix-uploadingState
fix uploading state selector
2020-05-26 18:00:32 -04:00
jessop
a6dce0eccf fix uploading state selector 2020-05-26 17:11:27 -04:00
Sean Yesmunt
82b1c8c51b
Merge pull request #317 from clay53/master 2020-05-26 09:47:20 -04:00
Clayton Hickey
ed68f01ff5 add case for when amount is undefined in formatCredits instead of returning '0' 2020-05-26 08:36:38 -04:00
Sean Yesmunt
d2079111b3 add selector for commet loading state 2020-05-25 14:00:22 -04:00
Sean Yesmunt
6c494eaf80
Merge pull request #315 from lbryio/paid 2020-05-21 13:16:09 -04:00
Sean Yesmunt
aa2cfa7896 fix myPurchases selector 2020-05-21 09:38:30 -04:00
Sean Yesmunt
910b55f059 don't use outpoint from get response because it might not exist 2020-05-20 15:40:25 -04:00
Sean Yesmunt
f6e5b69e5a update types 2020-05-20 13:42:36 -04:00
Sean Yesmunt
58e59acb10 update types 2020-05-20 12:06:38 -04:00
Sean Yesmunt
02831fe359 use purchase_receipt instead of content_fee 2020-05-19 14:25:46 -04:00
Sean Yesmunt
0730becb35 add more mature tags 2020-05-19 13:52:42 -04:00
Sean Yesmunt
4a59abf30c add doClearPurchasedUriSuccess 2020-05-18 22:13:18 -04:00
Sean Yesmunt
f660f1070c paid file changes for lbry.tv 2020-05-18 16:40:33 -04:00
Sean Yesmunt
486e24621e
Merge pull request #309 from lbryio/disableHttpImages 2020-05-12 10:33:00 -04:00
Sean Yesmunt
cd9c15567f
Merge pull request #311 from lbryio/purchase_list 2020-05-11 11:50:18 -04:00
Sean Yesmunt
278e12dcbe add purchase_list 2020-05-11 11:46:23 -04:00
Sean Yesmunt
259317250a update build 2020-05-11 10:09:38 -04:00
Sean Yesmunt
3af0b55b8e
Merge pull request #313 from jeffslofish/suport-lbry-link-with-timestamp 2020-05-11 10:09:07 -04:00
Sean Yesmunt
9b4bf30755
Merge pull request #312 from btzr-io/patch-1 2020-05-11 10:07:35 -04:00
jessopb
7bb6bb7ea2
Merge pull request #310 from lbryio/feat-trackReflectingFiles
track reflecting files
2020-05-08 15:07:58 -04:00
jessop
3be6fa52ac track reflecting files 2020-05-08 14:11:43 -04:00
Sean Yesmunt
1e27a854d0 update tags 2020-05-08 13:14:37 -04:00
Jeffrey Fisher
88d785a844 Support lbry:// links with timestamp 2020-05-07 14:18:40 -07:00
Baltazar Gomez
8d1ebfb9c5
enable webp images 2020-05-07 10:42:30 -05:00
Sean Yesmunt
c30e1eee2c update tags 2020-05-05 09:41:04 -04:00
jessop
efbc95f383 disable http images 2020-05-02 19:35:25 -04:00
jessopb
7d3563f856
Merge pull request #306 from lbryio/feat-paginateClaimList
fetch claim list by page
2020-05-01 14:06:24 -04:00
jessop
58ff4d8086 support paginating publishes
and removing dependencies on full claim list mine

fix pending

repost fix
2020-05-01 13:54:10 -04:00
Sean Yesmunt
f8c26fbe34 use api response for spee.ch thumbnail instead of re-creating it 2020-05-01 12:01:56 -04:00
Sean Yesmunt
17f611888c
Merge pull request #303 from lbryio/dependabot/npm_and_yarn/https-proxy-agent-2.2.4 2020-04-27 14:15:21 -04:00
Sean Yesmunt
14c8764925
Merge pull request #308 from lbryio/thumbnail-error 2020-04-27 14:15:05 -04:00
Sean Yesmunt
ce642bbae6 better handle thumbnail upload errors 2020-04-27 14:13:36 -04:00
jessopb
562a9bd40c
Merge pull request #307 from lbryio/showToHideReposts
showReposts flip it reverse it to hideReposts
2020-04-27 09:51:41 -04:00
jessop
b3b15ab0a3 showReposts flip it reverse it to hideReposts 2020-04-27 09:44:59 -04:00
Sean Yesmunt
4b4c7f9710 update tag 2020-04-22 12:14:44 -04:00
Sean Yesmunt
a64d5039b9 update list of known tags 2020-04-22 11:33:17 -04:00
Sean Yesmunt
a65d09a919
Merge pull request #305 from lbryio/utxoReleaseTxo
release utxo before txo list
2020-04-20 15:07:13 -04:00
jessop
9b63b1c7e3 release utxo before txo list 2020-04-20 14:22:15 -04:00
jessopb
3b30cd2e2d
Merge pull request #304 from lbryio/txoListConstant
add is_my_input_or_output constant
2020-04-17 08:22:00 -04:00
jessop
ee29e9a024 add is_my_input_or_output constant 2020-04-16 21:08:11 -04:00
dependabot[bot]
28c9c3e338
Bump https-proxy-agent from 2.2.1 to 2.2.4
Bumps [https-proxy-agent](https://github.com/TooTallNate/node-https-proxy-agent) from 2.2.1 to 2.2.4.
- [Release notes](https://github.com/TooTallNate/node-https-proxy-agent/releases)
- [Commits](https://github.com/TooTallNate/node-https-proxy-agent/compare/2.2.1...2.2.4)

Signed-off-by: dependabot[bot] <support@github.com>
2020-04-16 11:43:38 +00:00
jessopb
22c9e3563e
Merge pull request #302 from lbryio/fix-actuallyFixEmptyChannels
fix empty channels betterer
2020-04-15 17:57:34 -04:00
jessop
df043f3ef6 fix empty channels betterer 2020-04-15 17:36:48 -04:00
jessopb
4d374432cd
Merge pull request #301 from lbryio/fix-transactionPageTweaks
add txo exclude internal transfers constant
2020-04-15 13:43:27 -04:00
jessop
5994f9fb9e add txo exclude internal transfers constant 2020-04-15 13:36:29 -04:00
jessopb
0ade1de9e7
Merge pull request #300 from lbryio/fix-nullChannelClaims
fix empty channel claims
2020-04-15 11:59:55 -04:00
jessop
1bd142caa1 fix empty channel claims 2020-04-15 11:58:04 -04:00
jessopb
07dff852f1
Merge pull request #299 from lbryio/feat-txo-paginate
Feat txo paginate
2020-04-15 10:21:37 -04:00
jessop
a5f93bd2f7 i18n strings in abandon 2020-04-15 10:16:19 -04:00
jessop
cd3fa33066 callback for abandons 2020-04-14 16:35:27 -04:00
jessop
1e505184dc add txo list by params
fix set.slice() error

add txopage reducer

move txo constants to redux

abandon bug

abandon callback
2020-04-14 16:35:27 -04:00
Sean Yesmunt
677dd25643 update build 2020-04-14 13:54:59 -04:00
Jeremy Kauffman
320c6c5b70
add trailing comma 2020-04-14 13:45:40 -04:00
Jeremy Kauffman
f18fc99933
add some tags for topical events 2020-04-14 13:45:02 -04:00
jessopb
fde58f6d03
Merge pull request #298 from lbryio/fix-tipUnlockErrors
Fix tip unlock errors
2020-04-01 17:19:59 -04:00
jessop
1097a63d44 add preview action 2020-04-01 14:47:35 -04:00
jessop
de41f6bb8e adds abandonClaiSupportError to wallet reducer 2020-04-01 14:47:35 -04:00
Sean Yesmunt
90ba18d060
Merge pull request #297 from lbryio/pending-sort-fix
check for confirmations when sorting my own claims because of timestamp issues with pending claims
2020-04-01 10:39:25 -04:00
Sean Yesmunt
273c325d37 check for confirmations when sorting my own claims because of timestamp issues with pending claims 2020-04-01 10:37:50 -04:00
jessopb
07adf4aab3
Merge pull request #293 from lbryio/feat-unlockTips
tip management on claim
2020-04-01 07:57:46 -04:00
jessop
1d461ec088 tip management on claim
properly handle pending

respond to review
2020-03-31 21:36:46 -04:00
Sean Yesmunt
aa6830d9f3
Merge pull request #295 from lbryio/optimizeState
save optimize check state to allow page navigation
2020-03-31 12:56:33 -04:00
Thomas Zarebczan
625a624b9c
is_syncing! 2020-03-31 12:33:08 -04:00
Thomas Zarebczan
1818839133
add is_synced to types 2020-03-31 12:26:46 -04:00
Sean Yesmunt
b7ae238606
Merge pull request #296 from lbryio/claim-search
add claim search query to claimSearchByQuery when the result times out
2020-03-31 12:00:46 -04:00
Sean Yesmunt
af48f6ecd6 add claim search query to claimSearchByQuery when the result times out 2020-03-31 11:58:31 -04:00
jessop
430f989809 review changes 2020-03-31 10:05:12 -04:00
jessop
81138e1a7a save optimize check state to allow page navigation 2020-03-30 14:19:43 -04:00
Sean Yesmunt
6c0436cf14 make sure channel is valid before reading permanent_url 2020-03-27 11:52:35 -04:00
jessopb
7e093c31f3
Merge pull request #294 from lbryio/fix-falseAnonChans
fix falsely anonymous channels in publishes
2020-03-26 16:31:57 -04:00
jessop
369a0046ce fix falsely anonymous channels in publishes 2020-03-26 16:25:21 -04:00
Jeremy Kauffman
7517aceb07
Merge pull request #291 from lbryio/connection-string-override
Connection string override
2020-03-20 10:39:26 -04:00
Sean Yesmunt
db0f48b56f
Merge pull request #292 from lbryio/reposted_property
add reposted property
2020-03-19 13:21:59 -04:00
seanyesmunt
5d2fc0d22e update build 2020-03-19 13:20:58 -04:00
Jeremy Kauffman
d2f0cf1ca6 add reposted property 2020-03-19 12:54:13 -04:00
Akinwale Ariwodola
69ffd110db update type 2020-03-18 18:11:03 +01:00
Akinwale Ariwodola
81f58e3ac7 add connectionStringOverride parameter for resolve and claim_search 2020-03-17 22:57:27 +01:00
Akinwale Ariwodola
2f896bac53
resolved related tweak (#290)
* handle cases where no claim object is present
* fix variable
* update bundle
* get the title properly
2020-03-17 21:54:34 +01:00
jessopb
211e0830be
Merge pull request #289 from lbryio/defaultTags
no default tags
2020-03-16 16:44:57 -04:00
jessop
6ed0dde5cb no default tags 2020-03-16 16:42:27 -04:00
Jeremy Kauffman
4081322b44
Merge pull request #287 from lbryio/clean_tags
light tag cleanup
2020-03-16 13:51:09 -04:00
Jeremy Kauffman
37a01f56c1
light tag cleanup 2020-03-12 12:21:48 -04:00
Sean Yesmunt
8245b05574
Merge pull request #286 from lbryio/improvement/api-method
Add method name to the API query string
2020-03-12 11:26:15 -04:00
Sean Yesmunt
8c10601239 update build 2020-03-12 11:25:33 -04:00
Andrey Beletsky
a097c14c31 Add method name to the API query string 2020-03-12 15:28:47 +07:00
jessopb
0ce0cf1de3
Merge pull request #285 from lbryio/tagsAnalytics
export taglist selector
2020-03-10 10:09:31 -04:00
jessop
1dd2d4cff5 export taglist selector 2020-03-10 10:08:23 -04:00
Jeremy Kauffman
b2897c3cd5
Merge pull request #283 from lbryio/feat-transcoding
enable optimize_file flag in lbry publish
2020-03-09 15:17:02 -04:00
Sean Yesmunt
a2304c00ca
Merge pull request #280 from lbryio/enforce-channel-comments
Enforce channel comments
2020-03-09 11:56:38 -07:00
Oleg Silkin
81d3befa03 prettier & lint 2020-03-09 14:56:00 -04:00
Oleg Silkin
971ec8cbb0 Error toast for non-anonymous comment constraint 2020-03-09 14:56:00 -04:00
Oleg Silkin
09a6b6ce17 Forces the channel param to always be present, and not return a null channel. 2020-03-09 14:56:00 -04:00
Sean Yesmunt
9009708fff
Merge pull request #284 from lbryio/myclaims-return-canonical
use canonical_url when returning own claim urls and fix lint
2020-03-09 07:35:05 -07:00
Sean Yesmunt
76207fb3c8 use canonical_url when returning own claim urls and fix lint 2020-03-09 10:29:13 -04:00
jessop
fd615dbb26 enable transcoding flag 2020-03-02 01:28:36 -05:00
Sean Yesmunt
f36b91496b add show_reposts to list of settings constants 2020-02-28 12:00:53 -05:00
Sean Yesmunt
85b58eea00
Merge pull request #282 from lbryio/escape-regex
escape curly braces in regexInvalidURI
2020-02-26 13:22:02 -05:00
Akinwale Ariwodola
9c48cce570 escape curly braces in regexInvalidURI 2020-02-26 19:11:36 +01:00
Akinwale Ariwodola
2c2ac7130a
Merge pull request #281 from lbryio/mobile-related-content-fix
fix selector for resolved related content
2020-02-26 19:08:18 +01:00
Sean Yesmunt
6f9ee589a7
Merge pull request #278 from lbryio/reduxSettings
Redux settings
2020-02-23 20:20:16 -05:00
Sean Yesmunt
1771d9c39d
Merge pull request #279 from lbryio/repost-callback
return repost claim in doRepost
2020-02-23 20:19:29 -05:00
Sean Yesmunt
929c5de0e2 return repost claim in doRepost 2020-02-23 20:19:01 -05:00
jessop
b4fbc212ca remove tvWelcome 2020-02-21 12:50:05 -05:00
jessop
0bbebb186e fix bug in sync 2020-02-21 12:05:49 -05:00
Akinwale Ariwodola
5c874e9217 fix selector for resolved related content 2020-02-21 17:12:45 +01:00
Sean Yesmunt
863ffdf7dc
Merge pull request #277 from lbryio/reduxSettings
migrates settings to redux
2020-02-21 10:06:37 -05:00
jessop
04774dfe22 sync welcome and 3p analytics pref 2020-02-20 00:57:55 -05:00
jessop
67a5654ffe migrates settings to redux 2020-02-16 16:08:34 -05:00
Sean Yesmunt
3d64f8acc6 call claim_list after publishing to ensure repost is listed in my claims 2020-02-13 11:37:36 -05:00
Sean Yesmunt
50c69fe1dc
Merge pull request #276 from lbryio/resolveLists
list resolve defaults to true
2020-02-13 09:57:45 -05:00
jessop
50aa80d139 channel list results applied as resolved 2020-02-12 17:23:52 -05:00
jessop
46e5196f7c list resolve defaults to true 2020-02-12 12:17:50 -05:00
Sean Yesmunt
adebb2d625
Merge pull request #274 from lbryio/channel-list-resolve
add resolve parameter to channel_list
2020-02-12 12:11:43 -05:00
Sean Yesmunt
ad58b8c6b9
Merge pull request #275 from lbryio/repost-fixes
add repost_url and also add a way to return the original claim instead of the reposted claim
2020-02-12 09:45:12 -05:00
Sean Yesmunt
e7c42df2ad add repost_url and also add a way to return the original claim instead of the reposted claim 2020-02-12 09:43:41 -05:00
Sean Yesmunt
2f6d0b909d
Merge pull request #271 from lbryio/repost
basic repost support
2020-02-11 10:37:32 -05:00
Sean Yesmunt
bd07919a72 redux for reposts 2020-02-10 10:49:45 -05:00
Sean Yesmunt
a640c5cb0b
Merge pull request #272 from Yamboy1/patch-3
Add html to the list of "documents"
2020-02-10 10:40:47 -05:00
Akinwale Ariwodola
1de1d534c9 add resolve parameter to channel_list 2020-02-10 12:20:48 +01:00
Akinwale Ariwodola
b2ef261760 Resolved search updates (#273)
* add doResolvedSearch actions which returns resolved search results
* add recommended content selector
* update ResolvedSearchResult type
* support for multiple pages of resolved search results
* add nsfw flag
2020-02-10 12:04:34 +01:00
Yamboy1
c262a68f8f
Add html to the list of "documents"
Must have done this locally and forgot about it
2020-02-08 09:59:10 +13:00
Sean Yesmunt
0ef4b9bb0c
Update LICENSE (#270)
Update LICENSE
2020-02-03 09:19:33 -05:00
YULIUS KURNIAWAN KRISTIANTO
afc76b08d7
Update LICENSE 2020-02-03 05:55:34 +07:00
Thomas Zarebczan
f0a0a59c4c
add known tags 2020-02-01 13:33:04 -05:00
jessopb
87ae7faf1c
Merge pull request #268 from lbryio/feat-blockNsfwOnSearch
Feat block nsfw on search
2020-01-31 14:39:34 -05:00
jessop
2b01ce4e79 fix flow errors 2020-01-31 14:37:50 -05:00
jessop
635381a616 convert search options to expandable object
wip

bundle
2020-01-31 13:41:33 -05:00
Sean Yesmunt
bfbaa0dbdd fix typo 2020-01-30 17:44:08 -05:00
Sean Yesmunt
87198619df
handle repost claims (#269)
handle repost claims
2020-01-30 17:17:53 -05:00
Sean Yesmunt
f77bee9cf1 handle repost claims 2020-01-30 17:13:40 -05:00
Sean Yesmunt
b4512c25e3
Merge pull request #267 from lbryio/feat-recommendedResultsNoMature
Feat recommended results no mature
2020-01-29 14:22:22 -05:00
jessop
71e85536db rebase 2020-01-29 14:20:59 -05:00
jessop
d8c65c5bd3 support blocking mature recommended results if uri is not mature 2020-01-29 14:19:24 -05:00
Sean Yesmunt
b5bbece10d fix typo 2020-01-29 14:18:15 -05:00
Sean Yesmunt
8a7084ee98
Corrects error messages (#265)
Corrects error messages
2020-01-29 14:08:32 -05:00
Sean Yesmunt
e8602bdf5e yarn build 2020-01-29 14:07:43 -05:00
Oleg Silkin
4b4217879c Error for abandon operations as well 2020-01-28 17:34:28 -05:00
Oleg Silkin
50528607e7 Fixes bug where comments would be incorrectly marked as updated & adds user-friendly messages 2020-01-28 17:20:23 -05:00
Oleg Silkin
0a4e0ca33f Corrects error messages 2020-01-28 16:45:05 -05:00
Sean Yesmunt
968686be68 update build 2020-01-23 16:01:27 -05:00
Sean Yesmunt
2f68494b71
Updates comment-related redux code to support sdk version 0.53.0 (#259)
Updates comment-related redux code to support sdk version 0.53.0
2020-01-23 15:50:01 -05:00
Sean Yesmunt
cda1f431b7 update build 2020-01-22 09:28:47 -05:00
Sean Yesmunt
7a1feea32a
channel list will no longer overwrite resolved claims byId (#264)
channel list will no longer overwrite resolved claims byId
2020-01-22 09:27:44 -05:00
Akinwale Ariwodola
17a5260c3f
add doResolvedSearch actions which returns resolved search results (#258)
* add doResolvedSearch actions which returns resolved search results
* add recommended content selector
* update ResolvedSearchResult type
* support for multiple pages of resolved search results
2020-01-22 02:04:50 +01:00
jessop
11840e01b6 channel list will no longer overwrite resolved claims byId 2020-01-21 18:24:39 -05:00
Sean Yesmunt
f0891dd298 fix publishing 2020-01-21 13:33:24 -05:00
Sean Yesmunt
24c6dc1177
Fix timestamp if channel has the same name as URL (#263)
Fix timestamp if channel has the same name as URL
2020-01-21 12:16:15 -05:00
Thomas Zarebczan
abe72c1a82 fix: publish bug with same uri + channel 2020-01-21 12:00:18 -05:00
Sean Yesmunt
3495066d35 update build 2020-01-21 11:46:22 -05:00
Sean Yesmunt
63373519e8
Merge pull request #262 from Yamboy1/patch-2
Remove assumption that a publish has a language
2020-01-21 11:44:59 -05:00
Oleg Silkin
bc2436dce2 Corrects abandon return type & fixes issue where redux wasn't being changed 2020-01-21 00:46:14 -05:00
Oleg Silkin
1e1fcb8b18 Merge branch 'master' of https://github.com/lbryio/lbry-redux into comment-edits
 Conflicts:
	dist/bundle.es.js
2020-01-19 21:29:43 -05:00
Oleg Silkin
fd0d097c91 Cleanup 2020-01-19 21:27:36 -05:00
Yamboy1
1a12e73c2c
Remove assumption that a publish has a language 2020-01-19 16:21:17 +13:00
Sean Yesmunt
a93b09c6bd
Merge pull request #261 from lbryio/publish-language
Publish language fix (@Yamboy1's changes)
2020-01-16 11:01:43 -05:00
Sean Yesmunt
648fff0c6a cleanup 2020-01-16 10:59:35 -05:00
Yamboy1
d79b8ce226 Fix wrong language when editing claim 2020-01-15 13:17:25 +13:00
Oleg Silkin
c30a8ec66c Add todos 2020-01-13 17:04:41 -05:00
Oleg Silkin
2bdbf3d645 Implements reducers for COMMENT_UPDATE, COMMENT_HIDE, and COMMENT_ABANDON 2020-01-13 17:00:04 -05:00
Oleg Silkin
c0db949a53 lint 2020-01-13 16:55:28 -05:00
Oleg Silkin
13cd56dabc "comment edit" -> "comment update" 2020-01-13 16:52:23 -05:00
Oleg Silkin
24707eea5b fix type on parent_id param 2020-01-13 11:53:35 -05:00
Oleg Silkin
ff9493f334 Reflect changes in dist code 2020-01-09 21:29:54 -05:00
Oleg Silkin
1038ff16c8 Creates new selectors that are more consistent with the rest of the redux selectors for claims 2020-01-09 21:29:16 -05:00
Oleg Silkin
b65149fe00 Refactors old reducers to be compliant with new schema 2020-01-09 21:28:35 -05:00
Oleg Silkin
c171a38067 Adds parent_id param to comment_create to allow replies 2020-01-09 21:27:59 -05:00
Oleg Silkin
2844087334 Schema change for CommentsState 2020-01-09 21:27:23 -05:00
Oleg Silkin
586b07a243 adds edit, hide, and abandon methods to call the API with 2020-01-09 17:11:37 -05:00
Oleg Silkin
ba9e68b1da claim_id is now guaranteed 2020-01-09 17:10:54 -05:00
Oleg Silkin
78ec8c3c16 Adds edit, hide, and abandon action types from new lbry sdk api 2020-01-09 17:09:37 -05:00
Jeremy Kauffman
4cbbee4975
add lbry as default tag 2020-01-08 17:27:36 -05:00
Oleg Silkin
7c55a9e0f5 Adds edit, hide, and abandon action types from new lbry sdk api 2020-01-07 00:22:57 -05:00
Oleg Silkin
d45c160b10 Adds proper comment type 2020-01-07 00:21:13 -05:00
Akinwale Ariwodola
eb8fff0e5a
add optional resolveResults parameter to doSearch (#257) 2020-01-05 11:37:35 +01:00
Sean Yesmunt
e0eb6c232c
pass related_to flag to search for recommended content (#256)
pass related_to flag to search for recommended content
2020-01-02 10:29:44 -05:00
Thomas Zarebczan
a2be979986 fix: don't hard error on space only 2019-12-21 15:33:00 -05:00
Thomas Zarebczan
1a61569405 fix: hard errors on URL issues 2019-12-21 14:23:50 -05:00
Sean Yesmunt
0db20834f9 remove forward slash from search query when selecting 2019-12-20 12:05:35 -05:00
Sean Yesmunt
3779c05888 fix wallet balance subscribe when the api call fails 2019-12-20 11:40:29 -05:00
Akinwale Ariwodola
abe90e0665
Merge pull request #254 from lbryio/fix-send-tip-i18n
doSendTip i18n fix
2019-12-20 07:58:54 +01:00
Sean Yesmunt
8086ccdc01 pass related_to flag to search for recommended content 2019-12-19 11:27:15 -05:00
Sean Yesmunt
04615f0d6b
Merge pull request #255 from lbryio/feat-reconnectStatus
provide walletReconnecting state in wallet reducer
2019-12-19 11:26:41 -05:00
Akinwale Ariwodola
9081e377bd doSendTip i18n fix 2019-12-18 15:14:24 +01:00
87 changed files with 12770 additions and 3625 deletions

View file

@ -1,5 +1,6 @@
[ignore] [ignore]
[include] [include]
[libs] [libs]
@ -12,4 +13,5 @@ module.name_mapper='^redux\(.*\)$' -> '<PROJECT_ROOT>/src/redux\1'
module.name_mapper='^util\(.*\)$' -> '<PROJECT_ROOT>/src/util\1' module.name_mapper='^util\(.*\)$' -> '<PROJECT_ROOT>/src/util\1'
module.name_mapper='^constants\(.*\)$' -> '<PROJECT_ROOT>/src/constants\1' module.name_mapper='^constants\(.*\)$' -> '<PROJECT_ROOT>/src/constants\1'
module.name_mapper='^lbry\(.*\)$' -> '<PROJECT_ROOT>/src/lbry\1' module.name_mapper='^lbry\(.*\)$' -> '<PROJECT_ROOT>/src/lbry\1'
module.name_mapper='^lbry-first\(.*\)$' -> '<PROJECT_ROOT>/src/lbry-first\1'
module.name_mapper='^lbryURI\(.*\)$' -> '<PROJECT_ROOT>/src/lbryURI\1' module.name_mapper='^lbryURI\(.*\)$' -> '<PROJECT_ROOT>/src/lbryURI\1'

View file

@ -1,6 +1,6 @@
The MIT License (MIT) The MIT License (MIT)
Copyright (c) 2017-2019 LBRY Inc Copyright (c) 2017-2021 LBRY Inc
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish,

View file

@ -20,6 +20,9 @@ yarn link lbry-redux
### Build ### Build
Run `$ yarn build`. If the symlink does not work, just build the file and move the `bundle.js` file into the `node_modules/` folder. Run `$ yarn build`. If the symlink does not work, just build the file and move the `bundle.js` file into the `node_modules/` folder.
### Tests
Run `$ yarn test`.
## Contributing ## Contributing
We :heart: contributions from everyone! We welcome [bug reports](https://github.com/lbryio/lbry-redux/issues/), [bug fixes](https://github.com/lbryio/lbry-redux/pulls) and feedback on the module is always appreciated. We :heart: contributions from everyone! We welcome [bug reports](https://github.com/lbryio/lbry-redux/issues/), [bug fixes](https://github.com/lbryio/lbry-redux/pulls) and feedback on the module is always appreciated.

5290
dist/bundle.es.js vendored

File diff suppressed because one or more lines are too long

View file

@ -1,14 +1,16 @@
// @flow // @flow
declare type Claim = StreamClaim | ChannelClaim; declare type Claim = StreamClaim | ChannelClaim | CollectionClaim;
declare type ChannelClaim = GenericClaim & { declare type ChannelClaim = GenericClaim & {
is_channel_signature_valid?: boolean, // we may have signed channels in the future
value: ChannelMetadata, value: ChannelMetadata,
}; };
declare type CollectionClaim = GenericClaim & {
value: CollectionMetadata,
};
declare type StreamClaim = GenericClaim & { declare type StreamClaim = GenericClaim & {
is_channel_signature_valid?: boolean,
value: StreamMetadata, value: StreamMetadata,
}; };
@ -23,7 +25,8 @@ declare type GenericClaim = {
decoded_claim: boolean, // Not available currently https://github.com/lbryio/lbry/issues/2044 decoded_claim: boolean, // Not available currently https://github.com/lbryio/lbry/issues/2044
timestamp?: number, // date of last transaction timestamp?: number, // date of last transaction
height: number, // block height the tx was confirmed height: number, // block height the tx was confirmed
is_mine: boolean, is_channel_signature_valid?: boolean,
is_my_output: boolean,
name: string, name: string,
normalized_name: string, // `name` normalized via unicode NFD spec, normalized_name: string, // `name` normalized via unicode NFD spec,
nout: number, // index number for an output of a tx nout: number, // index number for an output of a tx
@ -31,8 +34,13 @@ declare type GenericClaim = {
short_url: string, // permanent_url with short id, no channel short_url: string, // permanent_url with short id, no channel
txid: string, // unique tx id txid: string, // unique tx id
type: 'claim' | 'update' | 'support', type: 'claim' | 'update' | 'support',
value_type: 'stream' | 'channel', value_type: 'stream' | 'channel' | 'collection',
signing_channel?: ChannelClaim, signing_channel?: ChannelClaim,
reposted_claim?: GenericClaim,
repost_channel_url?: string,
repost_url?: string,
repost_bid_amount?: string,
purchase_receipt?: PurchaseReceipt,
meta: { meta: {
activation_height: number, activation_height: number,
claims_in_channel?: number, claims_in_channel?: number,
@ -42,6 +50,7 @@ declare type GenericClaim = {
expiration_height: number, expiration_height: number,
is_controlling: boolean, is_controlling: boolean,
support_amount: string, support_amount: string,
reposted: number,
trending_global: number, trending_global: number,
trending_group: number, trending_group: number,
trending_local: number, trending_local: number,
@ -69,6 +78,10 @@ declare type ChannelMetadata = GenericMetadata & {
featured?: Array<string>, featured?: Array<string>,
}; };
declare type CollectionMetadata = GenericMetadata & {
claims: Array<string>,
}
declare type StreamMetadata = GenericMetadata & { declare type StreamMetadata = GenericMetadata & {
license?: string, // License "title" ex: Creative Commons, Custom copyright license?: string, // License "title" ex: Creative Commons, Custom copyright
license_url?: string, // Link to full license license_url?: string, // Link to full license
@ -119,3 +132,83 @@ declare type Fee = {
currency: string, currency: string,
address: string, address: string,
}; };
declare type PurchaseReceipt = {
address: string,
amount: string,
claim_id: string,
confirmations: number,
height: number,
nout: number,
timestamp: number,
txid: string,
type: 'purchase',
};
declare type ClaimActionResolveInfo = {
[string]: {
stream: ?StreamClaim,
channel: ?ChannelClaim,
claimsInChannel: ?number,
collection: ?CollectionClaim,
},
}
declare type ChannelUpdateParams = {
claim_id: string,
bid?: string,
title?: string,
cover_url?: string,
thumbnail_url?: string,
description?: string,
website_url?: string,
email?: string,
tags?: Array<string>,
replace?: boolean,
languages?: Array<string>,
locations?: Array<string>,
blocking?: boolean,
}
declare type ChannelPublishParams = {
name: string,
bid: string,
blocking?: true,
title?: string,
cover_url?: string,
thumbnail_url?: string,
description?: string,
website_url?: string,
email?: string,
tags?: Array<string>,
languages?: Array<string>,
}
declare type CollectionUpdateParams = {
claim_id: string,
claim_ids?: Array<string>,
bid?: string,
title?: string,
cover_url?: string,
thumbnail_url?: string,
description?: string,
website_url?: string,
email?: string,
tags?: Array<string>,
replace?: boolean,
languages?: Array<string>,
locations?: Array<string>,
blocking?: boolean,
}
declare type CollectionPublishParams = {
name: string,
bid: string,
claim_ids: Array<string>,
blocking?: true,
title?: string,
thumbnail_url?: string,
description?: string,
tags?: Array<string>,
languages?: Array<string>,
}

29
dist/flow-typed/CoinSwap.js vendored Normal file
View file

@ -0,0 +1,29 @@
declare type CoinSwapInfo = {
chargeCode: string,
coins: Array<string>,
sendAddresses: { [string]: string},
sendAmounts: { [string]: any },
lbcAmount: number,
status?: {
status: string,
receiptCurrency: string,
receiptTxid: string,
lbcTxid: string,
},
}
declare type CoinSwapState = {
coinSwaps: Array<CoinSwapInfo>,
};
declare type CoinSwapAddAction = {
type: string,
data: CoinSwapInfo,
};
declare type CoinSwapRemoveAction = {
type: string,
data: {
chargeCode: string,
},
};

34
dist/flow-typed/Collections.js vendored Normal file
View file

@ -0,0 +1,34 @@
declare type Collection = {
id: string,
items: Array<?string>,
name: string,
type: string,
updatedAt: number,
totalItems?: number,
sourceId?: string, // if copied, claimId of original collection
};
declare type CollectionState = {
unpublished: CollectionGroup,
resolved: CollectionGroup,
pending: CollectionGroup,
edited: CollectionGroup,
builtin: CollectionGroup,
saved: Array<string>,
isResolvingCollectionById: { [string]: boolean },
error?: string | null,
};
declare type CollectionGroup = {
[string]: Collection,
}
declare type CollectionEditParams = {
claims?: Array<Claim>,
remove?: boolean,
claimIds?: Array<string>,
replace?: boolean,
order?: { from: number, to: number },
type?: string,
name?: string,
}

View file

@ -1,19 +0,0 @@
declare type Comment = {
author?: string,
author_url?: string,
claim_index?: number,
comment_id?: number,
downvotes?: number,
message: string,
omitted?: number,
reply_count?: number,
time_posted?: number,
upvotes?: number,
parent_id?: number,
};
declare type CommentsState = {
byId: {},
isLoading: boolean,
commentsByUri: { [string]: string },
}

View file

@ -11,6 +11,8 @@ declare type FileListItem = {
claim_id: string, claim_id: string,
claim_name: string, claim_name: string,
completed: false, completed: false,
content_fee?: { txid: string },
purchase_receipt?: { txid: string, amount: string },
download_directory: string, download_directory: string,
download_path: string, download_path: string,
file_name: string, file_name: string,
@ -20,6 +22,7 @@ declare type FileListItem = {
outpoint: string, outpoint: string,
points_paid: number, points_paid: number,
protobuf: string, protobuf: string,
reflector_progress: number,
sd_hash: string, sd_hash: string,
status: string, status: string,
stopped: false, stopped: false,
@ -29,10 +32,12 @@ declare type FileListItem = {
suggested_file_name: string, suggested_file_name: string,
total_bytes: number, total_bytes: number,
total_bytes_lower_bound: number, total_bytes_lower_bound: number,
is_fully_reflected: boolean,
// TODO: sdk plans to change `tx` // TODO: sdk plans to change `tx`
// It isn't currently used by the apps // It isn't currently used by the apps
tx: {}, tx: {},
txid: string, txid: string,
uploading_to_reflector: boolean,
written_bytes: number, written_bytes: number,
}; };
@ -66,7 +71,7 @@ declare type PurchaseUriStarted = {
}; };
declare type DeletePurchasedUri = { declare type DeletePurchasedUri = {
type: ACTIONS.DELETE_PURCHASED_URI, type: ACTIONS.CLEAR_PURCHASED_URI_SUCCESS,
data: { data: {
uri: string, uri: string,
}, },

View file

@ -7,10 +7,6 @@ declare type StatusResponse = {
download_progress: number, download_progress: number,
downloading_headers: boolean, downloading_headers: boolean,
}, },
connection_status: {
code: string,
message: string,
},
dht: { dht: {
node_id: string, node_id: string,
peers_in_routing_table: number, peers_in_routing_table: number,
@ -45,6 +41,7 @@ declare type StatusResponse = {
redirects: {}, redirects: {},
}, },
wallet: ?{ wallet: ?{
connected: string,
best_blockhash: string, best_blockhash: string,
blocks: number, blocks: number,
blocks_behind: number, blocks_behind: number,
@ -78,7 +75,7 @@ declare type BalanceResponse = {
declare type ResolveResponse = { declare type ResolveResponse = {
// Keys are the url(s) passed to resolve // Keys are the url(s) passed to resolve
[string]: { error?: {}, stream?: StreamClaim, channel?: ChannelClaim, claimsInChannel?: number }, [string]: { error?: {}, stream?: StreamClaim, channel?: ChannelClaim, collection?: CollectionClaim, claimsInChannel?: number },
}; };
declare type GetResponse = FileListItem & { error?: string }; declare type GetResponse = FileListItem & { error?: string };
@ -125,12 +122,39 @@ declare type ChannelUpdateResponse = GenericTxResponse & {
}; };
declare type CommentCreateResponse = Comment; declare type CommentCreateResponse = Comment;
declare type CommentListResponse = { declare type CommentUpdateResponse = Comment;
items: Array<Comment>,
page: number, declare type MyReactions = {
page_size: number, // Keys are the commentId
total_items: number, [string]: Array<string>,
total_pages: number, };
declare type OthersReactions = {
// Keys are the commentId
[string]: {
// Keys are the reaction_type, e.g. 'like'
[string]: number,
},
};
declare type CommentReactListResponse = {
my_reactions: Array<MyReactions>,
others_reactions: Array<OthersReactions>,
};
declare type CommentHideResponse = {
// keyed by the CommentIds entered
[string]: { hidden: boolean },
};
declare type CommentPinResponse = {
// keyed by the CommentIds entered
items: Comment,
};
declare type CommentAbandonResponse = {
// keyed by the CommentId given
abandoned: boolean,
}; };
declare type ChannelListResponse = { declare type ChannelListResponse = {
@ -141,6 +165,42 @@ declare type ChannelListResponse = {
total_pages: number, total_pages: number,
}; };
declare type ChannelSignResponse = {
signature: string,
signing_ts: string,
};
declare type CollectionCreateResponse = {
outputs: Array<Claim>,
page: number,
page_size: number,
total_items: number,
total_pages: number,
}
declare type CollectionListResponse = {
items: Array<Claim>,
page: number,
page_size: number,
total_items: number,
total_pages: number,
};
declare type CollectionResolveResponse = {
items: Array<Claim>,
total_items: number,
};
declare type CollectionResolveOptions = {
claim_id: string,
};
declare type CollectionListOptions = {
page: number,
page_size: number,
resolve?: boolean,
};
declare type FileListResponse = { declare type FileListResponse = {
items: Array<FileListItem>, items: Array<FileListItem>,
page: number, page: number,
@ -175,6 +235,7 @@ declare type WalletListResponse = Array<{
declare type WalletStatusResponse = { declare type WalletStatusResponse = {
is_encrypted: boolean, is_encrypted: boolean,
is_locked: boolean, is_locked: boolean,
is_syncing: boolean,
}; };
declare type SyncApplyResponse = { declare type SyncApplyResponse = {
@ -192,6 +253,31 @@ declare type StreamListResponse = {
total_pages: number, total_pages: number,
}; };
declare type StreamRepostOptions = {
name: string,
bid: string,
claim_id: string,
channel_id?: string,
};
declare type StreamRepostResponse = GenericTxResponse;
declare type PurchaseListResponse = {
items: Array<PurchaseReceipt & { claim: StreamClaim }>,
page: number,
page_size: number,
total_items: number,
total_pages: number,
};
declare type PurchaseListOptions = {
page: number,
page_size: number,
resolve: boolean,
claim_id?: string,
channel_id?: string,
};
// //
// Types used in the generic Lbry object that is exported // Types used in the generic Lbry object that is exported
// //
@ -200,6 +286,8 @@ declare type LbryTypes = {
connectPromise: ?Promise<any>, connectPromise: ?Promise<any>,
connect: () => void, connect: () => void,
daemonConnectionString: string, daemonConnectionString: string,
alternateConnectionString: string,
methodsUsingAlternateConnectionString: Array<string>,
apiRequestHeaders: { [key: string]: string }, apiRequestHeaders: { [key: string]: string },
setDaemonConnectionString: string => void, setDaemonConnectionString: string => void,
setApiHeader: (string, string) => void, setApiHeader: (string, string) => void,
@ -222,12 +310,19 @@ declare type LbryTypes = {
channel_update: (params: {}) => Promise<ChannelUpdateResponse>, channel_update: (params: {}) => Promise<ChannelUpdateResponse>,
channel_import: (params: {}) => Promise<string>, channel_import: (params: {}) => Promise<string>,
channel_list: (params: {}) => Promise<ChannelListResponse>, channel_list: (params: {}) => Promise<ChannelListResponse>,
channel_sign: (params: {}) => Promise<ChannelSignResponse>,
stream_abandon: (params: {}) => Promise<GenericTxResponse>, stream_abandon: (params: {}) => Promise<GenericTxResponse>,
stream_list: (params: {}) => Promise<StreamListResponse>, stream_list: (params: {}) => Promise<StreamListResponse>,
channel_abandon: (params: {}) => Promise<GenericTxResponse>, channel_abandon: (params: {}) => Promise<GenericTxResponse>,
support_create: (params: {}) => Promise<GenericTxResponse>, support_create: (params: {}) => Promise<GenericTxResponse>,
support_list: (params: {}) => Promise<SupportListResponse>, support_list: (params: {}) => Promise<SupportListResponse>,
support_abandon: (params: {}) => Promise<SupportAbandonResponse>, support_abandon: (params: {}) => Promise<SupportAbandonResponse>,
stream_repost: (params: StreamRepostOptions) => Promise<StreamRepostResponse>,
purchase_list: (params: PurchaseListOptions) => Promise<PurchaseListResponse>,
collection_resolve: (params: CollectionResolveOptions) => Promise<CollectionResolveResponse>,
collection_list: (params: CollectionListOptions) => Promise<CollectionListResponse>,
collection_create: (params: {}) => Promise<CollectionCreateResponse>,
collection_update: (params: {}) => Promise<CollectionCreateResponse>,
// File fetching and manipulation // File fetching and manipulation
file_list: (params: {}) => Promise<FileListResponse>, file_list: (params: {}) => Promise<FileListResponse>,
@ -240,8 +335,10 @@ declare type LbryTypes = {
preference_set: (params: {}) => Promise<any>, preference_set: (params: {}) => Promise<any>,
// Commenting // Commenting
comment_list: (params: {}) => Promise<CommentListResponse>, comment_update: (params: {}) => Promise<CommentUpdateResponse>,
comment_create: (params: {}) => Promise<CommentCreateResponse>, comment_hide: (params: {}) => Promise<CommentHideResponse>,
comment_abandon: (params: {}) => Promise<CommentAbandonResponse>,
// Wallet utilities // Wallet utilities
wallet_balance: (params: {}) => Promise<BalanceResponse>, wallet_balance: (params: {}) => Promise<BalanceResponse>,
wallet_decrypt: (prams: {}) => Promise<boolean>, wallet_decrypt: (prams: {}) => Promise<boolean>,
@ -254,6 +351,7 @@ declare type LbryTypes = {
address_unused: (params: {}) => Promise<string>, // New address address_unused: (params: {}) => Promise<string>, // New address
address_list: (params: {}) => Promise<string>, address_list: (params: {}) => Promise<string>,
transaction_list: (params: {}) => Promise<TxListResponse>, transaction_list: (params: {}) => Promise<TxListResponse>,
txo_list: (params: {}) => Promise<any>,
// Sync // Sync
sync_hash: (params: {}) => Promise<string>, sync_hash: (params: {}) => Promise<string>,

99
dist/flow-typed/LbryFirst.js vendored Normal file
View file

@ -0,0 +1,99 @@
// @flow
declare type LbryFirstStatusResponse = {
Version: string,
Message: string,
Running: boolean,
Commit: string,
};
declare type LbryFirstVersionResponse = {
build: string,
lbrynet_version: string,
os_release: string,
os_system: string,
platform: string,
processor: string,
python_version: string,
};
/* SAMPLE UPLOAD RESPONSE (FULL)
"Video": {
"etag": "\"Dn5xIderbhAnUk5TAW0qkFFir0M/xlGLrlTox7VFTRcR8F77RbKtaU4\"",
"id": "8InjtdvVmwE",
"kind": "youtube#video",
"snippet": {
"categoryId": "22",
"channelId": "UCXiVsGTU88fJjheB2rqF0rA",
"channelTitle": "Mark Beamer",
"liveBroadcastContent": "none",
"localized": {
"title": "my title"
},
"publishedAt": "2020-05-05T04:17:53.000Z",
"thumbnails": {
"default": {
"height": 90,
"url": "https://i9.ytimg.com/vi/8InjtdvVmwE/default.jpg?sqp=CMTQw_UF&rs=AOn4CLB6dlhZMSMrazDlWRsitPgCsn8fVw",
"width": 120
},
"high": {
"height": 360,
"url": "https://i9.ytimg.com/vi/8InjtdvVmwE/hqdefault.jpg?sqp=CMTQw_UF&rs=AOn4CLB-Je_7l6qvASRAR_bSGWZHaXaJWQ",
"width": 480
},
"medium": {
"height": 180,
"url": "https://i9.ytimg.com/vi/8InjtdvVmwE/mqdefault.jpg?sqp=CMTQw_UF&rs=AOn4CLCvSnDLqVznRNMKuvJ_0misY_chPQ",
"width": 320
}
},
"title": "my title"
},
"status": {
"embeddable": true,
"license": "youtube",
"privacyStatus": "private",
"publicStatsViewable": true,
"uploadStatus": "uploaded"
}
}
*/
declare type UploadResponse = {
Video: {
id: string,
snippet: {
channelId: string,
},
status: {
uploadStatus: string,
},
},
};
declare type HasYTAuthResponse = {
HashAuth: boolean,
};
declare type YTSignupResponse = {};
//
// Types used in the generic LbryFirst object that is exported
//
declare type LbryFirstTypes = {
isConnected: boolean,
connectPromise: ?Promise<any>,
connect: () => void,
lbryFirstConnectionString: string,
apiRequestHeaders: { [key: string]: string },
setApiHeader: (string, string) => void,
unsetApiHeader: string => void,
overrides: { [string]: ?Function },
setOverride: (string, Function) => void,
// LbryFirst Methods
stop: () => Promise<string>,
status: () => Promise<StatusResponse>,
version: () => Promise<VersionResponse>,
upload: any => Promise<?UploadResponse>,
hasYTAuth: string => Promise<HasYTAuthResponse>,
ytSignup: () => Promise<YTSignupResponse>,
};

5
dist/flow-typed/Reflector.js vendored Normal file
View file

@ -0,0 +1,5 @@
declare type ReflectingUpdate = {
fileListItem: FileListItem,
progress: number | boolean,
stalled: boolean,
};

View file

@ -1,59 +0,0 @@
// @flow
import * as ACTIONS from 'constants/action_types';
declare type SearchSuggestion = {
value: string,
shorthand: string,
type: string,
};
declare type SearchOptions = {
// :(
// https://github.com/facebook/flow/issues/6492
RESULT_COUNT: number,
CLAIM_TYPE: string,
INCLUDE_FILES: string,
INCLUDE_CHANNELS: string,
INCLUDE_FILES_AND_CHANNELS: string,
MEDIA_AUDIO: string,
MEDIA_VIDEO: string,
MEDIA_TEXT: string,
MEDIA_IMAGE: string,
MEDIA_APPLICATION: string,
};
declare type SearchState = {
isActive: boolean,
searchQuery: string,
options: SearchOptions,
suggestions: { [string]: Array<SearchSuggestion> },
urisByQuery: {},
};
declare type SearchSuccess = {
type: ACTIONS.SEARCH_SUCCESS,
data: {
query: string,
uris: Array<string>,
},
};
declare type UpdateSearchQuery = {
type: ACTIONS.UPDATE_SEARCH_QUERY,
data: {
query: string,
},
};
declare type UpdateSearchSuggestions = {
type: ACTIONS.UPDATE_SEARCH_SUGGESTIONS,
data: {
query: string,
suggestions: Array<SearchSuggestion>,
},
};
declare type UpdateSearchOptions = {
type: ACTIONS.UPDATE_SEARCH_OPTIONS,
data: SearchOptions,
};

27
dist/flow-typed/Txo.js vendored Normal file
View file

@ -0,0 +1,27 @@
declare type Txo = {
amount: number,
claim_id: string,
normalized_name: string,
nout: number,
txid: string,
type: string,
value_type: string,
timestamp: number,
is_my_output: boolean,
is_my_input: boolean,
is_spent: boolean,
signing_channel?: {
channel_id: string,
},
};
declare type TxoListParams = {
page: number,
page_size: number,
type: string,
is_my_input?: boolean,
is_my_output?: boolean,
is_not_my_input?: boolean,
is_not_my_output?: boolean,
is_spent?: boolean,
};

View file

@ -12,6 +12,7 @@ declare type LbryUrlObj = {
secondaryClaimSequence?: number, secondaryClaimSequence?: number,
primaryBidPosition?: number, primaryBidPosition?: number,
secondaryBidPosition?: number, secondaryBidPosition?: number,
startTime?: number,
// Below are considered deprecated and should not be used due to unreliableness with claim.canonical_url // Below are considered deprecated and should not be used due to unreliableness with claim.canonical_url
claimName?: string, claimName?: string,

5
dist/flow-typed/npm/from-entries.js vendored Normal file
View file

@ -0,0 +1,5 @@
// @flow
declare module '@ungap/from-entries' {
declare module.exports: any;
}

5
dist/flow-typed/npm/uuid.js vendored Normal file
View file

@ -0,0 +1,5 @@
// @flow
declare module 'uuid' {
declare module.exports: any;
}

View file

@ -1,102 +0,0 @@
// flow-typed signature: 3cf668e64747095cab0bb360cf2fb34f
// flow-typed version: d659bd0cb8/uuid_v3.x.x/flow_>=v0.32.x
declare module "uuid" {
declare class uuid {
static (
options?: {|
random?: number[],
rng?: () => number[] | Buffer
|},
buffer?: number[] | Buffer,
offset?: number
): string,
static v1(
options?: {|
node?: number[],
clockseq?: number,
msecs?: number | Date,
nsecs?: number
|},
buffer?: number[] | Buffer,
offset?: number
): string,
static v4(
options?: {|
random?: number[],
rng?: () => number[] | Buffer
|},
buffer?: number[] | Buffer,
offset?: number
): string
}
declare module.exports: Class<uuid>;
}
declare module "uuid/v1" {
declare class v1 {
static (
options?: {|
node?: number[],
clockseq?: number,
msecs?: number | Date,
nsecs?: number
|},
buffer?: number[] | Buffer,
offset?: number
): string
}
declare module.exports: Class<v1>;
}
declare module "uuid/v3" {
declare class v3 {
static (
name?: string | number[],
namespace?: string | number[],
buffer?: number[] | Buffer,
offset?: number
): string,
static name: string,
static DNS: string,
static URL: string
}
declare module.exports: Class<v3>;
}
declare module "uuid/v4" {
declare class v4 {
static (
options?: {|
random?: number[],
rng?: () => number[] | Buffer
|},
buffer?: number[] | Buffer,
offset?: number
): string
}
declare module.exports: Class<v4>;
}
declare module "uuid/v5" {
declare class v5 {
static (
name?: string | number[],
namespace?: string | number[],
buffer?: number[] | Buffer,
offset?: number
): string,
static name: string,
static DNS: string,
static URL: string
}
declare module.exports: Class<v5>;
}

103
flow-typed/Claim.js vendored
View file

@ -1,14 +1,16 @@
// @flow // @flow
declare type Claim = StreamClaim | ChannelClaim; declare type Claim = StreamClaim | ChannelClaim | CollectionClaim;
declare type ChannelClaim = GenericClaim & { declare type ChannelClaim = GenericClaim & {
is_channel_signature_valid?: boolean, // we may have signed channels in the future
value: ChannelMetadata, value: ChannelMetadata,
}; };
declare type CollectionClaim = GenericClaim & {
value: CollectionMetadata,
};
declare type StreamClaim = GenericClaim & { declare type StreamClaim = GenericClaim & {
is_channel_signature_valid?: boolean,
value: StreamMetadata, value: StreamMetadata,
}; };
@ -23,7 +25,8 @@ declare type GenericClaim = {
decoded_claim: boolean, // Not available currently https://github.com/lbryio/lbry/issues/2044 decoded_claim: boolean, // Not available currently https://github.com/lbryio/lbry/issues/2044
timestamp?: number, // date of last transaction timestamp?: number, // date of last transaction
height: number, // block height the tx was confirmed height: number, // block height the tx was confirmed
is_mine: boolean, is_channel_signature_valid?: boolean,
is_my_output: boolean,
name: string, name: string,
normalized_name: string, // `name` normalized via unicode NFD spec, normalized_name: string, // `name` normalized via unicode NFD spec,
nout: number, // index number for an output of a tx nout: number, // index number for an output of a tx
@ -31,8 +34,13 @@ declare type GenericClaim = {
short_url: string, // permanent_url with short id, no channel short_url: string, // permanent_url with short id, no channel
txid: string, // unique tx id txid: string, // unique tx id
type: 'claim' | 'update' | 'support', type: 'claim' | 'update' | 'support',
value_type: 'stream' | 'channel', value_type: 'stream' | 'channel' | 'collection',
signing_channel?: ChannelClaim, signing_channel?: ChannelClaim,
reposted_claim?: GenericClaim,
repost_channel_url?: string,
repost_url?: string,
repost_bid_amount?: string,
purchase_receipt?: PurchaseReceipt,
meta: { meta: {
activation_height: number, activation_height: number,
claims_in_channel?: number, claims_in_channel?: number,
@ -42,6 +50,7 @@ declare type GenericClaim = {
expiration_height: number, expiration_height: number,
is_controlling: boolean, is_controlling: boolean,
support_amount: string, support_amount: string,
reposted: number,
trending_global: number, trending_global: number,
trending_group: number, trending_group: number,
trending_local: number, trending_local: number,
@ -69,6 +78,10 @@ declare type ChannelMetadata = GenericMetadata & {
featured?: Array<string>, featured?: Array<string>,
}; };
declare type CollectionMetadata = GenericMetadata & {
claims: Array<string>,
}
declare type StreamMetadata = GenericMetadata & { declare type StreamMetadata = GenericMetadata & {
license?: string, // License "title" ex: Creative Commons, Custom copyright license?: string, // License "title" ex: Creative Commons, Custom copyright
license_url?: string, // Link to full license license_url?: string, // Link to full license
@ -119,3 +132,83 @@ declare type Fee = {
currency: string, currency: string,
address: string, address: string,
}; };
declare type PurchaseReceipt = {
address: string,
amount: string,
claim_id: string,
confirmations: number,
height: number,
nout: number,
timestamp: number,
txid: string,
type: 'purchase',
};
declare type ClaimActionResolveInfo = {
[string]: {
stream: ?StreamClaim,
channel: ?ChannelClaim,
claimsInChannel: ?number,
collection: ?CollectionClaim,
},
}
declare type ChannelUpdateParams = {
claim_id: string,
bid?: string,
title?: string,
cover_url?: string,
thumbnail_url?: string,
description?: string,
website_url?: string,
email?: string,
tags?: Array<string>,
replace?: boolean,
languages?: Array<string>,
locations?: Array<string>,
blocking?: boolean,
}
declare type ChannelPublishParams = {
name: string,
bid: string,
blocking?: true,
title?: string,
cover_url?: string,
thumbnail_url?: string,
description?: string,
website_url?: string,
email?: string,
tags?: Array<string>,
languages?: Array<string>,
}
declare type CollectionUpdateParams = {
claim_id: string,
claim_ids?: Array<string>,
bid?: string,
title?: string,
cover_url?: string,
thumbnail_url?: string,
description?: string,
website_url?: string,
email?: string,
tags?: Array<string>,
replace?: boolean,
languages?: Array<string>,
locations?: Array<string>,
blocking?: boolean,
}
declare type CollectionPublishParams = {
name: string,
bid: string,
claim_ids: Array<string>,
blocking?: true,
title?: string,
thumbnail_url?: string,
description?: string,
tags?: Array<string>,
languages?: Array<string>,
}

29
flow-typed/CoinSwap.js vendored Normal file
View file

@ -0,0 +1,29 @@
declare type CoinSwapInfo = {
chargeCode: string,
coins: Array<string>,
sendAddresses: { [string]: string},
sendAmounts: { [string]: any },
lbcAmount: number,
status?: {
status: string,
receiptCurrency: string,
receiptTxid: string,
lbcTxid: string,
},
}
declare type CoinSwapState = {
coinSwaps: Array<CoinSwapInfo>,
};
declare type CoinSwapAddAction = {
type: string,
data: CoinSwapInfo,
};
declare type CoinSwapRemoveAction = {
type: string,
data: {
chargeCode: string,
},
};

34
flow-typed/Collections.js vendored Normal file
View file

@ -0,0 +1,34 @@
declare type Collection = {
id: string,
items: Array<?string>,
name: string,
type: string,
updatedAt: number,
totalItems?: number,
sourceId?: string, // if copied, claimId of original collection
};
declare type CollectionState = {
unpublished: CollectionGroup,
resolved: CollectionGroup,
pending: CollectionGroup,
edited: CollectionGroup,
builtin: CollectionGroup,
saved: Array<string>,
isResolvingCollectionById: { [string]: boolean },
error?: string | null,
};
declare type CollectionGroup = {
[string]: Collection,
}
declare type CollectionEditParams = {
claims?: Array<Claim>,
remove?: boolean,
claimIds?: Array<string>,
replace?: boolean,
order?: { from: number, to: number },
type?: string,
name?: string,
}

19
flow-typed/Comment.js vendored
View file

@ -1,19 +0,0 @@
declare type Comment = {
author?: string,
author_url?: string,
claim_index?: number,
comment_id?: number,
downvotes?: number,
message: string,
omitted?: number,
reply_count?: number,
time_posted?: number,
upvotes?: number,
parent_id?: number,
};
declare type CommentsState = {
byId: {},
isLoading: boolean,
commentsByUri: { [string]: string },
}

7
flow-typed/File.js vendored
View file

@ -11,6 +11,8 @@ declare type FileListItem = {
claim_id: string, claim_id: string,
claim_name: string, claim_name: string,
completed: false, completed: false,
content_fee?: { txid: string },
purchase_receipt?: { txid: string, amount: string },
download_directory: string, download_directory: string,
download_path: string, download_path: string,
file_name: string, file_name: string,
@ -20,6 +22,7 @@ declare type FileListItem = {
outpoint: string, outpoint: string,
points_paid: number, points_paid: number,
protobuf: string, protobuf: string,
reflector_progress: number,
sd_hash: string, sd_hash: string,
status: string, status: string,
stopped: false, stopped: false,
@ -29,10 +32,12 @@ declare type FileListItem = {
suggested_file_name: string, suggested_file_name: string,
total_bytes: number, total_bytes: number,
total_bytes_lower_bound: number, total_bytes_lower_bound: number,
is_fully_reflected: boolean,
// TODO: sdk plans to change `tx` // TODO: sdk plans to change `tx`
// It isn't currently used by the apps // It isn't currently used by the apps
tx: {}, tx: {},
txid: string, txid: string,
uploading_to_reflector: boolean,
written_bytes: number, written_bytes: number,
}; };
@ -66,7 +71,7 @@ declare type PurchaseUriStarted = {
}; };
declare type DeletePurchasedUri = { declare type DeletePurchasedUri = {
type: ACTIONS.DELETE_PURCHASED_URI, type: ACTIONS.CLEAR_PURCHASED_URI_SUCCESS,
data: { data: {
uri: string, uri: string,
}, },

124
flow-typed/Lbry.js vendored
View file

@ -7,10 +7,6 @@ declare type StatusResponse = {
download_progress: number, download_progress: number,
downloading_headers: boolean, downloading_headers: boolean,
}, },
connection_status: {
code: string,
message: string,
},
dht: { dht: {
node_id: string, node_id: string,
peers_in_routing_table: number, peers_in_routing_table: number,
@ -45,6 +41,7 @@ declare type StatusResponse = {
redirects: {}, redirects: {},
}, },
wallet: ?{ wallet: ?{
connected: string,
best_blockhash: string, best_blockhash: string,
blocks: number, blocks: number,
blocks_behind: number, blocks_behind: number,
@ -78,7 +75,7 @@ declare type BalanceResponse = {
declare type ResolveResponse = { declare type ResolveResponse = {
// Keys are the url(s) passed to resolve // Keys are the url(s) passed to resolve
[string]: { error?: {}, stream?: StreamClaim, channel?: ChannelClaim, claimsInChannel?: number }, [string]: { error?: {}, stream?: StreamClaim, channel?: ChannelClaim, collection?: CollectionClaim, claimsInChannel?: number },
}; };
declare type GetResponse = FileListItem & { error?: string }; declare type GetResponse = FileListItem & { error?: string };
@ -125,12 +122,39 @@ declare type ChannelUpdateResponse = GenericTxResponse & {
}; };
declare type CommentCreateResponse = Comment; declare type CommentCreateResponse = Comment;
declare type CommentListResponse = { declare type CommentUpdateResponse = Comment;
items: Array<Comment>,
page: number, declare type MyReactions = {
page_size: number, // Keys are the commentId
total_items: number, [string]: Array<string>,
total_pages: number, };
declare type OthersReactions = {
// Keys are the commentId
[string]: {
// Keys are the reaction_type, e.g. 'like'
[string]: number,
},
};
declare type CommentReactListResponse = {
my_reactions: Array<MyReactions>,
others_reactions: Array<OthersReactions>,
};
declare type CommentHideResponse = {
// keyed by the CommentIds entered
[string]: { hidden: boolean },
};
declare type CommentPinResponse = {
// keyed by the CommentIds entered
items: Comment,
};
declare type CommentAbandonResponse = {
// keyed by the CommentId given
abandoned: boolean,
}; };
declare type ChannelListResponse = { declare type ChannelListResponse = {
@ -141,6 +165,42 @@ declare type ChannelListResponse = {
total_pages: number, total_pages: number,
}; };
declare type ChannelSignResponse = {
signature: string,
signing_ts: string,
};
declare type CollectionCreateResponse = {
outputs: Array<Claim>,
page: number,
page_size: number,
total_items: number,
total_pages: number,
}
declare type CollectionListResponse = {
items: Array<Claim>,
page: number,
page_size: number,
total_items: number,
total_pages: number,
};
declare type CollectionResolveResponse = {
items: Array<Claim>,
total_items: number,
};
declare type CollectionResolveOptions = {
claim_id: string,
};
declare type CollectionListOptions = {
page: number,
page_size: number,
resolve?: boolean,
};
declare type FileListResponse = { declare type FileListResponse = {
items: Array<FileListItem>, items: Array<FileListItem>,
page: number, page: number,
@ -175,6 +235,7 @@ declare type WalletListResponse = Array<{
declare type WalletStatusResponse = { declare type WalletStatusResponse = {
is_encrypted: boolean, is_encrypted: boolean,
is_locked: boolean, is_locked: boolean,
is_syncing: boolean,
}; };
declare type SyncApplyResponse = { declare type SyncApplyResponse = {
@ -192,6 +253,31 @@ declare type StreamListResponse = {
total_pages: number, total_pages: number,
}; };
declare type StreamRepostOptions = {
name: string,
bid: string,
claim_id: string,
channel_id?: string,
};
declare type StreamRepostResponse = GenericTxResponse;
declare type PurchaseListResponse = {
items: Array<PurchaseReceipt & { claim: StreamClaim }>,
page: number,
page_size: number,
total_items: number,
total_pages: number,
};
declare type PurchaseListOptions = {
page: number,
page_size: number,
resolve: boolean,
claim_id?: string,
channel_id?: string,
};
// //
// Types used in the generic Lbry object that is exported // Types used in the generic Lbry object that is exported
// //
@ -200,6 +286,8 @@ declare type LbryTypes = {
connectPromise: ?Promise<any>, connectPromise: ?Promise<any>,
connect: () => void, connect: () => void,
daemonConnectionString: string, daemonConnectionString: string,
alternateConnectionString: string,
methodsUsingAlternateConnectionString: Array<string>,
apiRequestHeaders: { [key: string]: string }, apiRequestHeaders: { [key: string]: string },
setDaemonConnectionString: string => void, setDaemonConnectionString: string => void,
setApiHeader: (string, string) => void, setApiHeader: (string, string) => void,
@ -222,12 +310,19 @@ declare type LbryTypes = {
channel_update: (params: {}) => Promise<ChannelUpdateResponse>, channel_update: (params: {}) => Promise<ChannelUpdateResponse>,
channel_import: (params: {}) => Promise<string>, channel_import: (params: {}) => Promise<string>,
channel_list: (params: {}) => Promise<ChannelListResponse>, channel_list: (params: {}) => Promise<ChannelListResponse>,
channel_sign: (params: {}) => Promise<ChannelSignResponse>,
stream_abandon: (params: {}) => Promise<GenericTxResponse>, stream_abandon: (params: {}) => Promise<GenericTxResponse>,
stream_list: (params: {}) => Promise<StreamListResponse>, stream_list: (params: {}) => Promise<StreamListResponse>,
channel_abandon: (params: {}) => Promise<GenericTxResponse>, channel_abandon: (params: {}) => Promise<GenericTxResponse>,
support_create: (params: {}) => Promise<GenericTxResponse>, support_create: (params: {}) => Promise<GenericTxResponse>,
support_list: (params: {}) => Promise<SupportListResponse>, support_list: (params: {}) => Promise<SupportListResponse>,
support_abandon: (params: {}) => Promise<SupportAbandonResponse>, support_abandon: (params: {}) => Promise<SupportAbandonResponse>,
stream_repost: (params: StreamRepostOptions) => Promise<StreamRepostResponse>,
purchase_list: (params: PurchaseListOptions) => Promise<PurchaseListResponse>,
collection_resolve: (params: CollectionResolveOptions) => Promise<CollectionResolveResponse>,
collection_list: (params: CollectionListOptions) => Promise<CollectionListResponse>,
collection_create: (params: {}) => Promise<CollectionCreateResponse>,
collection_update: (params: {}) => Promise<CollectionCreateResponse>,
// File fetching and manipulation // File fetching and manipulation
file_list: (params: {}) => Promise<FileListResponse>, file_list: (params: {}) => Promise<FileListResponse>,
@ -240,8 +335,10 @@ declare type LbryTypes = {
preference_set: (params: {}) => Promise<any>, preference_set: (params: {}) => Promise<any>,
// Commenting // Commenting
comment_list: (params: {}) => Promise<CommentListResponse>, comment_update: (params: {}) => Promise<CommentUpdateResponse>,
comment_create: (params: {}) => Promise<CommentCreateResponse>, comment_hide: (params: {}) => Promise<CommentHideResponse>,
comment_abandon: (params: {}) => Promise<CommentAbandonResponse>,
// Wallet utilities // Wallet utilities
wallet_balance: (params: {}) => Promise<BalanceResponse>, wallet_balance: (params: {}) => Promise<BalanceResponse>,
wallet_decrypt: (prams: {}) => Promise<boolean>, wallet_decrypt: (prams: {}) => Promise<boolean>,
@ -254,6 +351,7 @@ declare type LbryTypes = {
address_unused: (params: {}) => Promise<string>, // New address address_unused: (params: {}) => Promise<string>, // New address
address_list: (params: {}) => Promise<string>, address_list: (params: {}) => Promise<string>,
transaction_list: (params: {}) => Promise<TxListResponse>, transaction_list: (params: {}) => Promise<TxListResponse>,
txo_list: (params: {}) => Promise<any>,
// Sync // Sync
sync_hash: (params: {}) => Promise<string>, sync_hash: (params: {}) => Promise<string>,

99
flow-typed/LbryFirst.js vendored Normal file
View file

@ -0,0 +1,99 @@
// @flow
declare type LbryFirstStatusResponse = {
Version: string,
Message: string,
Running: boolean,
Commit: string,
};
declare type LbryFirstVersionResponse = {
build: string,
lbrynet_version: string,
os_release: string,
os_system: string,
platform: string,
processor: string,
python_version: string,
};
/* SAMPLE UPLOAD RESPONSE (FULL)
"Video": {
"etag": "\"Dn5xIderbhAnUk5TAW0qkFFir0M/xlGLrlTox7VFTRcR8F77RbKtaU4\"",
"id": "8InjtdvVmwE",
"kind": "youtube#video",
"snippet": {
"categoryId": "22",
"channelId": "UCXiVsGTU88fJjheB2rqF0rA",
"channelTitle": "Mark Beamer",
"liveBroadcastContent": "none",
"localized": {
"title": "my title"
},
"publishedAt": "2020-05-05T04:17:53.000Z",
"thumbnails": {
"default": {
"height": 90,
"url": "https://i9.ytimg.com/vi/8InjtdvVmwE/default.jpg?sqp=CMTQw_UF&rs=AOn4CLB6dlhZMSMrazDlWRsitPgCsn8fVw",
"width": 120
},
"high": {
"height": 360,
"url": "https://i9.ytimg.com/vi/8InjtdvVmwE/hqdefault.jpg?sqp=CMTQw_UF&rs=AOn4CLB-Je_7l6qvASRAR_bSGWZHaXaJWQ",
"width": 480
},
"medium": {
"height": 180,
"url": "https://i9.ytimg.com/vi/8InjtdvVmwE/mqdefault.jpg?sqp=CMTQw_UF&rs=AOn4CLCvSnDLqVznRNMKuvJ_0misY_chPQ",
"width": 320
}
},
"title": "my title"
},
"status": {
"embeddable": true,
"license": "youtube",
"privacyStatus": "private",
"publicStatsViewable": true,
"uploadStatus": "uploaded"
}
}
*/
declare type UploadResponse = {
Video: {
id: string,
snippet: {
channelId: string,
},
status: {
uploadStatus: string,
},
},
};
declare type HasYTAuthResponse = {
HashAuth: boolean,
};
declare type YTSignupResponse = {};
//
// Types used in the generic LbryFirst object that is exported
//
declare type LbryFirstTypes = {
isConnected: boolean,
connectPromise: ?Promise<any>,
connect: () => void,
lbryFirstConnectionString: string,
apiRequestHeaders: { [key: string]: string },
setApiHeader: (string, string) => void,
unsetApiHeader: string => void,
overrides: { [string]: ?Function },
setOverride: (string, Function) => void,
// LbryFirst Methods
stop: () => Promise<string>,
status: () => Promise<StatusResponse>,
version: () => Promise<VersionResponse>,
upload: any => Promise<?UploadResponse>,
hasYTAuth: string => Promise<HasYTAuthResponse>,
ytSignup: () => Promise<YTSignupResponse>,
};

5
flow-typed/Reflector.js vendored Normal file
View file

@ -0,0 +1,5 @@
declare type ReflectingUpdate = {
fileListItem: FileListItem,
progress: number | boolean,
stalled: boolean,
};

59
flow-typed/Search.js vendored
View file

@ -1,59 +0,0 @@
// @flow
import * as ACTIONS from 'constants/action_types';
declare type SearchSuggestion = {
value: string,
shorthand: string,
type: string,
};
declare type SearchOptions = {
// :(
// https://github.com/facebook/flow/issues/6492
RESULT_COUNT: number,
CLAIM_TYPE: string,
INCLUDE_FILES: string,
INCLUDE_CHANNELS: string,
INCLUDE_FILES_AND_CHANNELS: string,
MEDIA_AUDIO: string,
MEDIA_VIDEO: string,
MEDIA_TEXT: string,
MEDIA_IMAGE: string,
MEDIA_APPLICATION: string,
};
declare type SearchState = {
isActive: boolean,
searchQuery: string,
options: SearchOptions,
suggestions: { [string]: Array<SearchSuggestion> },
urisByQuery: {},
};
declare type SearchSuccess = {
type: ACTIONS.SEARCH_SUCCESS,
data: {
query: string,
uris: Array<string>,
},
};
declare type UpdateSearchQuery = {
type: ACTIONS.UPDATE_SEARCH_QUERY,
data: {
query: string,
},
};
declare type UpdateSearchSuggestions = {
type: ACTIONS.UPDATE_SEARCH_SUGGESTIONS,
data: {
query: string,
suggestions: Array<SearchSuggestion>,
},
};
declare type UpdateSearchOptions = {
type: ACTIONS.UPDATE_SEARCH_OPTIONS,
data: SearchOptions,
};

27
flow-typed/Txo.js vendored Normal file
View file

@ -0,0 +1,27 @@
declare type Txo = {
amount: number,
claim_id: string,
normalized_name: string,
nout: number,
txid: string,
type: string,
value_type: string,
timestamp: number,
is_my_output: boolean,
is_my_input: boolean,
is_spent: boolean,
signing_channel?: {
channel_id: string,
},
};
declare type TxoListParams = {
page: number,
page_size: number,
type: string,
is_my_input?: boolean,
is_my_output?: boolean,
is_not_my_input?: boolean,
is_not_my_output?: boolean,
is_spent?: boolean,
};

View file

@ -12,6 +12,7 @@ declare type LbryUrlObj = {
secondaryClaimSequence?: number, secondaryClaimSequence?: number,
primaryBidPosition?: number, primaryBidPosition?: number,
secondaryBidPosition?: number, secondaryBidPosition?: number,
startTime?: number,
// Below are considered deprecated and should not be used due to unreliableness with claim.canonical_url // Below are considered deprecated and should not be used due to unreliableness with claim.canonical_url
claimName?: string, claimName?: string,

5
flow-typed/npm/from-entries.js vendored Normal file
View file

@ -0,0 +1,5 @@
// @flow
declare module '@ungap/from-entries' {
declare module.exports: any;
}

5
flow-typed/npm/uuid.js vendored Normal file
View file

@ -0,0 +1,5 @@
// @flow
declare module 'uuid' {
declare module.exports: any;
}

View file

@ -1,102 +0,0 @@
// flow-typed signature: 3cf668e64747095cab0bb360cf2fb34f
// flow-typed version: d659bd0cb8/uuid_v3.x.x/flow_>=v0.32.x
declare module "uuid" {
declare class uuid {
static (
options?: {|
random?: number[],
rng?: () => number[] | Buffer
|},
buffer?: number[] | Buffer,
offset?: number
): string,
static v1(
options?: {|
node?: number[],
clockseq?: number,
msecs?: number | Date,
nsecs?: number
|},
buffer?: number[] | Buffer,
offset?: number
): string,
static v4(
options?: {|
random?: number[],
rng?: () => number[] | Buffer
|},
buffer?: number[] | Buffer,
offset?: number
): string
}
declare module.exports: Class<uuid>;
}
declare module "uuid/v1" {
declare class v1 {
static (
options?: {|
node?: number[],
clockseq?: number,
msecs?: number | Date,
nsecs?: number
|},
buffer?: number[] | Buffer,
offset?: number
): string
}
declare module.exports: Class<v1>;
}
declare module "uuid/v3" {
declare class v3 {
static (
name?: string | number[],
namespace?: string | number[],
buffer?: number[] | Buffer,
offset?: number
): string,
static name: string,
static DNS: string,
static URL: string
}
declare module.exports: Class<v3>;
}
declare module "uuid/v4" {
declare class v4 {
static (
options?: {|
random?: number[],
rng?: () => number[] | Buffer
|},
buffer?: number[] | Buffer,
offset?: number
): string
}
declare module.exports: Class<v4>;
}
declare module "uuid/v5" {
declare class v5 {
static (
name?: string | number[],
namespace?: string | number[],
buffer?: number[] | Buffer,
offset?: number
): string,
static name: string,
static DNS: string,
static URL: string
}
declare module.exports: Class<v5>;
}

8
jest.config.js Normal file
View file

@ -0,0 +1,8 @@
module.exports = {
collectCoverageFrom: ["src/**/*.{js,jsx,mjs}"],
testMatch: ["<rootDir>/tests/**/*.test.js"],
transform: {
"^.+\\.(js|jsx|mjs)$": "<rootDir>/tests/config/jest-transformer.js",
},
transformIgnorePatterns: ["[/\\\\]node_modules[/\\\\].+\\.(js|jsx|mjs)$"]
};

View file

@ -25,14 +25,21 @@
"dev": "rollup --config --watch", "dev": "rollup --config --watch",
"precommit": "flow check && lint-staged", "precommit": "flow check && lint-staged",
"lint": "eslint 'src/**/*.js' --fix", "lint": "eslint 'src/**/*.js' --fix",
"format": "prettier 'src/**/*.{js,json}' --write" "format": "prettier 'src/**/*.{js,json}' --write",
"test": "jest"
}, },
"dependencies": { "dependencies": {
"@ungap/from-entries": "^0.2.1",
"proxy-polyfill": "0.1.6", "proxy-polyfill": "0.1.6",
"reselect": "^3.0.0", "reselect": "^3.0.0",
"uuid": "^3.3.2" "uuid": "^8.3.1"
}, },
"devDependencies": { "devDependencies": {
"@babel/plugin-proposal-class-properties": "^7.10.4",
"@babel/plugin-proposal-decorators": "^7.10.5",
"@babel/plugin-transform-flow-strip-types": "^7.10.4",
"@babel/preset-env": "^7.11.0",
"@babel/preset-react": "^7.10.4",
"babel-core": "^6.26.0", "babel-core": "^6.26.0",
"babel-eslint": "^8.0.3", "babel-eslint": "^8.0.3",
"babel-loader": "^7.1.4", "babel-loader": "^7.1.4",
@ -53,6 +60,7 @@
"flow-bin": "^0.97.0", "flow-bin": "^0.97.0",
"flow-typed": "^2.5.1", "flow-typed": "^2.5.1",
"husky": "^0.14.3", "husky": "^0.14.3",
"jest": "^26.4.2",
"lint-staged": "^7.0.4", "lint-staged": "^7.0.4",
"prettier": "^1.4.2", "prettier": "^1.4.2",
"rollup": "^1.8.0", "rollup": "^1.8.0",

View file

@ -0,0 +1,4 @@
export const PENDING = 'pending';
export const DONE = 'done';
export const READY = 'ready';
export const ERROR = 'error';

View file

@ -3,6 +3,8 @@ export const DAEMON_READY = 'DAEMON_READY';
export const DAEMON_VERSION_MATCH = 'DAEMON_VERSION_MATCH'; export const DAEMON_VERSION_MATCH = 'DAEMON_VERSION_MATCH';
export const DAEMON_VERSION_MISMATCH = 'DAEMON_VERSION_MISMATCH'; export const DAEMON_VERSION_MISMATCH = 'DAEMON_VERSION_MISMATCH';
export const VOLUME_CHANGED = 'VOLUME_CHANGED'; export const VOLUME_CHANGED = 'VOLUME_CHANGED';
export const SET_WELCOME_VERSION = 'SET_WELCOME_VERSION';
export const SET_ALLOW_ANALYTICS = 'SET_ALLOW_ANALYTICS';
// Navigation // Navigation
export const CHANGE_AFTER_AUTH_PATH = 'CHANGE_AFTER_AUTH_PATH'; export const CHANGE_AFTER_AUTH_PATH = 'CHANGE_AFTER_AUTH_PATH';
@ -33,10 +35,19 @@ export const GET_NEW_ADDRESS_STARTED = 'GET_NEW_ADDRESS_STARTED';
export const GET_NEW_ADDRESS_COMPLETED = 'GET_NEW_ADDRESS_COMPLETED'; export const GET_NEW_ADDRESS_COMPLETED = 'GET_NEW_ADDRESS_COMPLETED';
export const FETCH_TRANSACTIONS_STARTED = 'FETCH_TRANSACTIONS_STARTED'; export const FETCH_TRANSACTIONS_STARTED = 'FETCH_TRANSACTIONS_STARTED';
export const FETCH_TRANSACTIONS_COMPLETED = 'FETCH_TRANSACTIONS_COMPLETED'; export const FETCH_TRANSACTIONS_COMPLETED = 'FETCH_TRANSACTIONS_COMPLETED';
export const FETCH_TXO_PAGE_STARTED = 'FETCH_TXO_PAGE_STARTED';
export const FETCH_TXO_PAGE_COMPLETED = 'FETCH_TXO_PAGE_COMPLETED';
export const FETCH_TXO_PAGE_FAILED = 'FETCH_TXO_PAGE_FAILED';
export const UPDATE_TXO_FETCH_PARAMS = 'UPDATE_TXO_FETCH_PARAMS';
export const FETCH_SUPPORTS_STARTED = 'FETCH_SUPPORTS_STARTED'; export const FETCH_SUPPORTS_STARTED = 'FETCH_SUPPORTS_STARTED';
export const FETCH_SUPPORTS_COMPLETED = 'FETCH_SUPPORTS_COMPLETED'; export const FETCH_SUPPORTS_COMPLETED = 'FETCH_SUPPORTS_COMPLETED';
export const ABANDON_SUPPORT_STARTED = 'ABANDON_SUPPORT_STARTED'; export const ABANDON_SUPPORT_STARTED = 'ABANDON_SUPPORT_STARTED';
export const ABANDON_SUPPORT_COMPLETED = 'ABANDON_SUPPORT_COMPLETED'; export const ABANDON_SUPPORT_COMPLETED = 'ABANDON_SUPPORT_COMPLETED';
export const ABANDON_CLAIM_SUPPORT_STARTED = 'ABANDON_CLAIM_SUPPORT_STARTED';
export const ABANDON_CLAIM_SUPPORT_COMPLETED = 'ABANDON_CLAIM_SUPPORT_COMPLETED';
export const ABANDON_CLAIM_SUPPORT_FAILED = 'ABANDON_CLAIM_SUPPORT_FAILED';
export const ABANDON_CLAIM_SUPPORT_PREVIEW = 'ABANDON_CLAIM_SUPPORT_PREVIEW';
export const PENDING_SUPPORTS_UPDATED = 'PENDING_SUPPORTS_UPDATED';
export const UPDATE_BALANCE = 'UPDATE_BALANCE'; export const UPDATE_BALANCE = 'UPDATE_BALANCE';
export const UPDATE_TOTAL_BALANCE = 'UPDATE_TOTAL_BALANCE'; export const UPDATE_TOTAL_BALANCE = 'UPDATE_TOTAL_BALANCE';
export const CHECK_ADDRESS_IS_MINE_STARTED = 'CHECK_ADDRESS_IS_MINE_STARTED'; export const CHECK_ADDRESS_IS_MINE_STARTED = 'CHECK_ADDRESS_IS_MINE_STARTED';
@ -68,6 +79,16 @@ export const SET_TRANSACTION_LIST_FILTER = 'SET_TRANSACTION_LIST_FILTER';
export const UPDATE_CURRENT_HEIGHT = 'UPDATE_CURRENT_HEIGHT'; export const UPDATE_CURRENT_HEIGHT = 'UPDATE_CURRENT_HEIGHT';
export const SET_DRAFT_TRANSACTION_AMOUNT = 'SET_DRAFT_TRANSACTION_AMOUNT'; export const SET_DRAFT_TRANSACTION_AMOUNT = 'SET_DRAFT_TRANSACTION_AMOUNT';
export const SET_DRAFT_TRANSACTION_ADDRESS = 'SET_DRAFT_TRANSACTION_ADDRESS'; export const SET_DRAFT_TRANSACTION_ADDRESS = 'SET_DRAFT_TRANSACTION_ADDRESS';
export const FETCH_UTXO_COUNT_STARTED = 'FETCH_UTXO_COUNT_STARTED';
export const FETCH_UTXO_COUNT_COMPLETED = 'FETCH_UTXO_COUNT_COMPLETED';
export const FETCH_UTXO_COUNT_FAILED = 'FETCH_UTXO_COUNT_FAILED';
export const TIP_CLAIM_MASS_STARTED = 'TIP_CLAIM_MASS_STARTED';
export const TIP_CLAIM_MASS_COMPLETED = 'TIP_CLAIM_MASS_COMPLETED';
export const TIP_CLAIM_MASS_FAILED = 'TIP_CLAIM_MASS_FAILED';
export const DO_UTXO_CONSOLIDATE_STARTED = 'DO_UTXO_CONSOLIDATE_STARTED';
export const DO_UTXO_CONSOLIDATE_COMPLETED = 'DO_UTXO_CONSOLIDATE_COMPLETED';
export const DO_UTXO_CONSOLIDATE_FAILED = 'DO_UTXO_CONSOLIDATE_FAILED';
export const PENDING_CONSOLIDATED_TXOS_UPDATED = 'PENDING_CONSOLIDATED_TXOS_UPDATED';
// Claims // Claims
export const RESOLVE_URIS_STARTED = 'RESOLVE_URIS_STARTED'; export const RESOLVE_URIS_STARTED = 'RESOLVE_URIS_STARTED';
@ -80,6 +101,10 @@ export const ABANDON_CLAIM_STARTED = 'ABANDON_CLAIM_STARTED';
export const ABANDON_CLAIM_SUCCEEDED = 'ABANDON_CLAIM_SUCCEEDED'; export const ABANDON_CLAIM_SUCCEEDED = 'ABANDON_CLAIM_SUCCEEDED';
export const FETCH_CHANNEL_LIST_STARTED = 'FETCH_CHANNEL_LIST_STARTED'; export const FETCH_CHANNEL_LIST_STARTED = 'FETCH_CHANNEL_LIST_STARTED';
export const FETCH_CHANNEL_LIST_COMPLETED = 'FETCH_CHANNEL_LIST_COMPLETED'; export const FETCH_CHANNEL_LIST_COMPLETED = 'FETCH_CHANNEL_LIST_COMPLETED';
export const FETCH_CHANNEL_LIST_FAILED = 'FETCH_CHANNEL_LIST_FAILED';
export const FETCH_COLLECTION_LIST_STARTED = 'FETCH_COLLECTION_LIST_STARTED';
export const FETCH_COLLECTION_LIST_COMPLETED = 'FETCH_COLLECTION_LIST_COMPLETED';
export const FETCH_COLLECTION_LIST_FAILED = 'FETCH_COLLECTION_LIST_FAILED';
export const CREATE_CHANNEL_STARTED = 'CREATE_CHANNEL_STARTED'; export const CREATE_CHANNEL_STARTED = 'CREATE_CHANNEL_STARTED';
export const CREATE_CHANNEL_COMPLETED = 'CREATE_CHANNEL_COMPLETED'; export const CREATE_CHANNEL_COMPLETED = 'CREATE_CHANNEL_COMPLETED';
export const CREATE_CHANNEL_FAILED = 'CREATE_CHANNEL_FAILED'; export const CREATE_CHANNEL_FAILED = 'CREATE_CHANNEL_FAILED';
@ -89,6 +114,7 @@ export const UPDATE_CHANNEL_FAILED = 'UPDATE_CHANNEL_FAILED';
export const IMPORT_CHANNEL_STARTED = 'IMPORT_CHANNEL_STARTED'; export const IMPORT_CHANNEL_STARTED = 'IMPORT_CHANNEL_STARTED';
export const IMPORT_CHANNEL_COMPLETED = 'IMPORT_CHANNEL_COMPLETED'; export const IMPORT_CHANNEL_COMPLETED = 'IMPORT_CHANNEL_COMPLETED';
export const IMPORT_CHANNEL_FAILED = 'IMPORT_CHANNEL_FAILED'; export const IMPORT_CHANNEL_FAILED = 'IMPORT_CHANNEL_FAILED';
export const CLEAR_CHANNEL_ERRORS = 'CLEAR_CHANNEL_ERRORS';
export const PUBLISH_STARTED = 'PUBLISH_STARTED'; export const PUBLISH_STARTED = 'PUBLISH_STARTED';
export const PUBLISH_COMPLETED = 'PUBLISH_COMPLETED'; export const PUBLISH_COMPLETED = 'PUBLISH_COMPLETED';
export const PUBLISH_FAILED = 'PUBLISH_FAILED'; export const PUBLISH_FAILED = 'PUBLISH_FAILED';
@ -103,6 +129,42 @@ export const CLAIM_SEARCH_FAILED = 'CLAIM_SEARCH_FAILED';
export const CLAIM_SEARCH_BY_TAGS_STARTED = 'CLAIM_SEARCH_BY_TAGS_STARTED'; export const CLAIM_SEARCH_BY_TAGS_STARTED = 'CLAIM_SEARCH_BY_TAGS_STARTED';
export const CLAIM_SEARCH_BY_TAGS_COMPLETED = 'CLAIM_SEARCH_BY_TAGS_COMPLETED'; export const CLAIM_SEARCH_BY_TAGS_COMPLETED = 'CLAIM_SEARCH_BY_TAGS_COMPLETED';
export const CLAIM_SEARCH_BY_TAGS_FAILED = 'CLAIM_SEARCH_BY_TAGS_FAILED'; export const CLAIM_SEARCH_BY_TAGS_FAILED = 'CLAIM_SEARCH_BY_TAGS_FAILED';
export const CLAIM_REPOST_STARTED = 'CLAIM_REPOST_STARTED';
export const CLAIM_REPOST_COMPLETED = 'CLAIM_REPOST_COMPLETED';
export const CLAIM_REPOST_FAILED = 'CLAIM_REPOST_FAILED';
export const CLEAR_REPOST_ERROR = 'CLEAR_REPOST_ERROR';
export const CHECK_PUBLISH_NAME_STARTED = 'CHECK_PUBLISH_NAME_STARTED';
export const CHECK_PUBLISH_NAME_COMPLETED = 'CHECK_PUBLISH_NAME_COMPLETED';
export const UPDATE_PENDING_CLAIMS = 'UPDATE_PENDING_CLAIMS';
export const UPDATE_CONFIRMED_CLAIMS = 'UPDATE_CONFIRMED_CLAIMS';
export const ADD_FILES_REFLECTING = 'ADD_FILES_REFLECTING';
export const UPDATE_FILES_REFLECTING = 'UPDATE_FILES_REFLECTING';
export const TOGGLE_CHECKING_REFLECTING = 'TOGGLE_CHECKING_REFLECTING';
export const TOGGLE_CHECKING_PENDING = 'TOGGLE_CHECKING_PENDING';
export const PURCHASE_LIST_STARTED = 'PURCHASE_LIST_STARTED';
export const PURCHASE_LIST_COMPLETED = 'PURCHASE_LIST_COMPLETED';
export const PURCHASE_LIST_FAILED = 'PURCHASE_LIST_FAILED';
export const COLLECTION_PUBLISH_STARTED = 'COLLECTION_PUBLISH_STARTED';
export const COLLECTION_PUBLISH_COMPLETED = 'COLLECTION_PUBLISH_COMPLETED';
export const COLLECTION_PUBLISH_FAILED = 'COLLECTION_PUBLISH_FAILED';
export const COLLECTION_PUBLISH_UPDATE_STARTED = 'COLLECTION_PUBLISH_UPDATE_STARTED';
export const COLLECTION_PUBLISH_UPDATE_COMPLETED = 'COLLECTION_PUBLISH_UPDATE_COMPLETED';
export const COLLECTION_PUBLISH_UPDATE_FAILED = 'COLLECTION_PUBLISH_UPDATE_FAILED';
export const COLLECTION_PUBLISH_ABANDON_STARTED = 'COLLECTION_PUBLISH_ABANDON_STARTED';
export const COLLECTION_PUBLISH_ABANDON_COMPLETED = 'COLLECTION_PUBLISH_ABANDON_COMPLETED';
export const COLLECTION_PUBLISH_ABANDON_FAILED = 'COLLECTION_PUBLISH_ABANDON_FAILED';
export const CLEAR_COLLECTION_ERRORS = 'CLEAR_COLLECTION_ERRORS';
export const COLLECTION_ITEMS_RESOLVE_STARTED = 'COLLECTION_ITEMS_RESOLVE_STARTED';
export const COLLECTION_ITEMS_RESOLVE_COMPLETED = 'COLLECTION_ITEMS_RESOLVE_COMPLETED';
export const COLLECTION_ITEMS_RESOLVE_FAILED = 'COLLECTION_ITEMS_RESOLVE_FAILED';
export const COLLECTION_NEW = 'COLLECTION_NEW';
export const COLLECTION_DELETE = 'COLLECTION_DELETE';
export const COLLECTION_PENDING = 'COLLECTION_PENDING';
export const COLLECTION_EDIT = 'COLLECTION_EDIT';
export const COLLECTION_COPY = 'COLLECTION_COPY';
export const COLLECTION_SAVE = 'COLLECTION_SAVE';
export const COLLECTION_ERROR = 'COLLECTION_ERROR';
// Comments // Comments
export const COMMENT_LIST_STARTED = 'COMMENT_LIST_STARTED'; export const COMMENT_LIST_STARTED = 'COMMENT_LIST_STARTED';
@ -111,6 +173,15 @@ export const COMMENT_LIST_FAILED = 'COMMENT_LIST_FAILED';
export const COMMENT_CREATE_STARTED = 'COMMENT_CREATE_STARTED'; export const COMMENT_CREATE_STARTED = 'COMMENT_CREATE_STARTED';
export const COMMENT_CREATE_COMPLETED = 'COMMENT_CREATE_COMPLETED'; export const COMMENT_CREATE_COMPLETED = 'COMMENT_CREATE_COMPLETED';
export const COMMENT_CREATE_FAILED = 'COMMENT_CREATE_FAILED'; export const COMMENT_CREATE_FAILED = 'COMMENT_CREATE_FAILED';
export const COMMENT_ABANDON_STARTED = 'COMMENT_ABANDON_STARTED';
export const COMMENT_ABANDON_COMPLETED = 'COMMENT_ABANDON_COMPLETED';
export const COMMENT_ABANDON_FAILED = 'COMMENT_ABANDON_FAILED';
export const COMMENT_UPDATE_STARTED = 'COMMENT_UPDATE_STARTED';
export const COMMENT_UPDATE_COMPLETED = 'COMMENT_UPDATE_COMPLETED';
export const COMMENT_UPDATE_FAILED = 'COMMENT_UPDATE_FAILED';
export const COMMENT_HIDE_STARTED = 'COMMENT_HIDE_STARTED';
export const COMMENT_HIDE_COMPLETED = 'COMMENT_HIDE_COMPLETED';
export const COMMENT_HIDE_FAILED = 'COMMENT_HIDE_FAILED';
// Files // Files
export const FILE_LIST_STARTED = 'FILE_LIST_STARTED'; export const FILE_LIST_STARTED = 'FILE_LIST_STARTED';
@ -133,17 +204,7 @@ export const SET_FILE_LIST_SORT = 'SET_FILE_LIST_SORT';
export const PURCHASE_URI_STARTED = 'PURCHASE_URI_STARTED'; export const PURCHASE_URI_STARTED = 'PURCHASE_URI_STARTED';
export const PURCHASE_URI_COMPLETED = 'PURCHASE_URI_COMPLETED'; export const PURCHASE_URI_COMPLETED = 'PURCHASE_URI_COMPLETED';
export const PURCHASE_URI_FAILED = 'PURCHASE_URI_FAILED'; export const PURCHASE_URI_FAILED = 'PURCHASE_URI_FAILED';
export const DELETE_PURCHASED_URI = 'DELETE_PURCHASED_URI'; export const CLEAR_PURCHASED_URI_SUCCESS = 'CLEAR_PURCHASED_URI_SUCCESS';
// Search
export const SEARCH_START = 'SEARCH_START';
export const SEARCH_SUCCESS = 'SEARCH_SUCCESS';
export const SEARCH_FAIL = 'SEARCH_FAIL';
export const UPDATE_SEARCH_QUERY = 'UPDATE_SEARCH_QUERY';
export const UPDATE_SEARCH_OPTIONS = 'UPDATE_SEARCH_OPTIONS';
export const UPDATE_SEARCH_SUGGESTIONS = 'UPDATE_SEARCH_SUGGESTIONS';
export const SEARCH_FOCUS = 'SEARCH_FOCUS';
export const SEARCH_BLUR = 'SEARCH_BLUR';
// Settings // Settings
export const DAEMON_SETTINGS_RECEIVED = 'DAEMON_SETTINGS_RECEIVED'; export const DAEMON_SETTINGS_RECEIVED = 'DAEMON_SETTINGS_RECEIVED';
@ -239,13 +300,6 @@ export const FETCH_COST_INFO_STARTED = 'FETCH_COST_INFO_STARTED';
export const FETCH_COST_INFO_COMPLETED = 'FETCH_COST_INFO_COMPLETED'; export const FETCH_COST_INFO_COMPLETED = 'FETCH_COST_INFO_COMPLETED';
export const FETCH_COST_INFO_FAILED = 'FETCH_COST_INFO_FAILED'; export const FETCH_COST_INFO_FAILED = 'FETCH_COST_INFO_FAILED';
// Tags
export const TOGGLE_TAG_FOLLOW = 'TOGGLE_TAG_FOLLOW';
export const TAG_ADD = 'TAG_ADD';
export const TAG_DELETE = 'TAG_DELETE';
// Blocked Channels
export const TOGGLE_BLOCK_CHANNEL = 'TOGGLE_BLOCK_CHANNEL';
// Sync // Sync
export const USER_STATE_POPULATE = 'USER_STATE_POPULATE'; export const USER_STATE_POPULATE = 'USER_STATE_POPULATE';
export const SYNC_FATAL_ERROR = 'SYNC_FATAL_ERROR';

View file

@ -3,3 +3,9 @@ export const MINIMUM_PUBLISH_BID = 0.00000001;
export const CHANNEL_ANONYMOUS = 'anonymous'; export const CHANNEL_ANONYMOUS = 'anonymous';
export const CHANNEL_NEW = 'new'; export const CHANNEL_NEW = 'new';
export const PAGE_SIZE = 20; export const PAGE_SIZE = 20;
export const LEVEL_1_STAKED_AMOUNT = 0;
export const LEVEL_2_STAKED_AMOUNT = 1;
export const LEVEL_3_STAKED_AMOUNT = 50;
export const LEVEL_4_STAKED_AMOUNT = 250;
export const LEVEL_5_STAKED_AMOUNT = 1000;

View file

@ -0,0 +1,15 @@
export const COLLECTION_ID = 'lid';
export const COLLECTION_INDEX = 'linx';
export const COL_TYPE_PLAYLIST = 'playlist';
export const COL_TYPE_CHANNELS = 'channelList';
export const WATCH_LATER_ID = 'watchlater';
export const FAVORITES_ID = 'favorites';
export const FAVORITE_CHANNELS_ID = 'favoriteChannels';
export const BUILTIN_LISTS = [WATCH_LATER_ID, FAVORITES_ID, FAVORITE_CHANNELS_ID];
export const COL_KEY_EDITED = 'edited';
export const COL_KEY_UNPUBLISHED = 'unpublished';
export const COL_KEY_PENDING = 'pending';
export const COL_KEY_SAVED = 'saved';

View file

@ -1,19 +0,0 @@
export const SEARCH_TYPES = {
FILE: 'file',
CHANNEL: 'channel',
SEARCH: 'search',
TAG: 'tag',
};
export const SEARCH_OPTIONS = {
RESULT_COUNT: 'size',
CLAIM_TYPE: 'claimType',
INCLUDE_FILES: 'file',
INCLUDE_CHANNELS: 'channel',
INCLUDE_FILES_AND_CHANNELS: 'file,channel',
MEDIA_AUDIO: 'audio',
MEDIA_VIDEO: 'video',
MEDIA_TEXT: 'text',
MEDIA_IMAGE: 'image',
MEDIA_APPLICATION: 'application',
};

View file

@ -1,17 +1,46 @@
/* hardcoded names still exist for these in reducers/settings.js - only discovered when debugging */ /* hardcoded names still exist for these in reducers/settings.js - only discovered when debugging */
/* Many SETTINGS are stored in the localStorage by their name - /* Many SETTINGS are stored in the localStorage by their name -
be careful about changing the value of a SETTINGS constant, as doing so can invalidate existing SETTINGS */ be careful about changing the value of a SETTINGS constant, as doing so can invalidate existing SETTINGS */
export const SHOW_NSFW = 'showNsfw';
export const CREDIT_REQUIRED_ACKNOWLEDGED = 'credit_required_acknowledged'; export const CREDIT_REQUIRED_ACKNOWLEDGED = 'credit_required_acknowledged';
export const NEW_USER_ACKNOWLEDGED = 'welcome_acknowledged'; export const NEW_USER_ACKNOWLEDGED = 'welcome_acknowledged';
export const EMAIL_COLLECTION_ACKNOWLEDGED = 'email_collection_acknowledged'; export const EMAIL_COLLECTION_ACKNOWLEDGED = 'email_collection_acknowledged';
export const FIRST_RUN_STARTED = 'first_run_started';
export const INVITE_ACKNOWLEDGED = 'invite_acknowledged';
export const FOLLOWING_ACKNOWLEDGED = 'following_acknowledged';
export const TAGS_ACKNOWLEDGED = 'tags_acknowledged';
export const REWARDS_ACKNOWLEDGED = 'rewards_acknowledged';
export const LANGUAGE = 'language'; export const LANGUAGE = 'language';
export const SHOW_NSFW = 'showNsfw'; export const SEARCH_IN_LANGUAGE = 'search_in_language';
export const SHOW_UNAVAILABLE = 'showUnavailable'; export const SHOW_MATURE = 'show_mature';
export const INSTANT_PURCHASE_ENABLED = 'instantPurchaseEnabled'; export const HOMEPAGE = 'homepage';
export const INSTANT_PURCHASE_MAX = 'instantPurchaseMax'; export const HIDE_REPOSTS = 'hide_reposts';
export const SHOW_ANONYMOUS = 'show_anonymous';
export const SHOW_UNAVAILABLE = 'show_unavailable';
export const INSTANT_PURCHASE_ENABLED = 'instant_purchase_enabled';
export const INSTANT_PURCHASE_MAX = 'instant_purchase_max';
export const THEME = 'theme'; export const THEME = 'theme';
export const THEMES = 'themes'; export const THEMES = 'themes';
export const AUTOMATIC_DARK_MODE_ENABLED = 'automaticDarkModeEnabled'; export const AUTOMATIC_DARK_MODE_ENABLED = 'automatic_dark_mode_enabled';
export const AUTOPLAY_MEDIA = 'autoplay';
export const AUTOPLAY_NEXT = 'autoplay_next';
export const OS_NOTIFICATIONS_ENABLED = 'os_notifications_enabled';
export const AUTO_DOWNLOAD = 'auto_download';
export const AUTO_LAUNCH = 'auto_launch';
export const TO_TRAY_WHEN_CLOSED = 'to_tray_when_closed';
export const SUPPORT_OPTION = 'support_option';
export const HIDE_BALANCE = 'hide_balance';
export const HIDE_SPLASH_ANIMATION = 'hide_splash_animation';
export const FLOATING_PLAYER = 'floating_player';
export const DARK_MODE_TIMES = 'dark_mode_times';
export const ENABLE_SYNC = 'enable_sync';
export const ENABLE_PUBLISH_PREVIEW = 'enable-publish-preview';
export const TILE_LAYOUT = 'tile_layout';
export const VIDEO_THEATER_MODE = 'video_theater_mode';
export const VIDEO_PLAYBACK_RATE = 'video_playback_rate';
export const CUSTOM_COMMENTS_SERVER_ENABLED = 'custom_comments_server_enabled';
export const CUSTOM_COMMENTS_SERVER_URL = 'custom_comments_server_url';
// mobile settings // mobile settings
export const BACKGROUND_PLAY_ENABLED = 'backgroundPlayEnabled'; export const BACKGROUND_PLAY_ENABLED = 'backgroundPlayEnabled';

View file

@ -1,12 +1,32 @@
/* /*
* How to use this file: * How to use this file:
* Settings exported from here will trigger the setting to be * Settings exported from here will trigger the setting to be
* sent to the preference middleware when set using the * sent to the preference middleware when set using the
* usual setDaemonSettings and clearDaemonSettings methods. * usual setDaemonSettings and clearDaemonSettings methods.
* *
* See redux/settings/actions in the app for where this is used. * See redux/settings/actions in the app for where this is used.
*/ */
import * as DAEMON_SETTINGS from './daemon_settings'; import * as DAEMON_SETTINGS from './daemon_settings';
import * as SETTINGS from './settings';
export const WALLET_SERVERS = DAEMON_SETTINGS.LBRYUM_SERVERS; // DAEMON
export const SDK_SYNC_KEYS = [DAEMON_SETTINGS.LBRYUM_SERVERS, DAEMON_SETTINGS.SHARE_USAGE_DATA];
// CLIENT
export const CLIENT_SYNC_KEYS = [
SETTINGS.SHOW_MATURE,
SETTINGS.HIDE_REPOSTS,
SETTINGS.SHOW_ANONYMOUS,
SETTINGS.INSTANT_PURCHASE_ENABLED,
SETTINGS.INSTANT_PURCHASE_MAX,
SETTINGS.THEME,
SETTINGS.AUTOPLAY_MEDIA,
SETTINGS.AUTOPLAY_NEXT,
SETTINGS.HIDE_BALANCE,
SETTINGS.HIDE_SPLASH_ANIMATION,
SETTINGS.FLOATING_PLAYER,
SETTINGS.DARK_MODE_TIMES,
SETTINGS.AUTOMATIC_DARK_MODE_ENABLED,
SETTINGS.LANGUAGE,
];

View file

@ -13,16 +13,39 @@ export const DEFAULT_FOLLOWED_TAGS = [
'technology', 'technology',
]; ];
export const MATURE_TAGS = ['porn', 'nsfw', 'mature', 'xxx']; export const MATURE_TAGS = [
'porn',
'porno',
'nsfw',
'mature',
'xxx',
'sex',
'creampie',
'blowjob',
'handjob',
'vagina',
'boobs',
'big boobs',
'big dick',
'pussy',
'cumshot',
'anal',
'hard fucking',
'ass',
'fuck',
'hentai',
];
export const DEFAULT_KNOWN_TAGS = [ const DEFAULT_ENGLISH_KNOWN_TAGS = [
'free speech',
'censorship',
'gaming', 'gaming',
'pop culture', 'pop culture',
'Entertainment', 'entertainment',
'technology', 'technology',
'music', 'music',
'funny', 'funny',
'Education', 'education',
'learning', 'learning',
'news', 'news',
'gameplay', 'gameplay',
@ -30,16 +53,14 @@ export const DEFAULT_KNOWN_TAGS = [
'beliefs', 'beliefs',
'comedy', 'comedy',
'games', 'games',
'sony interactive entertainment',
'film & animation', 'film & animation',
'game', 'game',
'weapons', 'weapons',
"let's play",
'blockchain', 'blockchain',
'video game', 'video game',
'sports', 'sports',
'walkthrough', 'walkthrough',
'ps4live', 'lbrytvpaidbeta',
'art', 'art',
'pc', 'pc',
'minecraft', 'minecraft',
@ -47,7 +68,6 @@ export const DEFAULT_KNOWN_TAGS = [
'economics', 'economics',
'automotive', 'automotive',
'play', 'play',
'ps4share',
'tutorial', 'tutorial',
'twitch', 'twitch',
'how to', 'how to',
@ -58,17 +78,16 @@ export const DEFAULT_KNOWN_TAGS = [
'lets play', 'lets play',
'fun', 'fun',
'politics', 'politics',
'xbox', 'travel',
'autos & vehicles',
'Travel & Events',
'food', 'food',
'science', 'science',
'xbox one', 'xbox',
'liberal', 'liberal',
'democrat', 'democrat',
'progressive', 'progressive',
'survival', 'survival',
'Nonprofits & Activism', 'non-profits',
'activism',
'cryptocurrency', 'cryptocurrency',
'playstation', 'playstation',
'nintendo', 'nintendo',
@ -123,7 +142,6 @@ export const DEFAULT_KNOWN_TAGS = [
'lol', 'lol',
'sony', 'sony',
'god', 'god',
"let's",
'dance', 'dance',
'pvp', 'pvp',
'tech', 'tech',
@ -131,12 +149,10 @@ export const DEFAULT_KNOWN_TAGS = [
'zombies', 'zombies',
'fail', 'fail',
'film', 'film',
'xbox 360', 'xbox360',
'animation', 'animation',
'unboxing', 'unboxing',
'money', 'money',
'how',
'travel',
'wwe', 'wwe',
'mods', 'mods',
'indie', 'indie',
@ -144,7 +160,6 @@ export const DEFAULT_KNOWN_TAGS = [
'ios', 'ios',
'history', 'history',
'rap', 'rap',
'sony computer entertainment',
'mobile', 'mobile',
'trump', 'trump',
'hack', 'hack',
@ -168,7 +183,7 @@ export const DEFAULT_KNOWN_TAGS = [
'mining', 'mining',
'daily', 'daily',
'diy', 'diy',
'pets & animals', 'pets',
'videogame', 'videogame',
'death', 'death',
'funny moments', 'funny moments',
@ -196,14 +211,12 @@ export const DEFAULT_KNOWN_TAGS = [
'house', 'house',
'fire', 'fire',
'bass', 'bass',
'bitcoin news',
'truth', 'truth',
'crash', 'crash',
'mario', 'mario',
'league of legends', 'league of legends',
'wii', 'wii',
'mmorpg', 'mmorpg',
'grand theft auto v',
'health', 'health',
'marvel', 'marvel',
'racing', 'racing',
@ -237,7 +250,6 @@ export const DEFAULT_KNOWN_TAGS = [
'dota 2', 'dota 2',
'studio', 'studio',
'star wars', 'star wars',
'gta 5',
'shooting', 'shooting',
'nasa', 'nasa',
'rock', 'rock',
@ -270,7 +282,6 @@ export const DEFAULT_KNOWN_TAGS = [
'world of warcraft', 'world of warcraft',
'industry', 'industry',
'cartoon', 'cartoon',
'crypto news',
'garden', 'garden',
'animals', 'animals',
'windows', 'windows',
@ -284,7 +295,6 @@ export const DEFAULT_KNOWN_TAGS = [
'parody', 'parody',
'rv', 'rv',
'beats', 'beats',
'fortnite battle royale',
'building', 'building',
'disney', 'disney',
'drone', 'drone',
@ -317,7 +327,6 @@ export const DEFAULT_KNOWN_TAGS = [
'canon', 'canon',
'microsoft', 'microsoft',
'camping', 'camping',
'cryptocurrency news',
'ufo', 'ufo',
'progressive talk', 'progressive talk',
'switch', 'switch',
@ -353,7 +362,6 @@ export const DEFAULT_KNOWN_TAGS = [
'manga', 'manga',
'howto', 'howto',
'insane', 'insane',
'xbox360',
'press', 'press',
'special', 'special',
'church', 'church',
@ -370,7 +378,7 @@ export const DEFAULT_KNOWN_TAGS = [
'sound', 'sound',
'christ', 'christ',
'duty', 'duty',
'Juvenile fiction', 'juvenile fiction',
'pc game', 'pc game',
'how-to', 'how-to',
'ww2', 'ww2',
@ -409,7 +417,6 @@ export const DEFAULT_KNOWN_TAGS = [
'style', 'style',
'travel trailer', 'travel trailer',
'rda', 'rda',
'5859dfec-026f-46ba-bea0-02bf43aa1a6f',
'gun', 'gun',
'secret', 'secret',
'far cry 5', 'far cry 5',
@ -450,8 +457,6 @@ export const DEFAULT_KNOWN_TAGS = [
'capcom', 'capcom',
'rta', 'rta',
'discord', 'discord',
'action role-playing game',
'playthrough part',
'batman', 'batman',
'player', 'player',
'server', 'server',
@ -492,7 +497,7 @@ export const DEFAULT_KNOWN_TAGS = [
'paladins', 'paladins',
'warrior', 'warrior',
'creepypasta', 'creepypasta',
'role-playing video game', 'role-playing',
'solar', 'solar',
'vr', 'vr',
'animal', 'animal',
@ -501,7 +506,7 @@ export const DEFAULT_KNOWN_TAGS = [
'dota', 'dota',
'audio', 'audio',
'mass effect', 'mass effect',
'Humour', 'humour',
'first look', 'first look',
'videogames', 'videogames',
'future bass', 'future bass',
@ -510,4 +515,33 @@ export const DEFAULT_KNOWN_TAGS = [
'portugal', 'portugal',
'dantdm', 'dantdm',
'teaser', 'teaser',
'lbry',
'coronavirus',
'2020protests',
'covidcuts',
'covid-19',
'LBRYFoundationBoardCandidacy',
'helplbrysavecrypto'
]; ];
const DEFAULT_SPANISH_KNOWN_TAGS = [
'español',
'tecnología',
'criptomonedas',
'economía',
'bitcoin',
'educación',
'videojuegos',
'música',
'noticias',
'ciencia',
'deportes',
'latinoamérica',
'latam',
'conspiración',
'humor',
'política',
'tutoriales',
];
export const DEFAULT_KNOWN_TAGS = [...DEFAULT_ENGLISH_KNOWN_TAGS, ...DEFAULT_SPANISH_KNOWN_TAGS];

36
src/constants/txo_list.js Normal file
View file

@ -0,0 +1,36 @@
export const ACTIVE = 'active'; // spent, active, all
export const TYPE = 'type'; // all, payment, support, channel, stream, repost
export const SUB_TYPE = 'subtype'; // other, purchase, tip
export const PAGE_SIZE = 'page_size';
export const PAGE = 'page';
export const ALL = 'all';
// dropdown types
export const SENT = 'sent';
export const RECEIVED = 'received';
export const SUPPORT = 'support';
export const CHANNEL = 'channel';
export const PUBLISH = 'publish';
export const REPOST = 'repost';
export const DROPDOWN_TYPES = [ALL, SENT, RECEIVED, SUPPORT, CHANNEL, PUBLISH, REPOST];
// dropdown subtypes
export const TIP = 'tip';
export const PURCHASE = 'purchase';
export const PAYMENT = 'payment';
export const DROPDOWN_SUBTYPES = [ALL, TIP, PURCHASE, PAYMENT];
// rpc params
export const TX_TYPE = 'type'; // = other, stream, repost, channel, support, purchase
export const IS_SPENT = 'is_spent';
export const IS_NOT_SPENT = 'is_not_spent';
export const IS_MY_INPUT = 'is_my_input';
export const IS_MY_OUTPUT = 'is_my_output';
export const IS_NOT_MY_INPUT = 'is_not_my_input';
export const IS_NOT_MY_OUTPUT = 'is_not_my_output'; // use to further distinguish payments to self / from self.
export const IS_MY_INPUT_OR_OUTPUT = 'is_my_input_or_output';
export const EXCLUDE_INTERNAL_TRANSFERS = 'exclude_internal_transfers';
// sdk unique types
export const OTHER = 'other';
export const STREAM = 'stream';
export const PAGE_SIZE_DEFAULT = 20;

View file

@ -7,13 +7,15 @@ import * as SORT_OPTIONS from 'constants/sort_options';
import * as THUMBNAIL_STATUSES from 'constants/thumbnail_upload_statuses'; import * as THUMBNAIL_STATUSES from 'constants/thumbnail_upload_statuses';
import * as TRANSACTIONS from 'constants/transaction_types'; import * as TRANSACTIONS from 'constants/transaction_types';
import * as TX_LIST from 'constants/transaction_list'; import * as TX_LIST from 'constants/transaction_list';
import * as ABANDON_STATES from 'constants/abandon_states';
import * as TXO_LIST from 'constants/txo_list';
import * as SPEECH_URLS from 'constants/speech_urls'; import * as SPEECH_URLS from 'constants/speech_urls';
import * as DAEMON_SETTINGS from 'constants/daemon_settings'; import * as DAEMON_SETTINGS from 'constants/daemon_settings';
import * as SHARED_PREFERENCES from 'constants/shared_preferences'; import * as SHARED_PREFERENCES from 'constants/shared_preferences';
import { SEARCH_TYPES, SEARCH_OPTIONS } from 'constants/search'; import * as COLLECTIONS_CONSTS from 'constants/collections';
import { DEFAULT_KNOWN_TAGS, DEFAULT_FOLLOWED_TAGS, MATURE_TAGS } from 'constants/tags'; import { DEFAULT_KNOWN_TAGS, DEFAULT_FOLLOWED_TAGS, MATURE_TAGS } from 'constants/tags';
import Lbry, { apiCall } from 'lbry'; import Lbry, { apiCall } from 'lbry';
import { selectState as selectSearchState } from 'redux/selectors/search'; import LbryFirst from 'lbry-first';
// constants // constants
export { export {
@ -21,12 +23,12 @@ export {
CLAIM_VALUES, CLAIM_VALUES,
LICENSES, LICENSES,
THUMBNAIL_STATUSES, THUMBNAIL_STATUSES,
SEARCH_TYPES,
SEARCH_OPTIONS,
SETTINGS, SETTINGS,
DAEMON_SETTINGS, DAEMON_SETTINGS,
TRANSACTIONS, TRANSACTIONS,
TX_LIST, TX_LIST,
TXO_LIST,
ABANDON_STATES,
SORT_OPTIONS, SORT_OPTIONS,
PAGES, PAGES,
DEFAULT_KNOWN_TAGS, DEFAULT_KNOWN_TAGS,
@ -34,10 +36,12 @@ export {
MATURE_TAGS, MATURE_TAGS,
SPEECH_URLS, SPEECH_URLS,
SHARED_PREFERENCES, SHARED_PREFERENCES,
COLLECTIONS_CONSTS,
}; };
// common // common
export { Lbry, apiCall }; export { Lbry, apiCall };
export { LbryFirst };
export { export {
regexInvalidURI, regexInvalidURI,
regexAddress, regexAddress,
@ -48,6 +52,8 @@ export {
isURIClaimable, isURIClaimable,
isNameValid, isNameValid,
convertToShareLink, convertToShareLink,
splitBySeparator,
isURIEqual,
} from 'lbryURI'; } from 'lbryURI';
// middlware // middlware
@ -55,26 +61,43 @@ export { buildSharedStateMiddleware } from 'redux/middleware/shared-state';
// actions // actions
export { doToast, doDismissToast, doError, doDismissError } from 'redux/actions/notifications'; export { doToast, doDismissToast, doError, doDismissError } from 'redux/actions/notifications';
export {
doLocalCollectionCreate,
doFetchItemsInCollection,
doFetchItemsInCollections,
doCollectionEdit,
doCollectionDelete,
} from 'redux/actions/collections';
export { export {
doFetchClaimsByChannel, doFetchClaimsByChannel,
doFetchClaimListMine, doFetchClaimListMine,
doAbandonClaim, doAbandonClaim,
doAbandonTxo,
doResolveUris, doResolveUris,
doResolveUri, doResolveUri,
doFetchChannelListMine, doFetchChannelListMine,
doFetchCollectionListMine,
doCreateChannel, doCreateChannel,
doUpdateChannel, doUpdateChannel,
doClaimSearch, doClaimSearch,
doImportChannel, doImportChannel,
doRepost,
doClearRepostError,
doClearChannelErrors,
doCheckPublishNameAvailability,
doPurchaseList,
doCheckPendingClaims,
doCollectionPublish,
doCollectionPublishUpdate,
} from 'redux/actions/claims'; } from 'redux/actions/claims';
export { doDeletePurchasedUri, doPurchaseUri, doFileGet } from 'redux/actions/file'; export { doClearPurchasedUriSuccess, doPurchaseUri, doFileGet } from 'redux/actions/file';
export { export {
doFetchFileInfo, doFetchFileInfo,
doFileList, doFileList,
doFetchFileInfosAndPublishedClaims, doFetchFileInfos,
doSetFileListSort, doSetFileListSort,
} from 'redux/actions/file_info'; } from 'redux/actions/file_info';
@ -85,24 +108,17 @@ export {
doUploadThumbnail, doUploadThumbnail,
doPrepareEdit, doPrepareEdit,
doPublish, doPublish,
doCheckPendingPublishes, doCheckReflectingFiles,
} from 'redux/actions/publish'; } from 'redux/actions/publish';
export {
doSearch,
doUpdateSearchQuery,
doFocusSearchInput,
doBlurSearchInput,
setSearchApi,
doUpdateSearchOptions,
} from 'redux/actions/search';
export { savePosition } from 'redux/actions/content'; export { savePosition } from 'redux/actions/content';
export { export {
doUpdateBalance, doUpdateBalance,
doBalanceSubscribe, doBalanceSubscribe,
doFetchTransactions, doFetchTransactions,
doFetchTxoPage,
doUpdateTxoPageParams,
doGetNewAddress, doGetNewAddress,
doCheckAddressIsMine, doCheckAddressIsMine,
doSendDraftTransaction, doSendDraftTransaction,
@ -117,14 +133,12 @@ export {
doSetTransactionListFilter, doSetTransactionListFilter,
doUpdateBlockHeight, doUpdateBlockHeight,
doClearSupport, doClearSupport,
doSupportAbandonForClaim,
doFetchUtxoCounts,
doUtxoConsolidate,
doTipClaimMass,
} from 'redux/actions/wallet'; } from 'redux/actions/wallet';
export { doToggleTagFollow, doAddTag, doDeleteTag } from 'redux/actions/tags';
export { doCommentList, doCommentCreate } from 'redux/actions/comments';
export { doToggleBlockChannel } from 'redux/actions/blocked';
export { doPopulateSharedUserState, doPreferenceGet, doPreferenceSet } from 'redux/actions/sync'; export { doPopulateSharedUserState, doPreferenceGet, doPreferenceSet } from 'redux/actions/sync';
// utils // utils
@ -135,29 +149,44 @@ export { isClaimNsfw, createNormalizedClaimSearchKey } from 'util/claim';
// reducers // reducers
export { claimsReducer } from 'redux/reducers/claims'; export { claimsReducer } from 'redux/reducers/claims';
export { commentReducer } from 'redux/reducers/comments';
export { contentReducer } from 'redux/reducers/content'; export { contentReducer } from 'redux/reducers/content';
export { fileInfoReducer } from 'redux/reducers/file_info'; export { fileInfoReducer } from 'redux/reducers/file_info';
export { fileReducer } from 'redux/reducers/file';
export { notificationsReducer } from 'redux/reducers/notifications'; export { notificationsReducer } from 'redux/reducers/notifications';
export { publishReducer } from 'redux/reducers/publish'; export { publishReducer } from 'redux/reducers/publish';
export { searchReducer } from 'redux/reducers/search';
export { tagsReducer } from 'redux/reducers/tags';
export { blockedReducer } from 'redux/reducers/blocked';
export { walletReducer } from 'redux/reducers/wallet'; export { walletReducer } from 'redux/reducers/wallet';
export { collectionsReducer } from 'redux/reducers/collections';
// selectors // selectors
export { makeSelectContentPositionForUri } from 'redux/selectors/content'; export { makeSelectContentPositionForUri } from 'redux/selectors/content';
export { selectToast, selectError } from 'redux/selectors/notifications'; export { selectToast, selectError } from 'redux/selectors/notifications';
export { export {
selectFailedPurchaseUris, selectSavedCollectionIds,
selectPurchasedUris, selectBuiltinCollections,
selectPurchaseUriErrorMessage, selectResolvedCollections,
selectLastPurchasedUri, selectMyUnpublishedCollections,
makeSelectStreamingUrlForUri, selectMyEditedCollections,
} from 'redux/selectors/file'; selectMyPublishedCollections,
selectMyPublishedMixedCollections,
selectMyPublishedPlaylistCollections,
makeSelectEditedCollectionForId,
makeSelectPendingCollectionForId,
makeSelectPublishedCollectionForId,
makeSelectCollectionIsMine,
makeSelectMyPublishedCollectionForId,
makeSelectUnpublishedCollectionForId,
makeSelectCollectionForId,
makeSelectClaimUrlInCollection,
makeSelectUrlsForCollectionId,
makeSelectClaimIdsForCollectionId,
makeSelectNameForCollectionId,
makeSelectCountForCollectionId,
makeSelectIsResolvingCollectionForId,
makeSelectIndexForUrlInCollection,
makeSelectPreviousUrlForCollectionAndUrl,
makeSelectNextUrlForCollectionAndUrl,
makeSelectCollectionForIdHasClaimUrl,
} from 'redux/selectors/collections';
export { export {
makeSelectClaimForUri, makeSelectClaimForUri,
@ -173,26 +202,37 @@ export {
makeSelectTitleForUri, makeSelectTitleForUri,
makeSelectDateForUri, makeSelectDateForUri,
makeSelectAmountForUri, makeSelectAmountForUri,
makeSelectEffectiveAmountForUri,
makeSelectTagsForUri, makeSelectTagsForUri,
makeSelectTagInClaimOrChannelForUri,
makeSelectTotalStakedAmountForChannelUri,
makeSelectStakedLevelForChannelUri,
makeSelectContentTypeForUri, makeSelectContentTypeForUri,
makeSelectIsUriResolving, makeSelectIsUriResolving,
makeSelectPendingClaimForUri,
makeSelectTotalItemsForChannel, makeSelectTotalItemsForChannel,
makeSelectTotalPagesForChannel, makeSelectTotalPagesForChannel,
makeSelectNsfwCountFromUris, makeSelectNsfwCountFromUris,
makeSelectNsfwCountForChannel,
makeSelectOmittedCountForChannel, makeSelectOmittedCountForChannel,
makeSelectClaimIsNsfw, makeSelectClaimIsNsfw,
makeSelectRecommendedContentForUri,
makeSelectFirstRecommendedFileForUri,
makeSelectChannelForClaimUri, makeSelectChannelForClaimUri,
makeSelectChannelPermUrlForClaimUri,
makeSelectMyChannelPermUrlForName,
makeSelectClaimIsPending, makeSelectClaimIsPending,
makeSelectPendingByUri, makeSelectReflectingClaimForUri,
makeSelectClaimsInChannelForCurrentPageState,
makeSelectShortUrlForUri, makeSelectShortUrlForUri,
makeSelectCanonicalUrlForUri, makeSelectCanonicalUrlForUri,
makeSelectPermanentUrlForUri, makeSelectPermanentUrlForUri,
makeSelectSupportsForUri, makeSelectSupportsForUri,
selectPendingById, makeSelectMyPurchasesForPage,
makeSelectClaimWasPurchased,
makeSelectAbandoningClaimById,
makeSelectIsAbandoningClaimForUri,
makeSelectClaimHasSource,
makeSelectClaimIsStreamPlaceholder,
selectPendingIds,
selectReflectingById,
makeSelectClaimForClaimId,
selectClaimsById, selectClaimsById,
selectClaimsByUri, selectClaimsByUri,
selectAllClaimsByChannel, selectAllClaimsByChannel,
@ -201,13 +241,16 @@ export {
selectMyActiveClaims, selectMyActiveClaims,
selectAllFetchingChannelClaims, selectAllFetchingChannelClaims,
selectIsFetchingClaimListMine, selectIsFetchingClaimListMine,
selectPendingClaims,
selectMyClaims, selectMyClaims,
selectPendingClaims,
selectMyClaimsWithoutChannels, selectMyClaimsWithoutChannels,
selectMyChannelUrls,
selectMyClaimUrisWithoutChannels, selectMyClaimUrisWithoutChannels,
selectAllMyClaimsByOutpoint, selectAllMyClaimsByOutpoint,
selectMyClaimsOutpoints, selectMyClaimsOutpoints,
selectFetchingMyChannels, selectFetchingMyChannels,
selectFetchingMyCollections,
selectMyCollectionIds,
selectMyChannelClaims, selectMyChannelClaims,
selectResolvingUris, selectResolvingUris,
selectPlayingUri, selectPlayingUri,
@ -224,10 +267,26 @@ export {
selectChannelImportPending, selectChannelImportPending,
makeSelectMyStreamUrlsForPage, makeSelectMyStreamUrlsForPage,
selectMyStreamUrlsCount, selectMyStreamUrlsCount,
selectRepostError,
selectRepostLoading,
selectClaimIdsByUri,
selectMyClaimsPage,
selectMyClaimsPageNumber,
selectMyClaimsPageItemCount,
selectFetchingMyClaimsPageError,
selectMyPurchases,
selectIsFetchingMyPurchases,
selectFetchingMyPurchasesError,
selectMyPurchasesCount,
selectPurchaseUriSuccess,
makeSelectClaimIdForUri,
selectUpdatingCollection,
selectUpdateCollectionError,
selectCreatingCollection,
selectCreateCollectionError,
makeSelectClaimIdIsPending,
} from 'redux/selectors/claims'; } from 'redux/selectors/claims';
export { makeSelectCommentsForUri } from 'redux/selectors/comments';
export { export {
makeSelectFileInfoForUri, makeSelectFileInfoForUri,
makeSelectDownloadingForUri, makeSelectDownloadingForUri,
@ -251,6 +310,7 @@ export {
makeSelectSearchDownloadUrlsForPage, makeSelectSearchDownloadUrlsForPage,
makeSelectSearchDownloadUrlsCount, makeSelectSearchDownloadUrlsCount,
selectDownloadUrlsCount, selectDownloadUrlsCount,
makeSelectStreamingUrlForUri,
} from 'redux/selectors/file_info'; } from 'redux/selectors/file_info';
export { export {
@ -262,18 +322,6 @@ export {
selectTakeOverAmount, selectTakeOverAmount,
} from 'redux/selectors/publish'; } from 'redux/selectors/publish';
export { selectSearchState };
export {
makeSelectSearchUris,
selectSearchValue,
selectSearchOptions,
selectIsSearching,
selectSearchUrisByQuery,
selectSearchBarFocused,
selectSearchSuggestions,
makeSelectQueryWithOptions,
} from 'redux/selectors/search';
export { export {
selectBalance, selectBalance,
selectTotalBalance, selectTotalBalance,
@ -285,6 +333,7 @@ export {
selectSupportsByOutpoint, selectSupportsByOutpoint,
selectTotalSupports, selectTotalSupports,
selectTransactionItems, selectTransactionItems,
selectTransactionsFile,
selectRecentTransactions, selectRecentTransactions,
selectHasTransactions, selectHasTransactions,
selectIsFetchingTransactions, selectIsFetchingTransactions,
@ -309,20 +358,24 @@ export {
selectWalletUnlockResult, selectWalletUnlockResult,
selectTransactionListFilter, selectTransactionListFilter,
selectFilteredTransactions, selectFilteredTransactions,
selectTxoPageParams,
selectTxoPage,
selectTxoPageNumber,
selectTxoItemCount,
selectIsFetchingTxos,
selectFetchingTxosError,
makeSelectLatestTransactions, makeSelectLatestTransactions,
makeSelectFilteredTransactionsForPage, makeSelectFilteredTransactionsForPage,
selectFilteredTransactionCount, selectFilteredTransactionCount,
selectIsWalletReconnecting, selectIsWalletReconnecting,
selectPendingSupportTransactions,
selectAbandonClaimSupportError,
makeSelectPendingAmountByUri,
selectIsFetchingUtxoCounts,
selectIsConsolidatingUtxos,
selectIsMassClaimingTips,
selectUtxoCounts,
selectPendingOtherTransactions,
selectPendingConsolidateTxid,
selectPendingMassClaimTxid,
} from 'redux/selectors/wallet'; } from 'redux/selectors/wallet';
export {
selectFollowedTags,
selectUnfollowedTags,
makeSelectIsFollowingTag,
} from 'redux/selectors/tags';
export {
selectBlockedChannels,
selectChannelIsBlocked,
selectBlockedChannelsCount,
} from 'redux/selectors/blocked';

183
src/lbry-first.js Normal file
View file

@ -0,0 +1,183 @@
// @flow
import 'proxy-polyfill';
const CHECK_LBRYFIRST_STARTED_TRY_NUMBER = 200;
//
// Basic LBRYFIRST connection config
// Offers a proxy to call LBRYFIRST methods
//
const LbryFirst: LbryFirstTypes = {
isConnected: false,
connectPromise: null,
lbryFirstConnectionString: 'http://localhost:1337/rpc',
apiRequestHeaders: { 'Content-Type': 'application/json' },
// Allow overriding lbryFirst connection string (e.g. to `/api/proxy` for lbryweb)
setLbryFirstConnectionString: (value: string) => {
LbryFirst.lbryFirstConnectionString = value;
},
setApiHeader: (key: string, value: string) => {
LbryFirst.apiRequestHeaders = Object.assign(LbryFirst.apiRequestHeaders, { [key]: value });
},
unsetApiHeader: key => {
Object.keys(LbryFirst.apiRequestHeaders).includes(key) &&
delete LbryFirst.apiRequestHeaders['key'];
},
// Allow overriding Lbry methods
overrides: {},
setOverride: (methodName, newMethod) => {
LbryFirst.overrides[methodName] = newMethod;
},
getApiRequestHeaders: () => LbryFirst.apiRequestHeaders,
//
// LbryFirst Methods
//
status: (params = {}) => lbryFirstCallWithResult('status', params),
stop: () => lbryFirstCallWithResult('stop', {}),
version: () => lbryFirstCallWithResult('version', {}),
// Upload to youtube
upload: (params: { title: string, description: string, file_path: ?string } = {}) => {
// Only upload when originally publishing for now
if (!params.file_path) {
return Promise.resolve();
}
const uploadParams: {
Title: string,
Description: string,
FilePath: string,
Category: string,
Keywords: string,
} = {
Title: params.title,
Description: params.description,
FilePath: params.file_path,
Category: '',
Keywords: '',
};
return lbryFirstCallWithResult('youtube.Upload', uploadParams);
},
hasYTAuth: (token: string) => {
const hasYTAuthParams = {};
hasYTAuthParams.AuthToken = token;
return lbryFirstCallWithResult('youtube.HasAuth', hasYTAuthParams);
},
ytSignup: () => {
const emptyParams = {};
return lbryFirstCallWithResult('youtube.Signup', emptyParams);
},
remove: () => {
const emptyParams = {};
return lbryFirstCallWithResult('youtube.Remove', emptyParams);
},
// Connect to lbry-first
connect: () => {
if (LbryFirst.connectPromise === null) {
LbryFirst.connectPromise = new Promise((resolve, reject) => {
let tryNum = 0;
// Check every half second to see if the lbryFirst is accepting connections
function checkLbryFirstStarted() {
tryNum += 1;
LbryFirst.status()
.then(resolve)
.catch(() => {
if (tryNum <= CHECK_LBRYFIRST_STARTED_TRY_NUMBER) {
setTimeout(checkLbryFirstStarted, tryNum < 50 ? 400 : 1000);
} else {
reject(new Error('Unable to connect to LBRY'));
}
});
}
checkLbryFirstStarted();
});
}
// Flow thinks this could be empty, but it will always return a promise
// $FlowFixMe
return LbryFirst.connectPromise;
},
};
// Parse a fetch Response: resolve with its JSON body on a 2xx status,
// otherwise reject with an Error extracted from the JSON error payload
// (or a generic protocol error when no recognizable error is present).
function checkAndParse(response) {
  const isSuccess = response.status >= 200 && response.status < 300;
  if (isSuccess) {
    return response.json();
  }
  return response.json().then(json => {
    let message = 'Protocol error with unknown response signature';
    if (json.error) {
      message = typeof json.error === 'object' ? json.error.message : json.error;
    }
    return Promise.reject(new Error(message));
  });
}
export function apiCall(method: string, params: ?{}, resolve: Function, reject: Function) {
const counter = new Date().getTime();
const paramsArray = [params];
const options = {
method: 'POST',
headers: LbryFirst.apiRequestHeaders,
body: JSON.stringify({
jsonrpc: '2.0',
method,
params: paramsArray,
id: counter,
}),
};
return fetch(LbryFirst.lbryFirstConnectionString, options)
.then(checkAndParse)
.then(response => {
const error = response.error || (response.result && response.result.error);
if (error) {
return reject(error);
}
return resolve(response.result);
})
.catch(reject);
}
function lbryFirstCallWithResult(name: string, params: ?{} = {}) {
return new Promise((resolve, reject) => {
apiCall(
name,
params,
result => {
resolve(result);
},
reject
);
});
}
// This is only for a fallback
// If there is a LbryFirst method that is being called by an app, it should be added to /flow-typed/LbryFirst.js
const lbryFirstProxy = new Proxy(LbryFirst, {
get(target: LbryFirstTypes, name: string) {
if (name in target) {
return target[name];
}
return (params = {}) =>
new Promise((resolve, reject) => {
apiCall(name, params, resolve, reject);
});
},
});
export default lbryFirstProxy;

View file

@ -11,6 +11,8 @@ const Lbry: LbryTypes = {
isConnected: false, isConnected: false,
connectPromise: null, connectPromise: null,
daemonConnectionString: 'http://localhost:5279', daemonConnectionString: 'http://localhost:5279',
alternateConnectionString: '',
methodsUsingAlternateConnectionString: [],
apiRequestHeaders: { 'Content-Type': 'application/json-rpc' }, apiRequestHeaders: { 'Content-Type': 'application/json-rpc' },
// Allow overriding daemon connection string (e.g. to `/api/proxy` for lbryweb) // Allow overriding daemon connection string (e.g. to `/api/proxy` for lbryweb)
@ -38,9 +40,9 @@ const Lbry: LbryTypes = {
const formats = [ const formats = [
[/\.(mp4|m4v|webm|flv|f4v|ogv)$/i, 'video'], [/\.(mp4|m4v|webm|flv|f4v|ogv)$/i, 'video'],
[/\.(mp3|m4a|aac|wav|flac|ogg|opus)$/i, 'audio'], [/\.(mp3|m4a|aac|wav|flac|ogg|opus)$/i, 'audio'],
[/\.(jpeg|jpg|png|gif|svg)$/i, 'image'], [/\.(jpeg|jpg|png|gif|svg|webp)$/i, 'image'],
[/\.(h|go|ja|java|js|jsx|c|cpp|cs|css|rb|scss|sh|php|py)$/i, 'script'], [/\.(h|go|ja|java|js|jsx|c|cpp|cs|css|rb|scss|sh|php|py)$/i, 'script'],
[/\.(json|csv|txt|log|md|markdown|docx|pdf|xml|yml|yaml)$/i, 'document'], [/\.(html|json|csv|txt|log|md|markdown|docx|pdf|xml|yml|yaml)$/i, 'document'],
[/\.(pdf|odf|doc|docx|epub|org|rtf)$/i, 'e-book'], [/\.(pdf|odf|doc|docx|epub|org|rtf)$/i, 'e-book'],
[/\.(stl|obj|fbx|gcode)$/i, '3D-file'], [/\.(stl|obj|fbx|gcode)$/i, '3D-file'],
[/\.(cbr|cbt|cbz)$/i, 'comic-book'], [/\.(cbr|cbt|cbz)$/i, 'comic-book'],
@ -84,8 +86,14 @@ const Lbry: LbryTypes = {
stream_abandon: params => daemonCallWithResult('stream_abandon', params), stream_abandon: params => daemonCallWithResult('stream_abandon', params),
stream_list: params => daemonCallWithResult('stream_list', params), stream_list: params => daemonCallWithResult('stream_list', params),
channel_abandon: params => daemonCallWithResult('channel_abandon', params), channel_abandon: params => daemonCallWithResult('channel_abandon', params),
channel_sign: params => daemonCallWithResult('channel_sign', params),
support_create: params => daemonCallWithResult('support_create', params), support_create: params => daemonCallWithResult('support_create', params),
support_list: params => daemonCallWithResult('support_list', params), support_list: params => daemonCallWithResult('support_list', params),
stream_repost: params => daemonCallWithResult('stream_repost', params),
collection_resolve: params => daemonCallWithResult('collection_resolve', params),
collection_list: params => daemonCallWithResult('collection_list', params),
collection_create: params => daemonCallWithResult('collection_create', params),
collection_update: params => daemonCallWithResult('collection_update', params),
// File fetching and manipulation // File fetching and manipulation
file_list: (params = {}) => daemonCallWithResult('file_list', params), file_list: (params = {}) => daemonCallWithResult('file_list', params),
@ -108,6 +116,8 @@ const Lbry: LbryTypes = {
transaction_list: (params = {}) => daemonCallWithResult('transaction_list', params), transaction_list: (params = {}) => daemonCallWithResult('transaction_list', params),
utxo_release: (params = {}) => daemonCallWithResult('utxo_release', params), utxo_release: (params = {}) => daemonCallWithResult('utxo_release', params),
support_abandon: (params = {}) => daemonCallWithResult('support_abandon', params), support_abandon: (params = {}) => daemonCallWithResult('support_abandon', params),
purchase_list: (params = {}) => daemonCallWithResult('purchase_list', params),
txo_list: (params = {}) => daemonCallWithResult('txo_list', params),
sync_hash: (params = {}) => daemonCallWithResult('sync_hash', params), sync_hash: (params = {}) => daemonCallWithResult('sync_hash', params),
sync_apply: (params = {}) => daemonCallWithResult('sync_apply', params), sync_apply: (params = {}) => daemonCallWithResult('sync_apply', params),
@ -119,6 +129,10 @@ const Lbry: LbryTypes = {
// Comments // Comments
comment_list: (params = {}) => daemonCallWithResult('comment_list', params), comment_list: (params = {}) => daemonCallWithResult('comment_list', params),
comment_create: (params = {}) => daemonCallWithResult('comment_create', params), comment_create: (params = {}) => daemonCallWithResult('comment_create', params),
comment_hide: (params = {}) => daemonCallWithResult('comment_hide', params),
comment_abandon: (params = {}) => daemonCallWithResult('comment_abandon', params),
comment_update: (params = {}) => daemonCallWithResult('comment_update', params),
// Connect to the sdk // Connect to the sdk
connect: () => { connect: () => {
if (Lbry.connectPromise === null) { if (Lbry.connectPromise === null) {
@ -186,7 +200,10 @@ export function apiCall(method: string, params: ?{}, resolve: Function, reject:
}), }),
}; };
return fetch(Lbry.daemonConnectionString, options) const connectionString = Lbry.methodsUsingAlternateConnectionString.includes(method)
? Lbry.alternateConnectionString
: Lbry.daemonConnectionString;
return fetch(connectionString + '?m=' + method, options)
.then(checkAndParse) .then(checkAndParse)
.then(response => { .then(response => {
const error = response.error || (response.result && response.result.error); const error = response.error || (response.result && response.result.error);

View file

@ -12,6 +12,11 @@ const regexPartModifierSeparator = '([:$#]?)([^/]*)';
const queryStringBreaker = '^([\\S]+)([?][\\S]*)'; const queryStringBreaker = '^([\\S]+)([?][\\S]*)';
const separateQuerystring = new RegExp(queryStringBreaker); const separateQuerystring = new RegExp(queryStringBreaker);
const MOD_SEQUENCE_SEPARATOR = '*';
const MOD_CLAIM_ID_SEPARATOR_OLD = '#';
const MOD_CLAIM_ID_SEPARATOR = ':';
const MOD_BID_POSITION_SEPARATOR = '$';
/** /**
* Parses a LBRY name into its component parts. Throws errors with user-friendly * Parses a LBRY name into its component parts. Throws errors with user-friendly
* messages for invalid names. * messages for invalid names.
@ -29,7 +34,7 @@ const separateQuerystring = new RegExp(queryStringBreaker);
* - secondaryBidPosition (int, if present) * - secondaryBidPosition (int, if present)
*/ */
export function parseURI(URL: string, requireProto: boolean = false): LbryUrlObj { export function parseURI(url: string, requireProto: boolean = false): LbryUrlObj {
// Break into components. Empty sub-matches are converted to null // Break into components. Empty sub-matches are converted to null
const componentsRegex = new RegExp( const componentsRegex = new RegExp(
@ -42,12 +47,12 @@ export function parseURI(URL: string, requireProto: boolean = false): LbryUrlObj
); );
// chop off the querystring first // chop off the querystring first
let QSStrippedURL, qs; let QSStrippedURL, qs;
const qsRegexResult = separateQuerystring.exec(URL); const qsRegexResult = separateQuerystring.exec(url);
if (qsRegexResult) { if (qsRegexResult) {
[QSStrippedURL, qs] = qsRegexResult.slice(1).map(match => match || null); [QSStrippedURL, qs] = qsRegexResult.slice(1).map(match => match || null);
} }
const cleanURL = QSStrippedURL || URL; const cleanURL = QSStrippedURL || url;
const regexMatch = componentsRegex.exec(cleanURL) || []; const regexMatch = componentsRegex.exec(cleanURL) || [];
const [proto, ...rest] = regexMatch.slice(1).map(match => match || null); const [proto, ...rest] = regexMatch.slice(1).map(match => match || null);
const path = rest.join(''); const path = rest.join('');
@ -60,6 +65,8 @@ export function parseURI(URL: string, requireProto: boolean = false): LbryUrlObj
secondaryModSeparator, secondaryModSeparator,
secondaryModValue, secondaryModValue,
] = rest; ] = rest;
const searchParams = new URLSearchParams(qs || '');
const startTime = searchParams.get('t');
// Validate protocol // Validate protocol
if (requireProto && !proto) { if (requireProto && !proto) {
@ -73,7 +80,7 @@ export function parseURI(URL: string, requireProto: boolean = false): LbryUrlObj
rest.forEach(urlPiece => { rest.forEach(urlPiece => {
if (urlPiece && urlPiece.includes(' ')) { if (urlPiece && urlPiece.includes(' ')) {
throw new Error('URL can not include a space'); throw new Error(__('URL can not include a space'));
} }
}); });
@ -121,6 +128,7 @@ export function parseURI(URL: string, requireProto: boolean = false): LbryUrlObj
: {}), : {}),
...(primaryBidPosition ? { primaryBidPosition: parseInt(primaryBidPosition, 10) } : {}), ...(primaryBidPosition ? { primaryBidPosition: parseInt(primaryBidPosition, 10) } : {}),
...(secondaryBidPosition ? { secondaryBidPosition: parseInt(secondaryBidPosition, 10) } : {}), ...(secondaryBidPosition ? { secondaryBidPosition: parseInt(secondaryBidPosition, 10) } : {}),
...(startTime ? { startTime: parseInt(startTime, 10) } : {}),
// The values below should not be used for new uses of parseURI // The values below should not be used for new uses of parseURI
// They will not work properly with canonical_urls // They will not work properly with canonical_urls
@ -138,28 +146,28 @@ function parseURIModifier(modSeperator: ?string, modValue: ?string) {
if (modSeperator) { if (modSeperator) {
if (!modValue) { if (!modValue) {
console.error(__(`No modifier provided after separator %modSeperator%.`, { modSeperator })); throw new Error(__(`No modifier provided after separator %modSeperator%.`, { modSeperator }));
} }
if (modSeperator === '#') { if (modSeperator === MOD_CLAIM_ID_SEPARATOR || MOD_CLAIM_ID_SEPARATOR_OLD) {
claimId = modValue; claimId = modValue;
} else if (modSeperator === ':') { } else if (modSeperator === MOD_SEQUENCE_SEPARATOR) {
claimSequence = modValue; claimSequence = modValue;
} else if (modSeperator === '$') { } else if (modSeperator === MOD_BID_POSITION_SEPARATOR) {
bidPosition = modValue; bidPosition = modValue;
} }
} }
if (claimId && (claimId.length > claimIdMaxLength || !claimId.match(/^[0-9a-f]+$/))) { if (claimId && (claimId.length > claimIdMaxLength || !claimId.match(/^[0-9a-f]+$/))) {
console.error(__(`Invalid claim ID %claimId%.`, { claimId })); throw new Error(__(`Invalid claim ID %claimId%.`, { claimId }));
} }
if (claimSequence && !claimSequence.match(/^-?[1-9][0-9]*$/)) { if (claimSequence && !claimSequence.match(/^-?[1-9][0-9]*$/)) {
console.error(__('Claim sequence must be a number.')); throw new Error(__('Claim sequence must be a number.'));
} }
if (bidPosition && !bidPosition.match(/^-?[1-9][0-9]*$/)) { if (bidPosition && !bidPosition.match(/^-?[1-9][0-9]*$/)) {
console.error(__('Bid position must be a number.')); throw new Error(__('Bid position must be a number.'));
} }
return [claimId, claimSequence, bidPosition]; return [claimId, claimSequence, bidPosition];
@ -184,6 +192,7 @@ export function buildURI(
primaryBidPosition, primaryBidPosition,
secondaryClaimSequence, secondaryClaimSequence,
secondaryBidPosition, secondaryBidPosition,
startTime,
...deprecatedParts ...deprecatedParts
} = UrlObj; } = UrlObj;
const { claimId, claimName, contentName } = deprecatedParts; const { claimId, claimName, contentName } = deprecatedParts;
@ -233,7 +242,8 @@ export function buildURI(
(secondaryClaimName ? `/${secondaryClaimName}` : '') + (secondaryClaimName ? `/${secondaryClaimName}` : '') +
(secondaryClaimId ? `#${secondaryClaimId}` : '') + (secondaryClaimId ? `#${secondaryClaimId}` : '') +
(secondaryClaimSequence ? `:${secondaryClaimSequence}` : '') + (secondaryClaimSequence ? `:${secondaryClaimSequence}` : '') +
(secondaryBidPosition ? `${secondaryBidPosition}` : '') (secondaryBidPosition ? `${secondaryBidPosition}` : '') +
(startTime ? `?t=${startTime}` : '')
); );
} }
@ -248,6 +258,7 @@ export function normalizeURI(URL: string) {
primaryBidPosition, primaryBidPosition,
secondaryClaimSequence, secondaryClaimSequence,
secondaryBidPosition, secondaryBidPosition,
startTime,
} = parseURI(URL); } = parseURI(URL);
return buildURI({ return buildURI({
@ -259,6 +270,7 @@ export function normalizeURI(URL: string) {
primaryBidPosition, primaryBidPosition,
secondaryClaimSequence, secondaryClaimSequence,
secondaryBidPosition, secondaryBidPosition,
startTime,
}); });
} }
@ -313,3 +325,22 @@ export function convertToShareLink(URL: string) {
'https://open.lbry.com/' 'https://open.lbry.com/'
); );
} }
export function splitBySeparator(uri: string) {
const protocolLength = 7;
return uri.startsWith('lbry://') ? uri.slice(protocolLength).split(/[#:*]/) : uri.split(/#:\*\$/);
}
export function isURIEqual(uriA: string, uriB: string) {
const parseA = parseURI(normalizeURI(uriA));
const parseB = parseURI(normalizeURI(uriB));
if (parseA.isChannel) {
if (parseB.isChannel && parseA.channelClaimId === parseB.channelClaimId) {
return true;
}
} else if (parseA.streamClaimId === parseB.streamClaimId) {
return true;
} else {
return false;
}
}

View file

@ -1,5 +1,6 @@
// @flow // @flow
import * as ACTIONS from 'constants/action_types'; import * as ACTIONS from 'constants/action_types';
import * as ABANDON_STATES from 'constants/abandon_states';
import Lbry from 'lbry'; import Lbry from 'lbry';
import { normalizeURI } from 'lbryURI'; import { normalizeURI } from 'lbryURI';
import { doToast } from 'redux/actions/notifications'; import { doToast } from 'redux/actions/notifications';
@ -8,14 +9,34 @@ import {
selectResolvingUris, selectResolvingUris,
selectClaimsByUri, selectClaimsByUri,
selectMyChannelClaims, selectMyChannelClaims,
selectPendingIds,
selectPendingClaimsById,
} from 'redux/selectors/claims'; } from 'redux/selectors/claims';
import { doFetchTransactions } from 'redux/actions/wallet';
import { doFetchTxoPage } from 'redux/actions/wallet';
import { selectSupportsByOutpoint } from 'redux/selectors/wallet'; import { selectSupportsByOutpoint } from 'redux/selectors/wallet';
import { creditsToString } from 'util/format-credits'; import { creditsToString } from 'util/format-credits';
import { batchActions } from 'util/batch-actions'; import { batchActions } from 'util/batch-actions';
import { createNormalizedClaimSearchKey } from 'util/claim'; import { createNormalizedClaimSearchKey } from 'util/claim';
import { PAGE_SIZE } from 'constants/claim';
import {
selectPendingCollections,
makeSelectClaimIdsForCollectionId,
} from 'redux/selectors/collections';
import {
doFetchItemsInCollection,
doFetchItemsInCollections,
doCollectionDelete,
} from 'redux/actions/collections';
export function doResolveUris(uris: Array<string>, returnCachedClaims: boolean = false) { let onChannelConfirmCallback;
let checkPendingInterval;
export function doResolveUris(
uris: Array<string>,
returnCachedClaims: boolean = false,
resolveReposts: boolean = true
) {
return (dispatch: Dispatch, getState: GetState) => { return (dispatch: Dispatch, getState: GetState) => {
const normalizedUris = uris.map(normalizeURI); const normalizedUris = uris.map(normalizeURI);
const state = getState(); const state = getState();
@ -34,6 +55,13 @@ export function doResolveUris(uris: Array<string>, returnCachedClaims: boolean =
return; return;
} }
const options: { include_is_my_output?: boolean, include_purchase_receipt: boolean } = {
include_purchase_receipt: true,
};
if (urisToResolve.length === 1) {
options.include_is_my_output = true;
}
dispatch({ dispatch({
type: ACTIONS.RESOLVE_URIS_STARTED, type: ACTIONS.RESOLVE_URIS_STARTED,
data: { uris: normalizedUris }, data: { uris: normalizedUris },
@ -44,28 +72,49 @@ export function doResolveUris(uris: Array<string>, returnCachedClaims: boolean =
stream: ?StreamClaim, stream: ?StreamClaim,
channel: ?ChannelClaim, channel: ?ChannelClaim,
claimsInChannel: ?number, claimsInChannel: ?number,
collection: ?CollectionClaim,
}, },
} = {}; } = {};
Lbry.resolve({ urls: urisToResolve }).then((result: ResolveResponse) => { const collectionIds: Array<string> = [];
Object.entries(result).forEach(([uri, uriResolveInfo]) => {
return Lbry.resolve({ urls: urisToResolve, ...options }).then(
async(result: ResolveResponse) => {
let repostedResults = {};
const repostsToResolve = [];
const fallbackResolveInfo = { const fallbackResolveInfo = {
stream: null, stream: null,
claimsInChannel: null, claimsInChannel: null,
channel: null, channel: null,
}; };
function processResult(result, resolveInfo = {}, checkReposts = false) {
Object.entries(result).forEach(([uri, uriResolveInfo]) => {
// Flow has terrible Object.entries support // Flow has terrible Object.entries support
// https://github.com/facebook/flow/issues/2221 // https://github.com/facebook/flow/issues/2221
if (uriResolveInfo) { if (uriResolveInfo) {
if (uriResolveInfo.error) { if (uriResolveInfo.error) {
// $FlowFixMe
resolveInfo[uri] = { ...fallbackResolveInfo }; resolveInfo[uri] = { ...fallbackResolveInfo };
} else { } else {
if (checkReposts) {
if (uriResolveInfo.reposted_claim) {
// $FlowFixMe
const repostUrl = uriResolveInfo.reposted_claim.permanent_url;
if (!resolvingUris.includes(repostUrl)) {
repostsToResolve.push(repostUrl);
}
}
}
let result = {}; let result = {};
if (uriResolveInfo.value_type === 'channel') { if (uriResolveInfo.value_type === 'channel') {
result.channel = uriResolveInfo; result.channel = uriResolveInfo;
// $FlowFixMe // $FlowFixMe
result.claimsInChannel = uriResolveInfo.meta.claims_in_channel; result.claimsInChannel = uriResolveInfo.meta.claims_in_channel;
} else if (uriResolveInfo.value_type === 'collection') {
result.collection = uriResolveInfo;
// $FlowFixMe
collectionIds.push(uriResolveInfo.claim_id);
} else { } else {
result.stream = uriResolveInfo; result.stream = uriResolveInfo;
if (uriResolveInfo.signing_channel) { if (uriResolveInfo.signing_channel) {
@ -81,12 +130,30 @@ export function doResolveUris(uris: Array<string>, returnCachedClaims: boolean =
} }
} }
}); });
}
processResult(result, resolveInfo, resolveReposts);
if (repostsToResolve.length) {
dispatch({
type: ACTIONS.RESOLVE_URIS_STARTED,
data: { uris: repostsToResolve, debug: 'reposts' },
});
repostedResults = await Lbry.resolve({ urls: repostsToResolve, ...options });
}
processResult(repostedResults, resolveInfo);
dispatch({ dispatch({
type: ACTIONS.RESOLVE_URIS_COMPLETED, type: ACTIONS.RESOLVE_URIS_COMPLETED,
data: { resolveInfo }, data: { resolveInfo },
}); });
});
if (collectionIds.length) {
dispatch(doFetchItemsInCollections({ collectionIds: collectionIds, pageSize: 5 }));
}
return result;
}
);
}; };
} }
@ -94,26 +161,126 @@ export function doResolveUri(uri: string) {
return doResolveUris([uri]); return doResolveUris([uri]);
} }
export function doFetchClaimListMine(page: number = 1, pageSize: number = 99999) { export function doFetchClaimListMine(
page: number = 1,
pageSize: number = 99999,
resolve: boolean = true,
filterBy: Array<string> = []
) {
return (dispatch: Dispatch) => { return (dispatch: Dispatch) => {
dispatch({ dispatch({
type: ACTIONS.FETCH_CLAIM_LIST_MINE_STARTED, type: ACTIONS.FETCH_CLAIM_LIST_MINE_STARTED,
}); });
Lbry.stream_list({ page, page_size: pageSize }).then((result: StreamListResponse) => { let claimTypes = ['stream', 'repost'];
const claims = result.items; if (filterBy && filterBy.length !== 0) {
claimTypes = claimTypes.filter(t => filterBy.includes(t));
}
// $FlowFixMe
Lbry.claim_list({
page: page,
page_size: pageSize,
claim_type: claimTypes,
resolve,
}).then((result: StreamListResponse) => {
dispatch({ dispatch({
type: ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED, type: ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED,
data: { data: {
claims, result,
resolve,
}, },
}); });
}); });
}; };
} }
export function doAbandonClaim(txid: string, nout: number) { export function doAbandonTxo(txo: Txo, cb: string => void) {
return (dispatch: Dispatch) => {
if (cb) cb(ABANDON_STATES.PENDING);
const isClaim = txo.type === 'claim';
const isSupport = txo.type === 'support' && txo.is_my_input === true;
const isTip = txo.type === 'support' && txo.is_my_input === false;
const data = isClaim ? { claimId: txo.claim_id } : { outpoint: `${txo.txid}:${txo.nout}` };
const startedActionType = isClaim
? ACTIONS.ABANDON_CLAIM_STARTED
: ACTIONS.ABANDON_SUPPORT_STARTED;
const completedActionType = isClaim
? ACTIONS.ABANDON_CLAIM_SUCCEEDED
: ACTIONS.ABANDON_SUPPORT_COMPLETED;
dispatch({
type: startedActionType,
data,
});
const errorCallback = () => {
if (cb) cb(ABANDON_STATES.ERROR);
dispatch(
doToast({
message: isClaim ? 'Error abandoning your claim/support' : 'Error unlocking your tip',
isError: true,
})
);
};
const successCallback = () => {
dispatch({
type: completedActionType,
data,
});
let abandonMessage;
if (isClaim) {
abandonMessage = __('Successfully abandoned your claim.');
} else if (isSupport) {
abandonMessage = __('Successfully abandoned your support.');
} else {
abandonMessage = __('Successfully unlocked your tip!');
}
if (cb) cb(ABANDON_STATES.DONE);
dispatch(
doToast({
message: abandonMessage,
})
);
};
const abandonParams: {
claim_id?: string,
txid?: string,
nout?: number,
} = {
blocking: true,
};
if (isClaim) {
abandonParams['claim_id'] = txo.claim_id;
} else {
abandonParams['txid'] = txo.txid;
abandonParams['nout'] = txo.nout;
}
let method;
if (isSupport || isTip) {
method = 'support_abandon';
} else if (isClaim) {
const { normalized_name: claimName } = txo;
method = claimName.startsWith('@') ? 'channel_abandon' : 'stream_abandon';
}
if (!method) {
console.error('No "method" chosen for claim or support abandon');
return;
}
Lbry[method](abandonParams).then(successCallback, errorCallback);
};
}
export function doAbandonClaim(txid: string, nout: number, cb: string => void) {
const outpoint = `${txid}:${nout}`; const outpoint = `${txid}:${nout}`;
return (dispatch: Dispatch, getState: GetState) => { return (dispatch: Dispatch, getState: GetState) => {
@ -154,6 +321,7 @@ export function doAbandonClaim(txid: string, nout: number) {
isError: true, isError: true,
}) })
); );
if (cb) cb(ABANDON_STATES.ERROR);
}; };
const successCallback = () => { const successCallback = () => {
@ -161,14 +329,15 @@ export function doAbandonClaim(txid: string, nout: number) {
type: completedActionType, type: completedActionType,
data, data,
}); });
if (cb) cb(ABANDON_STATES.DONE);
let abandonMessage; let abandonMessage;
if (isClaim) { if (isClaim) {
abandonMessage = 'Successfully abandoned your claim.'; abandonMessage = __('Successfully abandoned your claim.');
} else if (supportToAbandon) { } else if (supportToAbandon) {
abandonMessage = 'Successfully abandoned your support.'; abandonMessage = __('Successfully abandoned your support.');
} else { } else {
abandonMessage = 'Successfully unlocked your tip!'; abandonMessage = __('Successfully unlocked your tip!');
} }
dispatch( dispatch(
@ -176,13 +345,7 @@ export function doAbandonClaim(txid: string, nout: number) {
message: abandonMessage, message: abandonMessage,
}) })
); );
dispatch(doFetchTxoPage());
// After abandoning, fetch transactions to show the new abandon transaction
// Only fetch the latest few transactions since we don't care about old ones
// Not very robust, but better than calling the entire list for large wallets
const page = 1;
const pageSize = 10;
dispatch(doFetchTransactions(page, pageSize));
}; };
const abandonParams = { const abandonParams = {
@ -220,6 +383,8 @@ export function doFetchClaimsByChannel(uri: string, page: number = 1) {
valid_channel_signature: true, valid_channel_signature: true,
page: page || 1, page: page || 1,
order_by: ['release_time'], order_by: ['release_time'],
include_is_my_output: true,
include_purchase_receipt: true,
}).then((result: ClaimSearchResponse) => { }).then((result: ClaimSearchResponse) => {
const { items: claims, total_items: claimsInChannel, page: returnedPage } = result; const { items: claims, total_items: claimsInChannel, page: returnedPage } = result;
@ -236,7 +401,13 @@ export function doFetchClaimsByChannel(uri: string, page: number = 1) {
}; };
} }
export function doCreateChannel(name: string, amount: number, optionalParams: any) { export function doClearChannelErrors() {
return {
type: ACTIONS.CLEAR_CHANNEL_ERRORS,
};
}
export function doCreateChannel(name: string, amount: number, optionalParams: any, onConfirm: any) {
return (dispatch: Dispatch) => { return (dispatch: Dispatch) => {
dispatch({ dispatch({
type: ACTIONS.CREATE_CHANNEL_STARTED, type: ACTIONS.CREATE_CHANNEL_STARTED,
@ -252,7 +423,8 @@ export function doCreateChannel(name: string, amount: number, optionalParams: an
description?: string, description?: string,
website_url?: string, website_url?: string,
email?: string, email?: string,
tags?: Array<string>, tags?: Array<Tag>,
languages?: Array<string>,
} = { } = {
name, name,
bid: creditsToString(amount), bid: creditsToString(amount),
@ -281,6 +453,9 @@ export function doCreateChannel(name: string, amount: number, optionalParams: an
if (optionalParams.tags) { if (optionalParams.tags) {
createParams.tags = optionalParams.tags.map(tag => tag.name); createParams.tags = optionalParams.tags.map(tag => tag.name);
} }
if (optionalParams.languages) {
createParams.languages = optionalParams.languages;
}
} }
return ( return (
@ -293,6 +468,13 @@ export function doCreateChannel(name: string, amount: number, optionalParams: an
type: ACTIONS.CREATE_CHANNEL_COMPLETED, type: ACTIONS.CREATE_CHANNEL_COMPLETED,
data: { channelClaim }, data: { channelClaim },
}); });
dispatch({
type: ACTIONS.UPDATE_PENDING_CLAIMS,
data: {
claims: [channelClaim],
},
});
dispatch(doCheckPendingClaims(onConfirm));
return channelClaim; return channelClaim;
}) })
.catch(error => { .catch(error => {
@ -300,13 +482,12 @@ export function doCreateChannel(name: string, amount: number, optionalParams: an
type: ACTIONS.CREATE_CHANNEL_FAILED, type: ACTIONS.CREATE_CHANNEL_FAILED,
data: error.message, data: error.message,
}); });
return error;
}) })
); );
}; };
} }
export function doUpdateChannel(params: any) { export function doUpdateChannel(params: any, cb: any) {
return (dispatch: Dispatch, getState: GetState) => { return (dispatch: Dispatch, getState: GetState) => {
dispatch({ dispatch({
type: ACTIONS.UPDATE_CHANNEL_STARTED, type: ACTIONS.UPDATE_CHANNEL_STARTED,
@ -326,7 +507,7 @@ export function doUpdateChannel(params: any) {
email: params.email, email: params.email,
tags: [], tags: [],
replace: true, replace: true,
languages: [], languages: params.languages || [],
locations: [], locations: [],
blocking: true, blocking: true,
}; };
@ -336,15 +517,10 @@ export function doUpdateChannel(params: any) {
} }
// we'll need to remove these once we add locations/channels to channel page edit/create options // we'll need to remove these once we add locations/channels to channel page edit/create options
if (channelClaim && channelClaim.value && channelClaim.value.locations) { if (channelClaim && channelClaim.value && channelClaim.value.locations) {
updateParams.locations = channelClaim.value.locations; updateParams.locations = channelClaim.value.locations;
} }
if (channelClaim && channelClaim.value && channelClaim.value.languages) {
updateParams.languages = channelClaim.value.languages;
}
return Lbry.channel_update(updateParams) return Lbry.channel_update(updateParams)
.then((result: ChannelUpdateResponse) => { .then((result: ChannelUpdateResponse) => {
const channelClaim = result.outputs[0]; const channelClaim = result.outputs[0];
@ -352,7 +528,16 @@ export function doUpdateChannel(params: any) {
type: ACTIONS.UPDATE_CHANNEL_COMPLETED, type: ACTIONS.UPDATE_CHANNEL_COMPLETED,
data: { channelClaim }, data: { channelClaim },
}); });
dispatch({
type: ACTIONS.UPDATE_PENDING_CLAIMS,
data: {
claims: [channelClaim],
},
});
dispatch(doCheckPendingClaims(cb));
return Boolean(result.outputs[0]);
}) })
.then()
.catch(error => { .catch(error => {
dispatch({ dispatch({
type: ACTIONS.UPDATE_CHANNEL_FAILED, type: ACTIONS.UPDATE_CHANNEL_FAILED,
@ -369,7 +554,7 @@ export function doImportChannel(certificate: string) {
}); });
return Lbry.channel_import({ channel_data: certificate }) return Lbry.channel_import({ channel_data: certificate })
.then((result: string) => { .then(() => {
dispatch({ dispatch({
type: ACTIONS.IMPORT_CHANNEL_COMPLETED, type: ACTIONS.IMPORT_CHANNEL_COMPLETED,
}); });
@ -383,7 +568,11 @@ export function doImportChannel(certificate: string) {
}; };
} }
export function doFetchChannelListMine(page: number = 1, pageSize: number = 99999) { export function doFetchChannelListMine(
page: number = 1,
pageSize: number = 99999,
resolve: boolean = true
) {
return (dispatch: Dispatch) => { return (dispatch: Dispatch) => {
dispatch({ dispatch({
type: ACTIONS.FETCH_CHANNEL_LIST_STARTED, type: ACTIONS.FETCH_CHANNEL_LIST_STARTED,
@ -396,7 +585,48 @@ export function doFetchChannelListMine(page: number = 1, pageSize: number = 9999
}); });
}; };
Lbry.channel_list({ page, page_size: pageSize }).then(callback); const failure = error => {
dispatch({
type: ACTIONS.FETCH_CHANNEL_LIST_FAILED,
data: error,
});
};
Lbry.channel_list({ page, page_size: pageSize, resolve }).then(callback, failure);
};
}
export function doFetchCollectionListMine(page: number = 1, pageSize: number = 99999) {
return (dispatch: Dispatch) => {
dispatch({
type: ACTIONS.FETCH_COLLECTION_LIST_STARTED,
});
const callback = (response: CollectionListResponse) => {
const { items } = response;
dispatch({
type: ACTIONS.FETCH_COLLECTION_LIST_COMPLETED,
data: { claims: items },
});
dispatch(
doFetchItemsInCollections({
collectionIds: items.map(claim => claim.claim_id),
page_size: 5,
})
);
};
const failure = error => {
dispatch({
type: ACTIONS.FETCH_COLLECTION_LIST_FAILED,
data: error,
});
};
Lbry.collection_list({ page, page_size: pageSize, resolve_claims: 1, resolve: true }).then(
callback,
failure
);
}; };
} }
@ -404,13 +634,16 @@ export function doClaimSearch(
options: { options: {
page_size: number, page_size: number,
page: number, page: number,
no_totals: boolean, no_totals?: boolean,
any_tags?: Array<string>, any_tags?: Array<string>,
claim_ids?: Array<string>,
channel_ids?: Array<string>, channel_ids?: Array<string>,
not_channel_ids?: Array<string>, not_channel_ids?: Array<string>,
not_tags?: Array<string>, not_tags?: Array<string>,
order_by?: Array<string>, order_by?: Array<string>,
release_time?: string, release_time?: string,
has_source?: boolean,
has_no_souce?: boolean,
} = { } = {
no_totals: true, no_totals: true,
page_size: 10, page_size: 10,
@ -418,7 +651,7 @@ export function doClaimSearch(
} }
) { ) {
const query = createNormalizedClaimSearchKey(options); const query = createNormalizedClaimSearchKey(options);
return (dispatch: Dispatch) => { return async(dispatch: Dispatch) => {
dispatch({ dispatch({
type: ACTIONS.CLAIM_SEARCH_STARTED, type: ACTIONS.CLAIM_SEARCH_STARTED,
data: { query: query }, data: { query: query },
@ -442,6 +675,7 @@ export function doClaimSearch(
pageSize: options.page_size, pageSize: options.page_size,
}, },
}); });
return resolveInfo;
}; };
const failure = err => { const failure = err => {
@ -450,8 +684,387 @@ export function doClaimSearch(
data: { query }, data: { query },
error: err, error: err,
}); });
return false;
}; };
Lbry.claim_search(options).then(success, failure); return await Lbry.claim_search({
...options,
include_purchase_receipt: true,
}).then(success, failure);
}; };
} }
export function doRepost(options: StreamRepostOptions) {
return (dispatch: Dispatch): Promise<any> => {
return new Promise(resolve => {
dispatch({
type: ACTIONS.CLAIM_REPOST_STARTED,
});
function success(response) {
const repostClaim = response.outputs[0];
dispatch({
type: ACTIONS.CLAIM_REPOST_COMPLETED,
data: {
originalClaimId: options.claim_id,
repostClaim,
},
});
dispatch({
type: ACTIONS.UPDATE_PENDING_CLAIMS,
data: {
claims: [repostClaim],
},
});
dispatch(doFetchClaimListMine(1, 10));
resolve(repostClaim);
}
function failure(error) {
dispatch({
type: ACTIONS.CLAIM_REPOST_FAILED,
data: {
error: error.message,
},
});
}
Lbry.stream_repost(options).then(success, failure);
});
};
}
export function doCollectionPublish(
options: {
name: string,
bid: string,
blocking: true,
title?: string,
channel_id?: string,
thumbnail_url?: string,
description?: string,
tags?: Array<Tag>,
languages?: Array<string>,
claims: Array<string>,
},
localId: string
) {
return (dispatch: Dispatch): Promise<any> => {
// $FlowFixMe
const params: {
name: string,
bid: string,
channel_id?: string,
blocking?: true,
title?: string,
thumbnail_url?: string,
description?: string,
tags?: Array<string>,
languages?: Array<string>,
claims: Array<string>,
} = {
name: options.name,
bid: creditsToString(options.bid),
title: options.title,
thumbnail_url: options.thumbnail_url,
description: options.description,
tags: [],
languages: options.languages || [],
locations: [],
blocking: true,
claims: options.claims,
};
if (options.tags) {
params['tags'] = options.tags.map(tag => tag.name);
}
if (options.channel_id) {
params['channel_id'] = options.channel_id;
}
return new Promise(resolve => {
dispatch({
type: ACTIONS.COLLECTION_PUBLISH_STARTED,
});
function success(response) {
const collectionClaim = response.outputs[0];
dispatch(
batchActions(
{
type: ACTIONS.COLLECTION_PUBLISH_COMPLETED,
data: { claimId: collectionClaim.claim_id },
},
// move unpublished collection to pending collection with new publish id
// recent publish won't resolve this second. handle it in checkPending
{
type: ACTIONS.UPDATE_PENDING_CLAIMS,
data: {
claims: [collectionClaim],
},
}
)
);
dispatch({
type: ACTIONS.COLLECTION_PENDING,
data: { localId: localId, claimId: collectionClaim.claim_id },
});
dispatch(doCheckPendingClaims());
dispatch(doFetchCollectionListMine(1, 10));
return resolve(collectionClaim);
}
function failure(error) {
dispatch({
type: ACTIONS.COLLECTION_PUBLISH_FAILED,
data: {
error: error.message,
},
});
}
return Lbry.collection_create(params).then(success, failure);
});
};
}
// Update an existing published collection claim.
// Two modes:
//  - foreground (default): a full `replace: true` update built from `options`,
//    rewriting all metadata fields.
//  - `isBackgroundUpdate`: a minimal update that only re-sends the item list
//    pulled from the store (used to sync list edits without touching metadata).
// Resolves with the updated collection claim on success.
export function doCollectionPublishUpdate(
  options: {
    bid?: string,
    blocking?: true,
    title?: string,
    thumbnail_url?: string,
    description?: string,
    claim_id: string,
    tags?: Array<Tag>,
    languages?: Array<string>,
    claims?: Array<string>,
    channel_id?: string,
  },
  isBackgroundUpdate?: boolean
) {
  return (dispatch: Dispatch, getState: GetState): Promise<any> => {
    // TODO: implement one click update

    const updateParams: {
      bid?: string,
      blocking?: true,
      title?: string,
      thumbnail_url?: string,
      channel_id?: string,
      description?: string,
      claim_id: string,
      tags?: Array<string>,
      languages?: Array<string>,
      claims?: Array<string>,
      clear_claims: boolean,
      replace?: boolean,
    } = isBackgroundUpdate
      ? {
          // Background: change nothing but the claim list.
          blocking: true,
          claim_id: options.claim_id,
          clear_claims: true,
        }
      : {
          bid: creditsToString(options.bid),
          title: options.title,
          thumbnail_url: options.thumbnail_url,
          description: options.description,
          tags: [],
          languages: options.languages || [],
          locations: [],
          blocking: true,
          claim_id: options.claim_id,
          clear_claims: true,
          replace: true,
        };

    if (isBackgroundUpdate && updateParams.claim_id) {
      // Background mode: item list comes from the store, not from `options`.
      const state = getState();
      updateParams['claims'] = makeSelectClaimIdsForCollectionId(updateParams.claim_id)(state);
    } else if (options.claims) {
      updateParams['claims'] = options.claims;
    }

    if (options.tags) {
      // SDK expects plain tag names, not Tag objects.
      updateParams['tags'] = options.tags.map(tag => tag.name);
    }

    if (options.channel_id) {
      updateParams['channel_id'] = options.channel_id;
    }

    return new Promise(resolve => {
      dispatch({
        type: ACTIONS.COLLECTION_PUBLISH_UPDATE_STARTED,
      });

      function success(response) {
        const collectionClaim = response.outputs[0];
        dispatch({
          type: ACTIONS.COLLECTION_PUBLISH_UPDATE_COMPLETED,
          data: {
            collectionClaim,
          },
        });
        // Mark the collection pending until the update confirms on-chain.
        dispatch({
          type: ACTIONS.COLLECTION_PENDING,
          data: { claimId: collectionClaim.claim_id },
        });
        dispatch({
          type: ACTIONS.UPDATE_PENDING_CLAIMS,
          data: {
            claims: [collectionClaim],
          },
        });
        dispatch(doCheckPendingClaims());
        return resolve(collectionClaim);
      }

      // NOTE(review): on failure the returned promise never settles, so
      // callers awaiting it hang; only the FAILED action fires. Confirm
      // whether this should resolve/reject instead.
      function failure(error) {
        dispatch({
          type: ACTIONS.COLLECTION_PUBLISH_UPDATE_FAILED,
          data: {
            error: error.message,
          },
        });
      }

      return Lbry.collection_update(updateParams).then(success, failure);
    });
  };
}
export function doCheckPublishNameAvailability(name: string) {
return (dispatch: Dispatch) => {
dispatch({
type: ACTIONS.CHECK_PUBLISH_NAME_STARTED,
});
return Lbry.claim_list({ name: name }).then(result => {
dispatch({
type: ACTIONS.CHECK_PUBLISH_NAME_COMPLETED,
});
if (result.items.length) {
dispatch({
type: ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED,
data: {
result,
resolve: false,
},
});
}
return !(result && result.items && result.items.length);
});
};
}
// Plain action creator: clear any stored repost error from state.
export function doClearRepostError() {
  return { type: ACTIONS.CLEAR_REPOST_ERROR };
}
export function doPurchaseList(page: number = 1, pageSize: number = PAGE_SIZE) {
return (dispatch: Dispatch) => {
dispatch({
type: ACTIONS.PURCHASE_LIST_STARTED,
});
const success = (result: PurchaseListResponse) => {
return dispatch({
type: ACTIONS.PURCHASE_LIST_COMPLETED,
data: {
result,
},
});
};
const failure = error => {
dispatch({
type: ACTIONS.PURCHASE_LIST_FAILED,
data: {
error: error.message,
},
});
};
Lbry.purchase_list({
page: page,
page_size: pageSize,
resolve: true,
}).then(success, failure);
};
}
// Poll `txo_list` every 30s until every pending claim in the store is
// confirmed on-chain, then fetch the confirmed claims and fold them into
// state. `onChannelConfirmed` (optional) is stashed in the module-level
// `onChannelConfirmCallback` (declared above this view) and invoked once per
// confirmed channel claim. `checkPendingInterval` is also module-level, so
// repeated calls restart a single shared poller.
export const doCheckPendingClaims = (onChannelConfirmed: Function) => (
  dispatch: Dispatch,
  getState: GetState
) => {
  if (onChannelConfirmed) {
    onChannelConfirmCallback = onChannelConfirmed;
  }
  // Restart the poller so repeated calls don't stack intervals.
  clearInterval(checkPendingInterval);

  const checkTxoList = () => {
    const state = getState();
    // Copy so we can delete entries locally as claims confirm.
    const pendingById = Object.assign({}, selectPendingClaimsById(state));
    const pendingTxos = (Object.values(pendingById): any).map(p => p.txid);
    // use collections
    // NOTE(review): `pendingCollections` is computed but never used here —
    // confirm whether collection-specific pending handling was intended.
    const pendingCollections = selectPendingCollections(state);
    if (pendingTxos.length) {
      Lbry.txo_list({ txid: pendingTxos })
        .then(result => {
          const txos = result.items;
          const idsToConfirm = [];
          txos.forEach(txo => {
            // confirmations > 0 means the tx made it into a block.
            if (txo.claim_id && txo.confirmations > 0) {
              idsToConfirm.push(txo.claim_id);
              delete pendingById[txo.claim_id];
            }
          });
          return { idsToConfirm, pendingById };
        })
        .then(results => {
          const { idsToConfirm, pendingById } = results;
          if (idsToConfirm.length) {
            // Fetch the now-confirmed claims with full resolve data.
            return Lbry.claim_list({ claim_id: idsToConfirm, resolve: true }).then(results => {
              const claims = results.items;
              const collectionIds = claims
                .filter(c => c.value_type === 'collection')
                .map(c => c.claim_id);
              dispatch({
                type: ACTIONS.UPDATE_CONFIRMED_CLAIMS,
                data: {
                  claims: claims,
                  pending: pendingById,
                },
              });
              // Confirmed collections need their items re-resolved.
              if (collectionIds.length) {
                dispatch(
                  doFetchItemsInCollections({
                    collectionIds,
                  })
                );
              }
              const channelClaims = claims.filter(claim => claim.value_type === 'channel');
              if (channelClaims.length && onChannelConfirmCallback) {
                channelClaims.forEach(claim => onChannelConfirmCallback(claim));
              }
              // Nothing left pending: stop polling.
              if (Object.keys(pendingById).length === 0) {
                clearInterval(checkPendingInterval);
              }
            });
          }
        });
    } else {
      // Nothing pending at all: stop polling.
      clearInterval(checkPendingInterval);
    }
  };
  // do something with onConfirmed (typically get blocklist for channel)
  // NOTE(review): the first check only fires after the initial 30s delay —
  // confirm an immediate first `checkTxoList()` call isn't wanted.
  checkPendingInterval = setInterval(() => {
    checkTxoList();
  }, 30000);
};

View file

@ -0,0 +1,495 @@
// @flow
import * as ACTIONS from 'constants/action_types';
import { v4 as uuid } from 'uuid';
import Lbry from 'lbry';
import { doClaimSearch, doAbandonClaim } from 'redux/actions/claims';
import { makeSelectClaimForClaimId } from 'redux/selectors/claims';
import {
makeSelectCollectionForId,
// makeSelectPublishedCollectionForId, // for "save" or "copy" action
makeSelectMyPublishedCollectionForId,
makeSelectPublishedCollectionForId,
makeSelectUnpublishedCollectionForId,
makeSelectEditedCollectionForId,
} from 'redux/selectors/collections';
import * as COLS from 'constants/collections';
// Current Unix time, truncated to whole seconds.
const getTimestamp = () => Math.floor(Date.now() / 1000);
const FETCH_BATCH_SIZE = 50;
export const doLocalCollectionCreate = (
name: string,
collectionItems: Array<string>,
type: string,
sourceId: string
) => (dispatch: Dispatch) => {
return dispatch({
type: ACTIONS.COLLECTION_NEW,
data: {
entry: {
id: uuid(), // start with a uuid, this becomes a claimId after publish
name: name,
updatedAt: getTimestamp(),
items: collectionItems || [],
sourceId: sourceId,
type: type,
},
},
});
};
// Delete a collection. When it is backed by a published claim and no
// specific store key (`colKey`) is given, the claim itself is abandoned
// first and the store entry removed in its callback; otherwise only the
// store entry under `colKey` is removed.
export const doCollectionDelete = (id: string, colKey: ?string = undefined) => (
  dispatch: Dispatch,
  getState: GetState
) => {
  const claim = makeSelectClaimForClaimId(id)(getState());

  const removeFromStore = () =>
    dispatch({
      type: ACTIONS.COLLECTION_DELETE,
      data: {
        id: id,
        collectionKey: colKey,
      },
    });

  if (!claim || colKey) {
    return removeFromStore();
  }

  // could support "abandon, but keep" later
  const { txid, nout } = claim;
  return dispatch(doAbandonClaim(txid, nout, removeFromStore));
};
// Given a collection, save its collectionId to be resolved and displayed in Library
// export const doCollectionSave = (
// id: string,
// ) => (dispatch: Dispatch) => {
// return dispatch({
// type: ACTIONS.COLLECTION_SAVE,
// data: {
// id: id,
// },
// });
// };
// Given a collection and name, copy it to a local private collection with a name
// export const doCollectionCopy = (
// id: string,
// ) => (dispatch: Dispatch) => {
// return dispatch({
// type: ACTIONS.COLLECTION_COPY,
// data: {
// id: id,
// },
// });
// };
export const doFetchItemsInCollections = (
resolveItemsOptions: {
collectionIds: Array<string>,
pageSize?: number,
},
resolveStartedCallback?: () => void
) => async(dispatch: Dispatch, getState: GetState) => {
/*
1) make sure all the collection claims are loaded into claims reducer, search/resolve if necessary.
2) get the item claims for each
3) format and make sure they're in the order as in the claim
4) Build the collection objects and update collections reducer
5) Update redux claims reducer
*/
let state = getState();
const { collectionIds, pageSize } = resolveItemsOptions;
dispatch({
type: ACTIONS.COLLECTION_ITEMS_RESOLVE_STARTED,
data: { ids: collectionIds },
});
if (resolveStartedCallback) resolveStartedCallback();
const collectionIdsToSearch = collectionIds.filter(claimId => !state.claims.byId[claimId]);
if (collectionIdsToSearch.length) {
await dispatch(doClaimSearch({ claim_ids: collectionIdsToSearch, page: 1, page_size: 9999 }));
}
const stateAfterClaimSearch = getState();
async function fetchItemsForCollectionClaim(claim: CollectionClaim, pageSize?: number) {
const totalItems = claim.value.claims && claim.value.claims.length;
const claimId = claim.claim_id;
const itemOrder = claim.value.claims;
const sortResults = (items: Array<Claim>, claimList) => {
const newItems: Array<Claim> = [];
claimList.forEach(id => {
const index = items.findIndex(i => i.claim_id === id);
if (index >= 0) {
newItems.push(items[index]);
}
});
/*
This will return newItems[] of length less than total_items below
if one or more of the claims has been abandoned. That's ok for now.
*/
return newItems;
};
const mergeBatches = (
arrayOfResults: Array<{ items: Array<Claim>, total_items: number }>,
claimList: Array<string>
) => {
const mergedResults: { items: Array<Claim>, total_items: number } = {
items: [],
total_items: 0,
};
arrayOfResults.forEach(result => {
mergedResults.items = mergedResults.items.concat(result.items);
mergedResults.total_items = result.total_items;
});
mergedResults.items = sortResults(mergedResults.items, claimList);
return mergedResults;
};
try {
const batchSize = pageSize || FETCH_BATCH_SIZE;
const batches: Array<Promise<any>> = [];
for (let i = 0; i < Math.ceil(totalItems / batchSize); i++) {
batches[i] = Lbry.claim_search({
claim_ids: claim.value.claims,
page: i + 1,
page_size: batchSize,
no_totals: true,
});
}
const itemsInBatches = await Promise.all(batches);
const result = mergeBatches(itemsInBatches, itemOrder);
// $FlowFixMe
const itemsById: { claimId: string, items?: ?Array<GenericClaim> } = { claimId: claimId };
if (result.items) {
itemsById.items = result.items;
} else {
itemsById.items = null;
}
return itemsById;
} catch (e) {
return {
claimId: claimId,
items: null,
};
}
}
function formatForClaimActions(resultClaimsByUri) {
const formattedClaims = {};
Object.entries(resultClaimsByUri).forEach(([uri, uriResolveInfo]) => {
// Flow has terrible Object.entries support
// https://github.com/facebook/flow/issues/2221
if (uriResolveInfo) {
let result = {};
if (uriResolveInfo.value_type === 'channel') {
result.channel = uriResolveInfo;
// $FlowFixMe
result.claimsInChannel = uriResolveInfo.meta.claims_in_channel;
// ALSO SKIP COLLECTIONS
} else if (uriResolveInfo.value_type === 'collection') {
result.collection = uriResolveInfo;
} else {
result.stream = uriResolveInfo;
if (uriResolveInfo.signing_channel) {
result.channel = uriResolveInfo.signing_channel;
result.claimsInChannel =
(uriResolveInfo.signing_channel.meta &&
uriResolveInfo.signing_channel.meta.claims_in_channel) ||
0;
}
}
// $FlowFixMe
formattedClaims[uri] = result;
}
});
return formattedClaims;
}
const invalidCollectionIds = [];
const promisedCollectionItemFetches = [];
collectionIds.forEach(collectionId => {
const claim = makeSelectClaimForClaimId(collectionId)(stateAfterClaimSearch);
if (!claim) {
invalidCollectionIds.push(collectionId);
} else {
promisedCollectionItemFetches.push(fetchItemsForCollectionClaim(claim, pageSize));
}
});
// $FlowFixMe
const collectionItemsById: Array<{
claimId: string,
items: ?Array<GenericClaim>,
}> = await Promise.all(promisedCollectionItemFetches);
const newCollectionObjectsById = {};
const resolvedItemsByUrl = {};
collectionItemsById.forEach(entry => {
// $FlowFixMe
const collectionItems: Array<any> = entry.items;
const collectionId = entry.claimId;
if (collectionItems) {
const claim = makeSelectClaimForClaimId(collectionId)(stateAfterClaimSearch);
const editedCollection = makeSelectEditedCollectionForId(collectionId)(stateAfterClaimSearch);
const { name, timestamp, value } = claim || {};
const { title } = value;
const valueTypes = new Set();
const streamTypes = new Set();
let newItems = [];
let isPlaylist;
if (collectionItems) {
collectionItems.forEach(collectionItem => {
newItems.push(collectionItem.permanent_url);
valueTypes.add(collectionItem.value_type);
if (collectionItem.value.stream_type) {
streamTypes.add(collectionItem.value.stream_type);
}
resolvedItemsByUrl[collectionItem.canonical_url] = collectionItem;
});
isPlaylist =
valueTypes.size === 1 &&
valueTypes.has('stream') &&
((streamTypes.size === 1 && (streamTypes.has('audio') || streamTypes.has('video'))) ||
(streamTypes.size === 2 && (streamTypes.has('audio') && streamTypes.has('video'))));
}
newCollectionObjectsById[collectionId] = {
items: newItems,
id: collectionId,
name: title || name,
itemCount: claim.value.claims.length,
type: isPlaylist ? 'playlist' : 'collection',
updatedAt: timestamp,
};
if (editedCollection && timestamp > editedCollection['updatedAt']) {
dispatch({
type: ACTIONS.COLLECTION_DELETE,
data: {
id: collectionId,
collectionKey: 'edited',
},
});
}
} else {
invalidCollectionIds.push(collectionId);
}
});
const formattedClaimsByUri = formatForClaimActions(collectionItemsById);
dispatch({
type: ACTIONS.RESOLVE_URIS_COMPLETED,
data: { resolveInfo: formattedClaimsByUri },
});
dispatch({
type: ACTIONS.COLLECTION_ITEMS_RESOLVE_COMPLETED,
data: {
resolvedCollections: newCollectionObjectsById,
failedCollectionIds: invalidCollectionIds,
},
});
};
export const doFetchItemsInCollection = (
options: { collectionId: string, pageSize?: number },
cb?: () => void
) => {
const { collectionId, pageSize } = options;
const newOptions: { collectionIds: Array<string>, pageSize?: number } = {
collectionIds: [collectionId],
};
if (pageSize) newOptions.pageSize = pageSize;
return doFetchItemsInCollections(newOptions, cb);
};
// Apply an edit (add/remove/reorder/replace items, rename, retype) to a
// collection, storing the result under the appropriate store bucket:
// 'edited' (published collection with local changes), 'builtin'
// (Watch Later etc.), or 'unpublished'. Resolves true on success, or
// dispatches COLLECTION_ERROR when the collection doesn't exist.
export const doCollectionEdit = (collectionId: string, params: CollectionEditParams) => async (
  dispatch: Dispatch,
  getState: GetState
) => {
  const state = getState();
  const collection: Collection = makeSelectCollectionForId(collectionId)(state);
  const editedCollection: Collection = makeSelectEditedCollectionForId(collectionId)(state);
  const unpublishedCollection: Collection = makeSelectUnpublishedCollectionForId(collectionId)(
    state
  );
  const publishedCollection: Collection = makeSelectPublishedCollectionForId(collectionId)(state); // needs to be published only

  // Flatten a claim_search resolveInfo map into an ordered list of
  // permanent_urls (streams, channels, and collections alike).
  const generateCollectionItemsFromSearchResult = results => {
    return (
      Object.values(results)
        // $FlowFixMe
        .reduce(
          (
            acc,
            cur: {
              stream: ?StreamClaim,
              channel: ?ChannelClaim,
              claimsInChannel: ?number,
              collection: ?CollectionClaim,
            }
          ) => {
            let url;
            if (cur.stream) {
              url = cur.stream.permanent_url;
            } else if (cur.channel) {
              url = cur.channel.permanent_url;
            } else if (cur.collection) {
              url = cur.collection.permanent_url;
            } else {
              return acc;
            }
            acc.push(url);
            return acc;
          },
          []
        )
    );
  };

  if (!collection) {
    return dispatch({
      type: ACTIONS.COLLECTION_ERROR,
      data: {
        message: 'collection does not exist',
      },
    });
  }

  // Work on a copy of the current item list.
  let currentItems = collection.items ? collection.items.concat() : [];
  const { claims: passedClaims, order, claimIds, replace, remove, type } = params;

  const collectionType = type || collection.type;
  // NOTE: `newItems` starts as an ALIAS of `currentItems`; the `order`
  // splice below therefore also reorders `newItems` unless a claimIds
  // search reassigned it first.
  let newItems: Array<?string> = currentItems;

  if (passedClaims) {
    if (remove) {
      const passedUrls = passedClaims.map(claim => claim.permanent_url);
      // $FlowFixMe // need this?
      newItems = currentItems.filter((item: string) => !passedUrls.includes(item));
    } else {
      passedClaims.forEach(claim => newItems.push(claim.permanent_url));
    }
  }

  if (claimIds) {
    // claim_search caps page_size at 50, so resolve ids in batches of 50.
    const batches = [];
    if (claimIds.length > 50) {
      for (let i = 0; i < Math.ceil(claimIds.length / 50); i++) {
        batches[i] = claimIds.slice(i * 50, (i + 1) * 50);
      }
    } else {
      batches[0] = claimIds;
    }
    const resultArray = await Promise.all(
      batches.map(batch => {
        let options = { claim_ids: batch, page: 1, page_size: 50 };
        return dispatch(doClaimSearch(options));
      })
    );

    const searchResults = Object.assign({}, ...resultArray);

    if (replace) {
      newItems = generateCollectionItemsFromSearchResult(searchResults);
    } else {
      newItems = currentItems.concat(generateCollectionItemsFromSearchResult(searchResults));
    }
  }

  if (order) {
    // Move one item from index `order.from` to index `order.to`.
    const [movedItem] = currentItems.splice(order.from, 1);
    currentItems.splice(order.to, 0, movedItem);
  }

  // console.log('p&e', publishedCollection.items, newItems, publishedCollection.items.join(','), newItems.join(','))
  if (editedCollection) {
    // delete edited if newItems are the same as publishedItems
    // NOTE(review): assumes `publishedCollection` exists whenever an
    // 'edited' entry does — confirm, else `.items` throws here.
    if (publishedCollection.items.join(',') === newItems.join(',')) {
      dispatch({
        type: ACTIONS.COLLECTION_DELETE,
        data: {
          id: collectionId,
          collectionKey: 'edited',
        },
      });
    } else {
      dispatch({
        type: ACTIONS.COLLECTION_EDIT,
        data: {
          id: collectionId,
          collectionKey: 'edited',
          collection: {
            items: newItems,
            id: collectionId,
            name: params.name || collection.name,
            updatedAt: getTimestamp(),
            type: collectionType,
          },
        },
      });
    }
  } else if (publishedCollection) {
    // First local change to a published collection: create 'edited' entry.
    dispatch({
      type: ACTIONS.COLLECTION_EDIT,
      data: {
        id: collectionId,
        collectionKey: 'edited',
        collection: {
          items: newItems,
          id: collectionId,
          name: params.name || collection.name,
          updatedAt: getTimestamp(),
          type: collectionType,
        },
      },
    });
  } else if (COLS.BUILTIN_LISTS.includes(collectionId)) {
    dispatch({
      type: ACTIONS.COLLECTION_EDIT,
      data: {
        id: collectionId,
        collectionKey: 'builtin',
        collection: {
          items: newItems,
          id: collectionId,
          name: params.name || collection.name,
          updatedAt: getTimestamp(),
          type: collectionType,
        },
      },
    });
  } else if (unpublishedCollection) {
    dispatch({
      type: ACTIONS.COLLECTION_EDIT,
      data: {
        id: collectionId,
        collectionKey: 'unpublished',
        collection: {
          items: newItems,
          id: collectionId,
          name: params.name || collection.name,
          updatedAt: getTimestamp(),
          type: collectionType,
        },
      },
    });
  }
  return true;
};

View file

@ -1,84 +0,0 @@
// @flow
import * as ACTIONS from 'constants/action_types';
import Lbry from 'lbry';
import { selectClaimsByUri, selectMyChannelClaims } from 'redux/selectors/claims';
import { doToast } from 'redux/actions/notifications';
export function doCommentList(uri: string, page: number = 1, pageSize: number = 99999) {
return (dispatch: Dispatch, getState: GetState) => {
const state = getState();
const claim = selectClaimsByUri(state)[uri];
const claimId = claim ? claim.claim_id : null;
dispatch({
type: ACTIONS.COMMENT_LIST_STARTED,
});
Lbry.comment_list({
claim_id: claimId,
page,
page_size: pageSize,
})
.then((result: CommentListResponse) => {
const { items: comments } = result;
dispatch({
type: ACTIONS.COMMENT_LIST_COMPLETED,
data: {
comments,
claimId: claimId,
uri: uri,
},
});
})
.catch(error => {
console.log(error);
dispatch({
type: ACTIONS.COMMENT_LIST_FAILED,
data: error,
});
});
};
}
export function doCommentCreate(
comment: string = '',
claim_id: string = '',
channel: ?string,
parent_id?: number
) {
return (dispatch: Dispatch, getState: GetState) => {
const state = getState();
dispatch({
type: ACTIONS.COMMENT_CREATE_STARTED,
});
const myChannels = selectMyChannelClaims(state);
const namedChannelClaim =
myChannels && myChannels.find(myChannel => myChannel.name === channel);
const channel_id = namedChannelClaim ? namedChannelClaim.claim_id : null;
return Lbry.comment_create({
comment,
claim_id,
channel_id,
})
.then((result: Comment) => {
dispatch({
type: ACTIONS.COMMENT_CREATE_COMPLETED,
data: {
comment: result,
claimId: claim_id,
},
});
})
.catch(error => {
dispatch({
type: ACTIONS.COMMENT_CREATE_FAILED,
data: error,
});
dispatch(
doToast({
message: 'Oops, someone broke comments.',
isError: true,
})
);
});
};
}

View file

@ -1,7 +1,8 @@
// @flow
import * as ACTIONS from 'constants/action_types'; import * as ACTIONS from 'constants/action_types';
export function savePosition(claimId: string, outpoint: string, position: number) { export function savePosition(claimId: string, outpoint: string, position: number) {
return dispatch => { return (dispatch: Dispatch) => {
dispatch({ dispatch({
type: ACTIONS.SET_CONTENT_POSITION, type: ACTIONS.SET_CONTENT_POSITION,
data: { claimId, outpoint, position }, data: { claimId, outpoint, position },

View file

@ -3,8 +3,11 @@ import * as ACTIONS from 'constants/action_types';
import Lbry from 'lbry'; import Lbry from 'lbry';
import { doToast } from 'redux/actions/notifications'; import { doToast } from 'redux/actions/notifications';
import { selectBalance } from 'redux/selectors/wallet'; import { selectBalance } from 'redux/selectors/wallet';
import { makeSelectFileInfoForUri, selectDownloadingByOutpoint } from 'redux/selectors/file_info'; import {
import { makeSelectStreamingUrlForUri } from 'redux/selectors/file'; makeSelectFileInfoForUri,
selectDownloadingByOutpoint,
makeSelectStreamingUrlForUri,
} from 'redux/selectors/file_info';
import { makeSelectClaimForUri } from 'redux/selectors/claims'; import { makeSelectClaimForUri } from 'redux/selectors/claims';
type Dispatch = (action: any) => any; type Dispatch = (action: any) => any;
@ -28,7 +31,6 @@ export function doFileGet(uri: string, saveFile: boolean = true, onSuccess?: Get
.then((streamInfo: GetResponse) => { .then((streamInfo: GetResponse) => {
const timeout = const timeout =
streamInfo === null || typeof streamInfo !== 'object' || streamInfo.error === 'Timeout'; streamInfo === null || typeof streamInfo !== 'object' || streamInfo.error === 'Timeout';
if (timeout) { if (timeout) {
dispatch({ dispatch({
type: ACTIONS.FETCH_FILE_INFO_FAILED, type: ACTIONS.FETCH_FILE_INFO_FAILED,
@ -37,16 +39,17 @@ export function doFileGet(uri: string, saveFile: boolean = true, onSuccess?: Get
dispatch(doToast({ message: `File timeout for uri ${uri}`, isError: true })); dispatch(doToast({ message: `File timeout for uri ${uri}`, isError: true }));
} else { } else {
// purchase was completed successfully if (streamInfo.purchase_receipt || streamInfo.content_fee) {
dispatch({ dispatch({
type: ACTIONS.PURCHASE_URI_COMPLETED, type: ACTIONS.PURCHASE_URI_COMPLETED,
data: { uri }, data: { uri, purchaseReceipt: streamInfo.purchase_receipt || streamInfo.content_fee },
}); });
}
dispatch({ dispatch({
type: ACTIONS.FETCH_FILE_INFO_COMPLETED, type: ACTIONS.FETCH_FILE_INFO_COMPLETED,
data: { data: {
fileInfo: streamInfo, fileInfo: streamInfo,
outpoint: streamInfo.outpoint, outpoint: outpoint,
}, },
}); });
@ -55,10 +58,10 @@ export function doFileGet(uri: string, saveFile: boolean = true, onSuccess?: Get
} }
} }
}) })
.catch(() => { .catch(error => {
dispatch({ dispatch({
type: ACTIONS.PURCHASE_URI_FAILED, type: ACTIONS.PURCHASE_URI_FAILED,
data: { uri }, data: { uri, error },
}); });
dispatch({ dispatch({
@ -101,7 +104,10 @@ export function doPurchaseUri(
data: { uri, error: `Already fetching uri: ${uri}` }, data: { uri, error: `Already fetching uri: ${uri}` },
}); });
Promise.resolve(); if (onSuccess) {
onSuccess(fileInfo);
}
return; return;
} }
@ -120,9 +126,8 @@ export function doPurchaseUri(
}; };
} }
export function doDeletePurchasedUri(uri: string) { export function doClearPurchasedUriSuccess() {
return { return {
type: ACTIONS.DELETE_PURCHASED_URI, type: ACTIONS.CLEAR_PURCHASED_URI_SUCCESS,
data: { uri },
}; };
} }

View file

@ -1,7 +1,6 @@
import * as ACTIONS from 'constants/action_types'; import * as ACTIONS from 'constants/action_types';
import Lbry from 'lbry'; import Lbry from 'lbry';
import { doFetchClaimListMine } from 'redux/actions/claims'; import { selectClaimsByUri } from 'redux/selectors/claims';
import { selectClaimsByUri, selectIsFetchingClaimListMine } from 'redux/selectors/claims';
import { selectIsFetchingFileList, selectUrisLoading } from 'redux/selectors/file_info'; import { selectIsFetchingFileList, selectUrisLoading } from 'redux/selectors/file_info';
export function doFetchFileInfo(uri) { export function doFetchFileInfo(uri) {
@ -58,13 +57,10 @@ export function doFileList(page = 1, pageSize = 99999) {
}; };
} }
export function doFetchFileInfosAndPublishedClaims() { export function doFetchFileInfos() {
return (dispatch, getState) => { return (dispatch, getState) => {
const state = getState(); const state = getState();
const isFetchingClaimListMine = selectIsFetchingClaimListMine(state);
const isFetchingFileInfo = selectIsFetchingFileList(state); const isFetchingFileInfo = selectIsFetchingFileList(state);
if (!isFetchingClaimListMine) dispatch(doFetchClaimListMine());
if (!isFetchingFileInfo) dispatch(doFileList()); if (!isFetchingFileInfo) dispatch(doFileList());
}; };
} }

View file

@ -1,6 +1,6 @@
// @flow // @flow
import * as ACTIONS from 'constants/action_types'; import * as ACTIONS from 'constants/action_types';
import uuid from 'uuid/v4'; import { v4 as uuid } from 'uuid';
export function doToast(params: ToastParams) { export function doToast(params: ToastParams) {
if (!params) { if (!params) {

View file

@ -4,14 +4,15 @@ import { SPEECH_STATUS, SPEECH_PUBLISH } from 'constants/speech_urls';
import * as ACTIONS from 'constants/action_types'; import * as ACTIONS from 'constants/action_types';
import * as THUMBNAIL_STATUSES from 'constants/thumbnail_upload_statuses'; import * as THUMBNAIL_STATUSES from 'constants/thumbnail_upload_statuses';
import Lbry from 'lbry'; import Lbry from 'lbry';
import LbryFirst from 'lbry-first';
import { batchActions } from 'util/batch-actions'; import { batchActions } from 'util/batch-actions';
import { creditsToString } from 'util/format-credits'; import { creditsToString } from 'util/format-credits';
import { doError } from 'redux/actions/notifications'; import { doError } from 'redux/actions/notifications';
import { isClaimNsfw } from 'util/claim'; import { isClaimNsfw } from 'util/claim';
import { import {
selectMyChannelClaims, selectMyChannelClaims,
selectPendingById,
selectMyClaimsWithoutChannels, selectMyClaimsWithoutChannels,
selectReflectingById,
} from 'redux/selectors/claims'; } from 'redux/selectors/claims';
import { selectPublishFormValues, selectMyClaimForUri } from 'redux/selectors/publish'; import { selectPublishFormValues, selectMyClaimForUri } from 'redux/selectors/publish';
@ -20,6 +21,7 @@ export const doResetThumbnailStatus = () => (dispatch: Dispatch) => {
type: ACTIONS.UPDATE_PUBLISH_FORM, type: ACTIONS.UPDATE_PUBLISH_FORM,
data: { data: {
thumbnailPath: '', thumbnailPath: '',
thumbnailError: undefined,
}, },
}); });
@ -67,8 +69,10 @@ export const doUploadThumbnail = (
thumbnailBlob?: File, thumbnailBlob?: File,
fsAdapter?: any, fsAdapter?: any,
fs?: any, fs?: any,
path?: any path?: any,
cb?: (string) => void
) => (dispatch: Dispatch) => { ) => (dispatch: Dispatch) => {
const downMessage = __('Thumbnail upload service may be down, try again later.');
let thumbnail, fileExt, fileName, fileType; let thumbnail, fileExt, fileName, fileType;
const makeid = () => { const makeid = () => {
@ -94,6 +98,45 @@ export const doUploadThumbnail = (
); );
}; };
dispatch({
type: ACTIONS.UPDATE_PUBLISH_FORM,
data: {
thumbnailError: undefined,
},
});
const doUpload = data => {
return fetch(SPEECH_PUBLISH, {
method: 'POST',
body: data,
})
.then(res => res.text())
.then(text => (text.length ? JSON.parse(text) : {}))
.then(json => {
if (!json.success) return uploadError(json.message || downMessage);
if (cb) {
cb(json.data.serveUrl);
}
return dispatch({
type: ACTIONS.UPDATE_PUBLISH_FORM,
data: {
uploadThumbnailStatus: THUMBNAIL_STATUSES.COMPLETE,
thumbnail: json.data.serveUrl,
},
});
})
.catch(err => {
let message = err.message;
// This sucks but ¯\_(ツ)_/¯
if (message === 'Failed to fetch') {
message = downMessage;
}
uploadError(message);
});
};
dispatch({ dispatch({
type: ACTIONS.UPDATE_PUBLISH_FORM, type: ACTIONS.UPDATE_PUBLISH_FORM,
data: { uploadThumbnailStatus: THUMBNAIL_STATUSES.IN_PROGRESS }, data: { uploadThumbnailStatus: THUMBNAIL_STATUSES.IN_PROGRESS },
@ -110,24 +153,7 @@ export const doUploadThumbnail = (
data.append('name', name); data.append('name', name);
// $FlowFixMe // $FlowFixMe
data.append('file', { uri: 'file://' + filePath, type: fileType, name: fileName }); data.append('file', { uri: 'file://' + filePath, type: fileType, name: fileName });
return doUpload(data);
return fetch(SPEECH_PUBLISH, {
method: 'POST',
body: data,
})
.then(response => response.json())
.then(json =>
json.success
? dispatch({
type: ACTIONS.UPDATE_PUBLISH_FORM,
data: {
uploadThumbnailStatus: THUMBNAIL_STATUSES.COMPLETE,
thumbnail: `${json.data.url}.${fileExt}`,
},
})
: uploadError(json.message)
)
.catch(err => uploadError(err.message));
}); });
} else { } else {
if (filePath && fs && path) { if (filePath && fs && path) {
@ -150,24 +176,7 @@ export const doUploadThumbnail = (
data.append('name', name); data.append('name', name);
// $FlowFixMe // $FlowFixMe
data.append('file', file); data.append('file', file);
return doUpload(data);
return fetch(SPEECH_PUBLISH, {
method: 'POST',
body: data,
})
.then(response => response.json())
.then(json =>
json.success
? dispatch({
type: ACTIONS.UPDATE_PUBLISH_FORM,
data: {
uploadThumbnailStatus: THUMBNAIL_STATUSES.COMPLETE,
thumbnail: `${json.data.url}${fileExt}`,
},
})
: uploadError(json.message)
)
.catch(err => uploadError(err.message));
} }
}; };
@ -186,6 +195,7 @@ export const doPrepareEdit = (claim: StreamClaim, uri: string, fileInfo: FileLis
currency: 'LBC', currency: 'LBC',
}, },
languages, languages,
release_time,
license, license,
license_url: licenseUrl, license_url: licenseUrl,
thumbnail, thumbnail,
@ -201,6 +211,8 @@ export const doPrepareEdit = (claim: StreamClaim, uri: string, fileInfo: FileLis
description, description,
fee, fee,
languages, languages,
releaseTime: release_time,
releaseTimeEdited: undefined,
thumbnail: thumbnail ? thumbnail.url : null, thumbnail: thumbnail ? thumbnail.url : null,
title, title,
uri, uri,
@ -232,11 +244,13 @@ export const doPrepareEdit = (claim: StreamClaim, uri: string, fileInfo: FileLis
dispatch({ type: ACTIONS.DO_PREPARE_EDIT, data: publishData }); dispatch({ type: ACTIONS.DO_PREPARE_EDIT, data: publishData });
}; };
export const doPublish = (success: Function, fail: Function) => ( export const doPublish = (success: Function, fail: Function, preview: Function) => (
dispatch: Dispatch, dispatch: Dispatch,
getState: () => {} getState: () => {}
) => { ) => {
if (!preview) {
dispatch({ type: ACTIONS.PUBLISH_START }); dispatch({ type: ACTIONS.PUBLISH_START });
}
const state = getState(); const state = getState();
const myClaimForUri = selectMyClaimForUri(state); const myClaimForUri = selectMyClaimForUri(state);
@ -252,8 +266,10 @@ export const doPublish = (success: Function, fail: Function) => (
filePath, filePath,
description, description,
language, language,
releaseTimeEdited,
license, license,
licenseUrl, licenseUrl,
useLBRYUploader,
licenseType, licenseType,
otherLicenseDescription, otherLicenseDescription,
thumbnail, thumbnail,
@ -264,8 +280,14 @@ export const doPublish = (success: Function, fail: Function) => (
uri, uri,
tags, tags,
locations, locations,
optimize,
isLivestreamPublish,
remoteFileUrl,
} = publishData; } = publishData;
// Handle scenario where we have a claim that has the same name as a channel we are publishing with.
const myClaimForUriEditing = myClaimForUri && myClaimForUri.name === name ? myClaimForUri : null;
let publishingLicense; let publishingLicense;
switch (licenseType) { switch (licenseType) {
case COPYRIGHT: case COPYRIGHT:
@ -288,7 +310,6 @@ export const doPublish = (success: Function, fail: Function) => (
description?: string, description?: string,
channel_id?: string, channel_id?: string,
file_path?: string, file_path?: string,
license_url?: string, license_url?: string,
license?: string, license?: string,
thumbnail_url?: string, thumbnail_url?: string,
@ -299,6 +320,9 @@ export const doPublish = (success: Function, fail: Function) => (
tags: Array<string>, tags: Array<string>,
locations?: Array<any>, locations?: Array<any>,
blocking: boolean, blocking: boolean,
optimize_file?: boolean,
preview?: boolean,
remote_url?: string,
} = { } = {
name, name,
title, title,
@ -309,10 +333,14 @@ export const doPublish = (success: Function, fail: Function) => (
tags: tags && tags.map(tag => tag.name), tags: tags && tags.map(tag => tag.name),
thumbnail_url: thumbnail, thumbnail_url: thumbnail,
blocking: true, blocking: true,
preview: false,
}; };
// Temporary solution to keep the same publish flow with the new tags api // Temporary solution to keep the same publish flow with the new tags api
// Eventually we will allow users to enter their own tags on publish // Eventually we will allow users to enter their own tags on publish
// `nsfw` will probably be removed // `nsfw` will probably be removed
if (remoteFileUrl) {
publishPayload.remote_url = remoteFileUrl;
}
if (publishingLicense) { if (publishingLicense) {
publishPayload.license = publishingLicense; publishPayload.license = publishingLicense;
@ -326,11 +354,17 @@ export const doPublish = (success: Function, fail: Function) => (
publishPayload.thumbnail_url = thumbnail; publishPayload.thumbnail_url = thumbnail;
} }
if (useLBRYUploader) {
publishPayload.tags.push('lbry-first');
}
// Set release time to curret date. On edits, keep original release/transaction time as release_time // Set release time to curret date. On edits, keep original release/transaction time as release_time
if (myClaimForUri && myClaimForUri.value.release_time) { if (releaseTimeEdited) {
publishPayload.release_time = releaseTimeEdited;
} else if (myClaimForUriEditing && myClaimForUriEditing.value.release_time) {
publishPayload.release_time = Number(myClaimForUri.value.release_time); publishPayload.release_time = Number(myClaimForUri.value.release_time);
} else if (myClaimForUri && myClaimForUri.timestamp) { } else if (myClaimForUriEditing && myClaimForUriEditing.timestamp) {
publishPayload.release_time = Number(myClaimForUri.timestamp); publishPayload.release_time = Number(myClaimForUriEditing.timestamp);
} else { } else {
publishPayload.release_time = Number(Math.round(Date.now() / 1000)); publishPayload.release_time = Number(Math.round(Date.now() / 1000));
} }
@ -339,8 +373,8 @@ export const doPublish = (success: Function, fail: Function) => (
publishPayload.channel_id = channelId; publishPayload.channel_id = channelId;
} }
if (myClaimForUri && myClaimForUri.value && myClaimForUri.value.locations) { if (myClaimForUriEditing && myClaimForUriEditing.value && myClaimForUriEditing.value.locations) {
publishPayload.locations = myClaimForUri.value.locations; publishPayload.locations = myClaimForUriEditing.value.locations;
} }
if (!contentIsFree && fee && (fee.currency && Number(fee.amount) > 0)) { if (!contentIsFree && fee && (fee.currency && Number(fee.amount) > 0)) {
@ -348,55 +382,113 @@ export const doPublish = (success: Function, fail: Function) => (
publishPayload.fee_amount = creditsToString(fee.amount); publishPayload.fee_amount = creditsToString(fee.amount);
} }
if (optimize) {
publishPayload.optimize_file = true;
}
// Only pass file on new uploads, not metadata only edits. // Only pass file on new uploads, not metadata only edits.
// The sdk will figure it out // The sdk will figure it out
if (filePath) publishPayload.file_path = filePath; if (filePath && !isLivestreamPublish) publishPayload.file_path = filePath;
return Lbry.publish(publishPayload).then(success, fail); if (preview) {
publishPayload.preview = true;
publishPayload.optimize_file = false;
return Lbry.publish(publishPayload).then((previewResponse: PublishResponse) => {
return preview(previewResponse);
}, fail);
}
return Lbry.publish(publishPayload).then((response: PublishResponse) => {
if (!useLBRYUploader) {
return success(response);
}
// $FlowFixMe
publishPayload.permanent_url = response.outputs[0].permanent_url;
return LbryFirst.upload(publishPayload)
.then(() => {
// Return original publish response so app treats it like a normal publish
return success(response);
})
.catch(error => {
return success(response, error);
});
}, fail);
}; };
// Calls claim_list_mine until any pending publishes are confirmed // Calls file_list until any reflecting files are done
export const doCheckPendingPublishes = (onConfirmed: Function) => ( export const doCheckReflectingFiles = () => (dispatch: Dispatch, getState: GetState) => {
dispatch: Dispatch,
getState: GetState
) => {
const state = getState(); const state = getState();
const pendingById = selectPendingById(state); const { checkingReflector } = state.claims;
let reflectorCheckInterval;
if (!Object.keys(pendingById).length) { const checkFileList = async () => {
return; const state = getState();
} const reflectingById = selectReflectingById(state);
const ids = Object.keys(reflectingById);
let publishCheckInterval; const newReflectingById = {};
const promises = [];
const checkFileList = () => { // TODO: just use file_list({claim_id: Array<claimId>})
Lbry.stream_list({ page: 1, page_size: 10 }).then(result => { if (Object.keys(reflectingById).length) {
const claims = result.items; ids.forEach(claimId => {
promises.push(Lbry.file_list({ claim_id: claimId }));
claims.forEach(claim => {
// If it's confirmed, check if it was pending previously
if (claim.confirmations > 0 && pendingById[claim.claim_id]) {
delete pendingById[claim.claim_id];
if (onConfirmed) {
onConfirmed(claim);
}
}
}); });
dispatch({ Promise.all(promises)
type: ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED, .then(results => {
data: { results.forEach(res => {
claims, if (res.items[0]) {
}, const fileListItem = res.items[0];
}); const fileClaimId = fileListItem.claim_id;
const {
if (!Object.keys(pendingById).length) { is_fully_reflected: done,
clearInterval(publishCheckInterval); uploading_to_reflector: uploading,
} reflector_progress: progress,
}); } = fileListItem;
if (uploading) {
newReflectingById[fileClaimId] = {
fileListItem: fileListItem,
progress,
stalled: !done && !uploading,
}; };
}
publishCheckInterval = setInterval(() => { }
});
})
.then(() => {
dispatch({
type: ACTIONS.UPDATE_FILES_REFLECTING,
data: newReflectingById,
});
if (!Object.keys(newReflectingById).length) {
dispatch({
type: ACTIONS.TOGGLE_CHECKING_REFLECTING,
data: false,
});
clearInterval(reflectorCheckInterval);
}
});
} else {
dispatch({
type: ACTIONS.TOGGLE_CHECKING_REFLECTING,
data: false,
});
clearInterval(reflectorCheckInterval);
}
};
// do it once...
checkFileList(); checkFileList();
}, 30000); // then start the interval if it's not already started
if (!checkingReflector) {
dispatch({
type: ACTIONS.TOGGLE_CHECKING_REFLECTING,
data: true,
});
reflectorCheckInterval = setInterval(() => {
checkFileList();
}, 5000);
}
}; };

View file

@ -1,180 +0,0 @@
// @flow
import * as ACTIONS from 'constants/action_types';
import { buildURI } from 'lbryURI';
import { doResolveUri } from 'redux/actions/claims';
import {
makeSelectSearchUris,
selectSuggestions,
makeSelectQueryWithOptions,
selectSearchValue,
} from 'redux/selectors/search';
import { batchActions } from 'util/batch-actions';
import debounce from 'util/debounce';
import handleFetchResponse from 'util/handle-fetch';
const DEBOUNCED_SEARCH_SUGGESTION_MS = 300;
type Dispatch = (action: any) => any;
type GetState = () => { search: SearchState };
// We can't use env's because they aren't passed into node_modules
let CONNECTION_STRING = 'https://lighthouse.lbry.com/';
export const setSearchApi = (endpoint: string) => {
CONNECTION_STRING = endpoint.replace(/\/*$/, '/'); // exactly one slash at the end;
};
export const getSearchSuggestions = (value: string) => (dispatch: Dispatch, getState: GetState) => {
const query = value.trim();
// strip out any basic stuff for more accurate search results
let searchValue = query.replace(/lbry:\/\//g, '').replace(/-/g, ' ');
if (searchValue.includes('#')) {
// This should probably be more robust, but I think it's fine for now
// Remove everything after # to get rid of the claim id
searchValue = searchValue.substring(0, searchValue.indexOf('#'));
}
const suggestions = selectSuggestions(getState());
if (suggestions[searchValue]) {
return;
}
fetch(`${CONNECTION_STRING}autocomplete?s=${searchValue}`)
.then(handleFetchResponse)
.then(apiSuggestions => {
dispatch({
type: ACTIONS.UPDATE_SEARCH_SUGGESTIONS,
data: {
query: searchValue,
suggestions: apiSuggestions,
},
});
})
.catch(() => {
// If the fetch fails, do nothing
// Basic search suggestions are already populated at this point
});
};
const throttledSearchSuggestions = debounce((dispatch, query) => {
dispatch(getSearchSuggestions(query));
}, DEBOUNCED_SEARCH_SUGGESTION_MS);
export const doUpdateSearchQuery = (query: string, shouldSkipSuggestions: ?boolean) => (
dispatch: Dispatch
) => {
dispatch({
type: ACTIONS.UPDATE_SEARCH_QUERY,
data: { query },
});
// Don't fetch new suggestions if the user just added a space
if (!query.endsWith(' ') || !shouldSkipSuggestions) {
throttledSearchSuggestions(dispatch, query);
}
};
export const doSearch = (
rawQuery: string, // pass in a query if you don't want to search for what's in the search bar
size: ?number, // only pass in if you don't want to use the users setting (ex: related content)
from: ?number,
isBackgroundSearch: boolean = false
) => (dispatch: Dispatch, getState: GetState) => {
const query = rawQuery.replace(/^lbry:\/\//i, '').replace(/\//, ' ');
if (!query) {
dispatch({
type: ACTIONS.SEARCH_FAIL,
});
return;
}
const state = getState();
const queryWithOptions = makeSelectQueryWithOptions(query, size, from, isBackgroundSearch)(state);
// If we have already searched for something, we don't need to do anything
const urisForQuery = makeSelectSearchUris(queryWithOptions)(state);
if (urisForQuery && !!urisForQuery.length) {
return;
}
dispatch({
type: ACTIONS.SEARCH_START,
});
// If the user is on the file page with a pre-populated uri and they select
// the search option without typing anything, searchQuery will be empty
// We need to populate it so the input is filled on the search page
// isBackgroundSearch means the search is happening in the background, don't update the search query
if (!state.search.searchQuery && !isBackgroundSearch) {
dispatch(doUpdateSearchQuery(query));
}
fetch(`${CONNECTION_STRING}search?${queryWithOptions}`)
.then(handleFetchResponse)
.then((data: Array<{ name: string, claimId: string }>) => {
const uris = [];
const actions = [];
data.forEach(result => {
if (result) {
const { name, claimId } = result;
const urlObj: LbryUrlObj = {};
if (name.startsWith('@')) {
urlObj.channelName = name;
urlObj.channelClaimId = claimId;
} else {
urlObj.streamName = name;
urlObj.streamClaimId = claimId;
}
const url = buildURI(urlObj);
actions.push(doResolveUri(url));
uris.push(url);
}
});
actions.push({
type: ACTIONS.SEARCH_SUCCESS,
data: {
query: queryWithOptions,
uris,
},
});
dispatch(batchActions(...actions));
})
.catch(e => {
dispatch({
type: ACTIONS.SEARCH_FAIL,
});
});
};
export const doFocusSearchInput = () => (dispatch: Dispatch) =>
dispatch({
type: ACTIONS.SEARCH_FOCUS,
});
export const doBlurSearchInput = () => (dispatch: Dispatch) =>
dispatch({
type: ACTIONS.SEARCH_BLUR,
});
export const doUpdateSearchOptions = (newOptions: SearchOptions) => (
dispatch: Dispatch,
getState: GetState
) => {
const state = getState();
const searchValue = selectSearchValue(state);
dispatch({
type: ACTIONS.UPDATE_SEARCH_OPTIONS,
data: newOptions,
});
if (searchValue) {
// After updating, perform a search with the new options
dispatch(doSearch(searchValue));
}
};

View file

@ -6,21 +6,50 @@ type SharedData = {
version: '0.1', version: '0.1',
value: { value: {
subscriptions?: Array<string>, subscriptions?: Array<string>,
following?: Array<{ uri: string, notificationsDisabled: boolean }>,
tags?: Array<string>, tags?: Array<string>,
blocked?: Array<string>, blocked?: Array<string>,
coin_swap_codes?: Array<string>,
settings?: any, settings?: any,
app_welcome_version?: number,
sharing_3P?: boolean,
unpublishedCollections: CollectionGroup,
editedCollections: CollectionGroup,
builtinCollections: CollectionGroup,
savedCollections: Array<string>,
}, },
}; };
function extractUserState(rawObj: SharedData) { function extractUserState(rawObj: SharedData) {
if (rawObj && rawObj.version === '0.1' && rawObj.value) { if (rawObj && rawObj.version === '0.1' && rawObj.value) {
const { subscriptions, tags, blocked, settings} = rawObj.value; const {
subscriptions,
following,
tags,
blocked,
coin_swap_codes,
settings,
app_welcome_version,
sharing_3P,
unpublishedCollections,
editedCollections,
builtinCollections,
savedCollections,
} = rawObj.value;
return { return {
...(subscriptions ? { subscriptions } : {}), ...(subscriptions ? { subscriptions } : {}),
...(following ? { following } : {}),
...(tags ? { tags } : {}), ...(tags ? { tags } : {}),
...(blocked ? { blocked } : {}), ...(blocked ? { blocked } : {}),
...(coin_swap_codes ? { coin_swap_codes } : {}),
...(settings ? { settings } : {}), ...(settings ? { settings } : {}),
...(app_welcome_version ? { app_welcome_version } : {}),
...(sharing_3P ? { sharing_3P } : {}),
...(unpublishedCollections ? { unpublishedCollections } : {}),
...(editedCollections ? { editedCollections } : {}),
...(builtinCollections ? { builtinCollections } : {}),
...(savedCollections ? { savedCollections } : {}),
}; };
} }
@ -29,8 +58,37 @@ function extractUserState(rawObj: SharedData) {
export function doPopulateSharedUserState(sharedSettings: any) { export function doPopulateSharedUserState(sharedSettings: any) {
return (dispatch: Dispatch) => { return (dispatch: Dispatch) => {
const { subscriptions, tags, blocked, settings } = extractUserState(sharedSettings); const {
dispatch({ type: ACTIONS.USER_STATE_POPULATE, data: { subscriptions, tags, blocked, settings } }); subscriptions,
following,
tags,
blocked,
coin_swap_codes,
settings,
app_welcome_version,
sharing_3P,
unpublishedCollections,
editedCollections,
builtinCollections,
savedCollections,
} = extractUserState(sharedSettings);
dispatch({
type: ACTIONS.USER_STATE_POPULATE,
data: {
subscriptions,
following,
tags,
blocked,
coinSwapCodes: coin_swap_codes,
settings,
welcomeVersion: app_welcome_version,
allowAnalytics: sharing_3P,
unpublishedCollections,
editedCollections,
builtinCollections,
savedCollections,
},
});
}; };
} }
@ -41,6 +99,7 @@ export function doPreferenceSet(
success: Function, success: Function,
fail: Function fail: Function
) { ) {
return (dispatch: Dispatch) => {
const preference = { const preference = {
type: typeof value, type: typeof value,
version, version,
@ -54,21 +113,30 @@ export function doPreferenceSet(
Lbry.preference_set(options) Lbry.preference_set(options)
.then(() => { .then(() => {
if (success) {
success(preference); success(preference);
}
}) })
.catch(() => { .catch(err => {
dispatch({
type: ACTIONS.SYNC_FATAL_ERROR,
error: err,
});
if (fail) { if (fail) {
fail(); fail();
} }
}); });
};
} }
export function doPreferenceGet(key: string, success: Function, fail?: Function) { export function doPreferenceGet(key: string, success: Function, fail?: Function) {
return (dispatch: Dispatch) => {
const options = { const options = {
key, key,
}; };
Lbry.preference_get(options) return Lbry.preference_get(options)
.then(result => { .then(result => {
if (result) { if (result) {
const preference = result[key]; const preference = result[key];
@ -78,8 +146,14 @@ export function doPreferenceGet(key: string, success: Function, fail?: Function)
return success(null); return success(null);
}) })
.catch(err => { .catch(err => {
dispatch({
type: ACTIONS.SYNC_FATAL_ERROR,
error: err,
});
if (fail) { if (fail) {
fail(err); fail(err);
} }
}); });
};
} }

View file

@ -1,24 +0,0 @@
// @flow
import * as ACTIONS from 'constants/action_types';
import Lbry from 'lbry';
export const doToggleTagFollow = (name: string) => ({
type: ACTIONS.TOGGLE_TAG_FOLLOW,
data: {
name,
},
});
export const doAddTag = (name: string) => ({
type: ACTIONS.TAG_ADD,
data: {
name,
},
});
export const doDeleteTag = (name: string) => ({
type: ACTIONS.TAG_DELETE,
data: {
name,
},
});

View file

@ -1,11 +1,21 @@
import * as ACTIONS from 'constants/action_types'; import * as ACTIONS from 'constants/action_types';
import Lbry from 'lbry'; import Lbry from 'lbry';
import { doToast } from 'redux/actions/notifications'; import { doToast } from 'redux/actions/notifications';
import { selectBalance } from 'redux/selectors/wallet'; import {
selectBalance,
selectPendingSupportTransactions,
selectTxoPageParams,
selectPendingOtherTransactions,
selectPendingConsolidateTxid,
selectPendingMassClaimTxid,
} from 'redux/selectors/wallet';
import { creditsToString } from 'util/format-credits'; import { creditsToString } from 'util/format-credits';
import { selectMyClaimsRaw } from 'redux/selectors/claims'; import { selectMyClaimsRaw, selectClaimsById } from 'redux/selectors/claims';
import { doFetchChannelListMine, doFetchClaimListMine, doClaimSearch } from 'redux/actions/claims';
const FIFTEEN_SECONDS = 15000;
let walletBalancePromise = null; let walletBalancePromise = null;
export function doUpdateBalance() { export function doUpdateBalance() {
return (dispatch, getState) => { return (dispatch, getState) => {
const { const {
@ -13,7 +23,8 @@ export function doUpdateBalance() {
} = getState(); } = getState();
if (walletBalancePromise === null) { if (walletBalancePromise === null) {
walletBalancePromise = Lbry.wallet_balance().then(response => { walletBalancePromise = Lbry.wallet_balance()
.then(response => {
walletBalancePromise = null; walletBalancePromise = null;
const { available, reserved, reserved_subtotals, total } = response; const { available, reserved, reserved_subtotals, total } = response;
@ -33,6 +44,9 @@ export function doUpdateBalance() {
}, },
}); });
} }
})
.catch(() => {
walletBalancePromise = null;
}); });
} }
@ -43,20 +57,17 @@ export function doUpdateBalance() {
export function doBalanceSubscribe() { export function doBalanceSubscribe() {
return dispatch => { return dispatch => {
dispatch(doUpdateBalance()); dispatch(doUpdateBalance());
setInterval(() => dispatch(doUpdateBalance()), 5000); setInterval(() => dispatch(doUpdateBalance()), 10000);
}; };
} }
export function doFetchTransactions(page = 1, pageSize = 99999) { export function doFetchTransactions(page = 1, pageSize = 999999) {
return dispatch => { return dispatch => {
dispatch(doFetchSupports());
dispatch({ dispatch({
type: ACTIONS.FETCH_TRANSACTIONS_STARTED, type: ACTIONS.FETCH_TRANSACTIONS_STARTED,
}); });
Lbry.utxo_release() Lbry.transaction_list({ page, page_size: pageSize }).then(result => {
.then(() => Lbry.transaction_list({ page, page_size: pageSize }))
.then(result => {
dispatch({ dispatch({
type: ACTIONS.FETCH_TRANSACTIONS_COMPLETED, type: ACTIONS.FETCH_TRANSACTIONS_COMPLETED,
data: { data: {
@ -67,6 +78,81 @@ export function doFetchTransactions(page = 1, pageSize = 99999) {
}; };
} }
export function doFetchTxoPage() {
return (dispatch, getState) => {
const fetchId = Math.random()
.toString(36)
.substr(2, 9);
dispatch({
type: ACTIONS.FETCH_TXO_PAGE_STARTED,
data: fetchId,
});
const state = getState();
const queryParams = selectTxoPageParams(state);
Lbry.txo_list(queryParams)
.then(res => {
const items = res.items || [];
const claimsById = selectClaimsById(state);
const channelIds = items.reduce((acc, cur) => {
if (
cur.type === 'support' &&
cur.signing_channel &&
!claimsById[cur.signing_channel.channel_id]
) {
acc.push(cur.signing_channel.channel_id);
}
return acc;
}, []);
if (channelIds.length) {
const searchParams = {
page_size: 9999,
page: 1,
no_totals: true,
claim_ids: channelIds,
};
// make sure redux has these channels resolved
dispatch(doClaimSearch(searchParams));
}
return res;
})
.then(res => {
dispatch({
type: ACTIONS.FETCH_TXO_PAGE_COMPLETED,
data: {
result: res,
fetchId: fetchId,
},
});
})
.catch(e => {
dispatch({
type: ACTIONS.FETCH_TXO_PAGE_COMPLETED,
data: {
error: e.message,
fetchId: fetchId,
},
});
});
};
}
export function doUpdateTxoPageParams(params) {
return dispatch => {
dispatch({
type: ACTIONS.UPDATE_TXO_FETCH_PARAMS,
data: params,
});
dispatch(doFetchTxoPage());
};
}
export function doFetchSupports(page = 1, pageSize = 99999) { export function doFetchSupports(page = 1, pageSize = 99999) {
return dispatch => { return dispatch => {
dispatch({ dispatch({
@ -84,6 +170,74 @@ export function doFetchSupports(page = 1, pageSize = 99999) {
}; };
} }
export function doFetchUtxoCounts() {
return async dispatch => {
dispatch({
type: ACTIONS.FETCH_UTXO_COUNT_STARTED,
});
let resultSets = await Promise.all([
Lbry.txo_list({ type: 'other', is_not_spent: true, page: 1, page_size: 1 }),
Lbry.txo_list({ type: 'support', is_not_spent: true, page: 1, page_size: 1 }),
]);
const counts = {};
const paymentCount = resultSets[0]['total_items'];
const supportCount = resultSets[1]['total_items'];
counts['other'] = typeof paymentCount === 'number' ? paymentCount : 0;
counts['support'] = typeof supportCount === 'number' ? supportCount : 0;
dispatch({
type: ACTIONS.FETCH_UTXO_COUNT_COMPLETED,
data: counts,
debug: { resultSets },
});
};
}
export function doUtxoConsolidate() {
return async dispatch => {
dispatch({
type: ACTIONS.DO_UTXO_CONSOLIDATE_STARTED,
});
const results = await Lbry.txo_spend({ type: 'other' });
const result = results[0];
dispatch({
type: ACTIONS.PENDING_CONSOLIDATED_TXOS_UPDATED,
data: { txids: [result.txid] },
});
dispatch({
type: ACTIONS.DO_UTXO_CONSOLIDATE_COMPLETED,
data: { txid: result.txid },
});
dispatch(doCheckPendingTxs());
};
}
export function doTipClaimMass() {
return async dispatch => {
dispatch({
type: ACTIONS.TIP_CLAIM_MASS_STARTED,
});
const results = await Lbry.txo_spend({ type: 'support', is_not_my_input: true });
const result = results[0];
dispatch({
type: ACTIONS.PENDING_CONSOLIDATED_TXOS_UPDATED,
data: { txids: [result.txid] },
});
dispatch({
type: ACTIONS.TIP_CLAIM_MASS_COMPLETED,
data: { txid: result.txid },
});
dispatch(doCheckPendingTxs());
};
}
export function doGetNewAddress() { export function doGetNewAddress() {
return dispatch => { return dispatch => {
dispatch({ dispatch({
@ -123,8 +277,8 @@ export function doSendDraftTransaction(address, amount) {
if (balance - amount <= 0) { if (balance - amount <= 0) {
dispatch( dispatch(
doToast({ doToast({
title: 'Insufficient credits', title: __('Insufficient credits'),
message: 'Insufficient credits', message: __('Insufficient credits'),
}) })
); );
return; return;
@ -141,8 +295,8 @@ export function doSendDraftTransaction(address, amount) {
}); });
dispatch( dispatch(
doToast({ doToast({
message: `You sent ${amount} LBC`, message: __('You sent %amount% LBRY Credits', { amount: amount }),
linkText: 'History', linkText: __('History'),
linkTarget: '/wallet', linkTarget: '/wallet',
}) })
); );
@ -153,7 +307,7 @@ export function doSendDraftTransaction(address, amount) {
}); });
dispatch( dispatch(
doToast({ doToast({
message: 'Transaction failed', message: __('Transaction failed'),
isError: true, isError: true,
}) })
); );
@ -167,7 +321,7 @@ export function doSendDraftTransaction(address, amount) {
}); });
dispatch( dispatch(
doToast({ doToast({
message: 'Transaction failed', message: __('Transaction failed'),
isError: true, isError: true,
}) })
); );
@ -194,42 +348,44 @@ export function doSetDraftTransactionAddress(address) {
}; };
} }
export function doSendTip(amount, claimId, isSupport, successCallback, errorCallback) { export function doSendTip(params, isSupport, successCallback, errorCallback, shouldNotify = true) {
return (dispatch, getState) => { return (dispatch, getState) => {
const state = getState(); const state = getState();
const balance = selectBalance(state); const balance = selectBalance(state);
const myClaims = selectMyClaimsRaw(state); const myClaims = selectMyClaimsRaw(state);
const shouldSupport = const shouldSupport =
isSupport || (myClaims ? myClaims.find(claim => claim.claim_id === claimId) : false); isSupport || (myClaims ? myClaims.find(claim => claim.claim_id === params.claim_id) : false);
if (balance - amount <= 0) { if (balance - params.amount <= 0) {
dispatch( dispatch(
doToast({ doToast({
message: 'Insufficient credits', message: __('Insufficient credits'),
isError: true, isError: true,
}) })
); );
return; return;
} }
const success = () => { const success = response => {
if (shouldNotify) {
dispatch( dispatch(
doToast({ doToast({
message: shouldSupport message: shouldSupport
? __(`You deposited ${amount} LBC as a support!`) ? __('You deposited %amount% LBRY Credits as a support!', { amount: params.amount })
: __(`You sent ${amount} LBC as a tip, Mahalo!`), : __('You sent %amount% LBRY Credits as a tip, Mahalo!', { amount: params.amount }),
linkText: __('History'), linkText: __('History'),
linkTarget: __('/wallet'), linkTarget: '/wallet',
}) })
); );
}
dispatch({ dispatch({
type: ACTIONS.SUPPORT_TRANSACTION_COMPLETED, type: ACTIONS.SUPPORT_TRANSACTION_COMPLETED,
}); });
if (successCallback) { if (successCallback) {
successCallback(); successCallback(response);
} }
}; };
@ -258,10 +414,10 @@ export function doSendTip(amount, claimId, isSupport, successCallback, errorCall
}); });
Lbry.support_create({ Lbry.support_create({
claim_id: claimId, ...params,
amount: creditsToString(amount),
tip: !shouldSupport, tip: !shouldSupport,
blocking: true, blocking: true,
amount: creditsToString(params.amount),
}).then(success, error); }).then(success, error);
}; };
} }
@ -338,20 +494,71 @@ export function doWalletLock() {
}; };
} }
// Abandon (collect) the supports/tips on a claim. When `preview` is set, the
// daemon only reports what would happen and no completion action is dispatched.
export function doSupportAbandonForClaim(claimId, claimType, keep, preview) {
  return dispatch => {
    dispatch({
      type: preview ? ACTIONS.ABANDON_CLAIM_SUPPORT_PREVIEW : ACTIONS.ABANDON_CLAIM_SUPPORT_STARTED,
    });

    const abandonParams = { claim_id: claimId };
    if (preview) {
      abandonParams['preview'] = true;
    }
    if (keep) {
      abandonParams['keep'] = keep;
    }

    const onSuccess = res => {
      if (!preview) {
        dispatch({
          type: ACTIONS.ABANDON_CLAIM_SUPPORT_COMPLETED,
          data: { claimId, txid: res.txid, effective: res.outputs[0].amount, type: claimType },
        });
        // Start polling so the abandon tx gets picked up once confirmed.
        dispatch(doCheckPendingTxs());
      }
      return res;
    };

    const onError = e => {
      dispatch({
        type: ACTIONS.ABANDON_CLAIM_SUPPORT_FAILED,
        data: e.message,
      });
    };

    return Lbry.support_abandon(abandonParams)
      .then(onSuccess)
      .catch(onError);
  };
}
export function doWalletReconnect() { export function doWalletReconnect() {
return dispatch => { return dispatch => {
dispatch({ dispatch({
type: ACTIONS.WALLET_RESTART, type: ACTIONS.WALLET_RESTART,
}); });
let failed = false;
// this basically returns null when it's done. :( // this basically returns null when it's done. :(
// might be good to dispatch ACTIONS.WALLET_RESTARTED // might be good to dispatch ACTIONS.WALLET_RESTARTED
Lbry.wallet_reconnect().then(() => const walletTimeout = setTimeout(() => {
failed = true;
dispatch({ dispatch({
type: ACTIONS.WALLET_RESTART_COMPLETED, type: ACTIONS.WALLET_RESTART_COMPLETED,
});
dispatch(
doToast({
message: __(
'Your servers were not available. Check your url and port, or switch back to defaults.'
),
isError: true,
}) })
); );
}, FIFTEEN_SECONDS);
Lbry.wallet_reconnect().then(() => {
clearTimeout(walletTimeout);
if (!failed) dispatch({ type: ACTIONS.WALLET_RESTART_COMPLETED });
});
}; };
} }
export function doWalletDecrypt() { export function doWalletDecrypt() {
return dispatch => { return dispatch => {
dispatch({ dispatch({
@ -409,3 +616,95 @@ export function doUpdateBlockHeight() {
} }
}); });
} }
// Polls `transaction_show` every 30s for all pending support/consolidation
// txes and dispatches updates as they confirm (height > 0). Polling stops
// once the store no longer holds any pending transactions.
export const doCheckPendingTxs = () => (dispatch, getState) => {
  const CHECK_INTERVAL_MS = 30000;
  const state = getState();
  // Nothing pending at all: don't even start the interval.
  if (
    !Object.keys(selectPendingSupportTransactions(state)).length &&
    !selectPendingOtherTransactions(state).length
  ) {
    return;
  }
  let txCheckInterval;
  const checkTxList = () => {
    const currentState = getState();
    const pendingSupportTxs = selectPendingSupportTransactions(currentState); // { claimId: { txid, amount, type } }
    const pendingConsolidateTxes = selectPendingOtherTransactions(currentState);
    const pendingConsTxid = selectPendingConsolidateTxid(currentState);
    const pendingMassClaimTxid = selectPendingMassClaimTxid(currentState);

    const promises = [];
    const newPendingTxes = {};
    const noLongerPendingConsolidate = [];
    const types = new Set([]);

    const entries = Object.entries(pendingSupportTxs);
    entries.forEach(([, data]) => {
      promises.push(Lbry.transaction_show({ txid: data.txid }));
      types.add(data.type);
    });
    if (pendingConsolidateTxes.length) {
      pendingConsolidateTxes.forEach(txid => promises.push(Lbry.transaction_show({ txid })));
    }

    Promise.all(promises).then(txShows => {
      let changed = false;
      txShows.forEach(result => {
        if (pendingConsolidateTxes.includes(result.txid)) {
          // Consolidation / mass-claim tx: confirmed once it has a height.
          if (result.height > 0) {
            noLongerPendingConsolidate.push(result.txid);
          }
        } else {
          if (result.height <= 0) {
            // Still unconfirmed: carry the support entry over to the next poll.
            const match = entries.find(entry => entry[1].txid === result.txid);
            newPendingTxes[match[0]] = match[1];
          } else {
            changed = true;
          }
        }
      });
      if (changed) {
        dispatch({
          type: ACTIONS.PENDING_SUPPORTS_UPDATED,
          data: newPendingTxes,
        });
        // Refresh whichever claim lists just had items confirm.
        if (types.has('channel')) {
          dispatch(doFetchChannelListMine());
        }
        if (types.has('stream')) {
          dispatch(doFetchClaimListMine());
        }
      }
      if (noLongerPendingConsolidate.length) {
        if (noLongerPendingConsolidate.includes(pendingConsTxid)) {
          dispatch(
            doToast({
              message: __('Your wallet is finished consolidating'),
            })
          );
        }
        if (noLongerPendingConsolidate.includes(pendingMassClaimTxid)) {
          dispatch(
            doToast({
              message: __('Your tips have been collected'),
            })
          );
        }
        dispatch({
          type: ACTIONS.PENDING_CONSOLIDATED_TXOS_UPDATED,
          data: { txids: noLongerPendingConsolidate, remove: true },
        });
      }
      // Fix: re-read the store instead of testing the values captured when the
      // thunk first ran — those closure variables never change, so the old
      // check could never clear the interval once polling had started.
      const latestState = getState();
      if (
        !Object.keys(selectPendingSupportTransactions(latestState)).length &&
        !selectPendingOtherTransactions(latestState).length
      ) {
        clearInterval(txCheckInterval);
      }
    });
  };

  txCheckInterval = setInterval(() => {
    checkTxList();
  }, CHECK_INTERVAL_MS);
};

View file

@ -2,27 +2,39 @@
import isEqual from 'util/deep-equal'; import isEqual from 'util/deep-equal';
import { doPreferenceSet } from 'redux/actions/sync'; import { doPreferenceSet } from 'redux/actions/sync';
const SHARED_PREFERENCE_KEY = 'shared'; const RUN_PREFERENCES_DELAY_MS = 2000;
const SHARED_PREFERENCE_VERSION = '0.1'; const SHARED_PREFERENCE_VERSION = '0.1';
let oldShared = {}; let oldShared = {};
let timeout;
export const buildSharedStateMiddleware = ( export const buildSharedStateMiddleware = (
actions: Array<string>, actions: Array<string>,
sharedStateFilters: {}, sharedStateFilters: {},
sharedStateCb?: any => void sharedStateCb?: any => void
) => ({ getState, dispatch }: { getState: () => {}, dispatch: any => void }) => ( ) => ({
next: ({}) => void getState,
) => (action: { type: string, data: any }) => { dispatch,
}: {
getState: () => { user: any, settings: any },
dispatch: any => void,
}) => (next: ({}) => void) => (action: { type: string, data: any }) => {
const currentState = getState(); const currentState = getState();
// We don't care if sync is disabled here, we always want to backup preferences to the wallet // We don't care if sync is disabled here, we always want to backup preferences to the wallet
if (!actions.includes(action.type)) { if (!actions.includes(action.type) || typeof action === 'function') {
return next(action); return next(action);
} }
clearTimeout(timeout);
const actionResult = next(action); const actionResult = next(action);
// Call `getState` after calling `next` to ensure the state has updated in response to the action // Call `getState` after calling `next` to ensure the state has updated in response to the action
const nextState = getState(); function runPreferences() {
const nextState: { user: any, settings: any } = getState();
const syncEnabled =
nextState.settings &&
nextState.settings.clientSettings &&
nextState.settings.clientSettings.enable_sync;
const hasVerifiedEmail =
nextState.user && nextState.user.user && nextState.user.user.has_verified_email;
const preferenceKey = syncEnabled && hasVerifiedEmail ? 'shared' : 'local';
const shared = {}; const shared = {};
Object.keys(sharedStateFilters).forEach(key => { Object.keys(sharedStateFilters).forEach(key => {
@ -39,13 +51,15 @@ export const buildSharedStateMiddleware = (
if (!isEqual(oldShared, shared)) { if (!isEqual(oldShared, shared)) {
// only update if the preference changed from last call in the same session // only update if the preference changed from last call in the same session
oldShared = shared; oldShared = shared;
doPreferenceSet(SHARED_PREFERENCE_KEY, shared, SHARED_PREFERENCE_VERSION); dispatch(doPreferenceSet(preferenceKey, shared, SHARED_PREFERENCE_VERSION));
} }
if (sharedStateCb) { if (sharedStateCb) {
// Pass dispatch to the callback to consumers can dispatch actions in response to preference set // Pass dispatch to the callback to consumers can dispatch actions in response to preference set
sharedStateCb({ dispatch, getState }); sharedStateCb({ dispatch, getState });
} }
clearTimeout(timeout);
return actionResult; return actionResult;
}
timeout = setTimeout(runPreferences, RUN_PREFERENCES_DELAY_MS);
}; };

View file

@ -33,8 +33,7 @@ export const blockedReducer = handleActions(
const { blocked } = action.data; const { blocked } = action.data;
return { return {
...state, ...state,
blockedChannels: blockedChannels: blocked && blocked.length ? blocked : state.blockedChannels,
blocked && blocked.length ? blocked : state.blockedChannels,
}; };
}, },
}, },

View file

@ -9,25 +9,35 @@
// - Sean // - Sean
import * as ACTIONS from 'constants/action_types'; import * as ACTIONS from 'constants/action_types';
import { buildURI, parseURI } from 'lbryURI'; import mergeClaim from 'util/merge-claim';
import { concatClaims } from 'util/claim';
type State = { type State = {
createChannelError: ?string, createChannelError: ?string,
createCollectionError: ?string,
channelClaimCounts: { [string]: number }, channelClaimCounts: { [string]: number },
claimsByUri: { [string]: string }, claimsByUri: { [string]: string },
byId: { [string]: Claim }, byId: { [string]: Claim },
pendingById: { [string]: Claim }, // keep pending claims
resolvingUris: Array<string>, resolvingUris: Array<string>,
pendingById: { [string]: Claim }, reflectingById: { [string]: ReflectingUpdate },
myClaims: ?Array<Claim>, myClaims: ?Array<string>,
myChannelClaims: ?Set<string>, myChannelClaims: ?Array<string>,
myCollectionClaims: ?Array<string>,
abandoningById: { [string]: boolean }, abandoningById: { [string]: boolean },
fetchingChannelClaims: { [string]: number }, fetchingChannelClaims: { [string]: number },
fetchingMyChannels: boolean, fetchingMyChannels: boolean,
fetchingMyCollections: boolean,
fetchingClaimSearchByQuery: { [string]: boolean }, fetchingClaimSearchByQuery: { [string]: boolean },
purchaseUriSuccess: boolean,
myPurchases: ?Array<string>,
myPurchasesPageNumber: ?number,
myPurchasesPageTotalResults: ?number,
fetchingMyPurchases: boolean,
fetchingMyPurchasesError: ?string,
claimSearchByQuery: { [string]: Array<string> }, claimSearchByQuery: { [string]: Array<string> },
claimSearchByQueryLastPageReached: { [string]: Array<boolean> }, claimSearchByQueryLastPageReached: { [string]: Array<boolean> },
creatingChannel: boolean, creatingChannel: boolean,
creatingCollection: boolean,
paginatedClaimsByChannel: { paginatedClaimsByChannel: {
[string]: { [string]: {
all: Array<string>, all: Array<string>,
@ -36,9 +46,21 @@ type State = {
[number]: Array<string>, [number]: Array<string>,
}, },
}, },
updateChannelError: string, updateChannelError: ?string,
updateCollectionError: ?string,
updatingChannel: boolean, updatingChannel: boolean,
updatingCollection: boolean,
pendingChannelImport: string | boolean, pendingChannelImport: string | boolean,
repostLoading: boolean,
repostError: ?string,
fetchingClaimListMinePageError: ?string,
myClaimsPageResults: Array<string>,
myClaimsPageNumber: ?number,
myClaimsPageTotalResults: ?number,
isFetchingClaimListMine: boolean,
isCheckingNameForPublish: boolean,
checkingPending: boolean,
checkingReflecting: boolean,
}; };
const reducers = {}; const reducers = {};
@ -49,49 +71,67 @@ const defaultState = {
channelClaimCounts: {}, channelClaimCounts: {},
fetchingChannelClaims: {}, fetchingChannelClaims: {},
resolvingUris: [], resolvingUris: [],
// This should not be a Set
// Storing sets in reducers can cause issues
myChannelClaims: undefined, myChannelClaims: undefined,
myCollectionClaims: [],
myClaims: undefined, myClaims: undefined,
myPurchases: undefined,
myPurchasesPageNumber: undefined,
myPurchasesPageTotalResults: undefined,
purchaseUriSuccess: false,
fetchingMyPurchases: false,
fetchingMyPurchasesError: undefined,
fetchingMyChannels: false, fetchingMyChannels: false,
fetchingMyCollections: false,
abandoningById: {}, abandoningById: {},
pendingById: {}, pendingById: {},
reflectingById: {},
claimSearchError: false, claimSearchError: false,
claimSearchByQuery: {}, claimSearchByQuery: {},
claimSearchByQueryLastPageReached: {}, claimSearchByQueryLastPageReached: {},
fetchingClaimSearchByQuery: {}, fetchingClaimSearchByQuery: {},
updateChannelError: '', updateChannelError: '',
updateCollectionError: '',
updatingChannel: false, updatingChannel: false,
creatingChannel: false, creatingChannel: false,
createChannelError: undefined, createChannelError: undefined,
updatingCollection: false,
creatingCollection: false,
createCollectionError: undefined,
pendingChannelImport: false, pendingChannelImport: false,
repostLoading: false,
repostError: undefined,
fetchingClaimListMinePageError: undefined,
myClaimsPageResults: [],
myClaimsPageNumber: undefined,
myClaimsPageTotalResults: undefined,
isFetchingClaimListMine: false,
isFetchingMyPurchases: false,
isCheckingNameForPublish: false,
checkingPending: false,
checkingReflecting: false,
}; };
function handleClaimAction(state: State, action: any): State { function handleClaimAction(state: State, action: any): State {
const { const { resolveInfo }: ClaimActionResolveInfo = action.data;
resolveInfo,
}: {
[string]: {
stream: ?StreamClaim,
channel: ?ChannelClaim,
claimsInChannel: ?number,
},
} = action.data;
const byUri = Object.assign({}, state.claimsByUri); const byUri = Object.assign({}, state.claimsByUri);
const byId = Object.assign({}, state.byId); const byId = Object.assign({}, state.byId);
const channelClaimCounts = Object.assign({}, state.channelClaimCounts); const channelClaimCounts = Object.assign({}, state.channelClaimCounts);
const pendingById = state.pendingById;
let newResolvingUrls = new Set(state.resolvingUris); let newResolvingUrls = new Set(state.resolvingUris);
let myClaimIds = new Set(state.myClaims);
Object.entries(resolveInfo).forEach(([url: string, resolveResponse: ResolveResponse]) => { Object.entries(resolveInfo).forEach(([url: string, resolveResponse: ResolveResponse]) => {
// $FlowFixMe // $FlowFixMe
const { claimsInChannel, stream, channel } = resolveResponse; const { claimsInChannel, stream, channel: channelFromResolve, collection } = resolveResponse;
if (claimsInChannel) { const channel = channelFromResolve || (stream && stream.signing_channel);
channelClaimCounts[url] = claimsInChannel;
channelClaimCounts[channel.canonical_url] = claimsInChannel;
}
if (stream) { if (stream) {
if (pendingById[stream.claim_id]) {
byId[stream.claim_id] = mergeClaim(stream, byId[stream.claim_id]);
} else {
byId[stream.claim_id] = stream; byId[stream.claim_id] = stream;
}
byUri[url] = stream.claim_id; byUri[url] = stream.claim_id;
// If url isn't a canonical_url, make sure that is added too // If url isn't a canonical_url, make sure that is added too
@ -101,23 +141,53 @@ function handleClaimAction(state: State, action: any): State {
byUri[stream.permanent_url] = stream.claim_id; byUri[stream.permanent_url] = stream.claim_id;
newResolvingUrls.delete(stream.canonical_url); newResolvingUrls.delete(stream.canonical_url);
newResolvingUrls.delete(stream.permanent_url); newResolvingUrls.delete(stream.permanent_url);
if (stream.is_my_output) {
myClaimIds.add(stream.claim_id);
}
} }
if (channel) { if (channel && channel.claim_id) {
if (!stream) { if (!stream) {
byUri[url] = channel.claim_id; byUri[url] = channel.claim_id;
} }
if (claimsInChannel) {
channelClaimCounts[url] = claimsInChannel;
channelClaimCounts[channel.canonical_url] = claimsInChannel;
}
if (pendingById[channel.claim_id]) {
byId[channel.claim_id] = mergeClaim(channel, byId[channel.claim_id]);
} else {
byId[channel.claim_id] = channel; byId[channel.claim_id] = channel;
// Also add the permanent_url here until lighthouse returns canonical_url for search results }
byUri[channel.permanent_url] = channel.claim_id; byUri[channel.permanent_url] = channel.claim_id;
byUri[channel.canonical_url] = channel.claim_id; byUri[channel.canonical_url] = channel.claim_id;
newResolvingUrls.delete(channel.canonical_url); newResolvingUrls.delete(channel.canonical_url);
newResolvingUrls.delete(channel.permanent_url); newResolvingUrls.delete(channel.permanent_url);
} }
if (collection) {
if (pendingById[collection.claim_id]) {
byId[collection.claim_id] = mergeClaim(collection, byId[collection.claim_id]);
} else {
byId[collection.claim_id] = collection;
}
byUri[url] = collection.claim_id;
byUri[collection.canonical_url] = collection.claim_id;
byUri[collection.permanent_url] = collection.claim_id;
newResolvingUrls.delete(collection.canonical_url);
newResolvingUrls.delete(collection.permanent_url);
if (collection.is_my_output) {
myClaimIds.add(collection.claim_id);
}
}
newResolvingUrls.delete(url); newResolvingUrls.delete(url);
if (!stream && !channel) { if (!stream && !channel && !collection && !pendingById[byUri[url]]) {
byUri[url] = null; byUri[url] = null;
} }
}); });
@ -127,6 +197,7 @@ function handleClaimAction(state: State, action: any): State {
claimsByUri: byUri, claimsByUri: byUri,
channelClaimCounts, channelClaimCounts,
resolvingUris: Array.from(newResolvingUrls), resolvingUris: Array.from(newResolvingUrls),
myClaims: Array.from(myClaimIds),
}); });
} }
@ -159,42 +230,46 @@ reducers[ACTIONS.FETCH_CLAIM_LIST_MINE_STARTED] = (state: State): State =>
}); });
reducers[ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED] = (state: State, action: any): State => { reducers[ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED] = (state: State, action: any): State => {
const { claims }: { claims: Array<Claim> } = action.data; const { result }: { result: ClaimListResponse } = action.data;
const claims = result.items;
const page = result.page;
const totalItems = result.total_items;
const byId = Object.assign({}, state.byId); const byId = Object.assign({}, state.byId);
const byUri = Object.assign({}, state.claimsByUri); const byUri = Object.assign({}, state.claimsByUri);
const pendingById: { [string]: Claim } = Object.assign({}, state.pendingById); const pendingById = Object.assign({}, state.pendingById);
const myClaims = state.myClaims ? state.myClaims.slice() : []; let myClaimIds = new Set(state.myClaims);
let urlsForCurrentPage = [];
claims.forEach((claim: Claim) => { claims.forEach((claim: Claim) => {
const uri = buildURI({ streamName: claim.name, streamClaimId: claim.claim_id }); const { permanent_url: permanentUri, claim_id: claimId, canonical_url: canonicalUri } = claim;
if (claim.type && claim.type.match(/claim|update/)) { if (claim.type && claim.type.match(/claim|update/)) {
urlsForCurrentPage.push(permanentUri);
if (claim.confirmations < 1) { if (claim.confirmations < 1) {
pendingById[claim.claim_id] = claim; pendingById[claimId] = claim;
delete byId[claim.claim_id]; if (byId[claimId]) {
delete byUri[claim.claim_id]; byId[claimId] = mergeClaim(claim, byId[claimId]);
} else { } else {
byId[claim.claim_id] = claim; byId[claimId] = claim;
byUri[uri] = claim.claim_id;
} }
} else {
byId[claimId] = claim;
}
byUri[permanentUri] = claimId;
byUri[canonicalUri] = claimId;
myClaimIds.add(claimId);
} }
});
// Remove old pending publishes
Object.values(pendingById)
// $FlowFixMe
.filter(pendingClaim => byId[pendingClaim.claim_id])
.forEach(pendingClaim => {
// $FlowFixMe
delete pendingById[pendingClaim.claim_id];
}); });
return Object.assign({}, state, { return Object.assign({}, state, {
isFetchingClaimListMine: false, isFetchingClaimListMine: false,
myClaims: concatClaims(myClaims, claims), myClaims: Array.from(myClaimIds),
byId, byId,
claimsByUri: byUri,
pendingById, pendingById,
claimsByUri: byUri,
myClaimsPageResults: urlsForCurrentPage,
myClaimsPageNumber: page,
myClaimsPageTotalResults: totalItems,
}); });
}; };
@ -203,35 +278,126 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_STARTED] = (state: State): State =>
reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): State => { reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): State => {
const { claims }: { claims: Array<ChannelClaim> } = action.data; const { claims }: { claims: Array<ChannelClaim> } = action.data;
const myClaims = state.myClaims || []; let myClaimIds = new Set(state.myClaims);
const pendingById = Object.assign(state.pendingById); const pendingById = Object.assign({}, state.pendingById);
let myChannelClaims; let myChannelClaims;
let byId = Object.assign({}, state.byId); const byId = Object.assign({}, state.byId);
const byUri = Object.assign({}, state.claimsByUri);
const channelClaimCounts = Object.assign({}, state.channelClaimCounts);
if (!claims.length) { if (!claims.length) {
// $FlowFixMe // $FlowFixMe
myChannelClaims = null; myChannelClaims = null;
} else { } else {
myChannelClaims = new Set(state.myChannelClaims); myChannelClaims = new Set(state.myChannelClaims);
claims.forEach(claim => { claims.forEach(claim => {
// $FlowFixMe const { meta } = claim;
myChannelClaims.add(claim.claim_id); const { claims_in_channel: claimsInChannel } = claim.meta;
byId[claim.claim_id] = claim; const {
canonical_url: canonicalUrl,
permanent_url: permanentUrl,
claim_id: claimId,
confirmations,
} = claim;
if (pendingById[claim.claim_id] && claim.confirmations > 0) { byUri[canonicalUrl] = claimId;
delete pendingById[claim.claim_id]; byUri[permanentUrl] = claimId;
channelClaimCounts[canonicalUrl] = claimsInChannel;
channelClaimCounts[permanentUrl] = claimsInChannel;
// $FlowFixMe
myChannelClaims.add(claimId);
if (confirmations < 1) {
pendingById[claimId] = claim;
if (byId[claimId]) {
byId[claimId] = mergeClaim(claim, byId[claimId]);
} else {
byId[claimId] = claim;
} }
} else {
byId[claimId] = claim;
}
myClaimIds.add(claimId);
}); });
} }
return Object.assign({}, state, { return Object.assign({}, state, {
byId, byId,
pendingById,
claimsByUri: byUri,
channelClaimCounts,
fetchingMyChannels: false, fetchingMyChannels: false,
myChannelClaims, myChannelClaims: myChannelClaims ? Array.from(myChannelClaims) : null,
myClaims: concatClaims(myClaims, claims), myClaims: myClaimIds ? Array.from(myClaimIds) : null,
}); });
}; };
reducers[ACTIONS.FETCH_CHANNEL_LIST_FAILED] = (state: State, action: any): State => {
return Object.assign({}, state, {
fetchingMyChannels: false,
});
};
reducers[ACTIONS.FETCH_COLLECTION_LIST_STARTED] = (state: State): State => ({
...state,
fetchingMyCollections: true,
});
reducers[ACTIONS.FETCH_COLLECTION_LIST_COMPLETED] = (state: State, action: any): State => {
const { claims }: { claims: Array<CollectionClaim> } = action.data;
const myClaims = state.myClaims || [];
let myClaimIds = new Set(myClaims);
const pendingById = Object.assign({}, state.pendingById);
let myCollectionClaimsSet = new Set([]);
const byId = Object.assign({}, state.byId);
const byUri = Object.assign({}, state.claimsByUri);
if (claims.length) {
myCollectionClaimsSet = new Set(state.myCollectionClaims);
claims.forEach(claim => {
const { meta } = claim;
const {
canonical_url: canonicalUrl,
permanent_url: permanentUrl,
claim_id: claimId,
confirmations,
} = claim;
byUri[canonicalUrl] = claimId;
byUri[permanentUrl] = claimId;
// $FlowFixMe
myCollectionClaimsSet.add(claimId);
// we don't want to overwrite a pending result with a resolve
if (confirmations < 1) {
pendingById[claimId] = claim;
if (byId[claimId]) {
byId[claimId] = mergeClaim(claim, byId[claimId]);
} else {
byId[claimId] = claim;
}
} else {
byId[claimId] = claim;
}
myClaimIds.add(claimId);
});
}
return {
...state,
byId,
pendingById,
claimsByUri: byUri,
fetchingMyCollections: false,
myCollectionClaims: Array.from(myCollectionClaimsSet),
myClaims: myClaimIds ? Array.from(myClaimIds) : null,
};
};
reducers[ACTIONS.FETCH_COLLECTION_LIST_FAILED] = (state: State): State => {
return { ...state, fetchingMyCollections: false };
};
reducers[ACTIONS.FETCH_CHANNEL_CLAIMS_STARTED] = (state: State, action: any): State => { reducers[ACTIONS.FETCH_CHANNEL_CLAIMS_STARTED] = (state: State, action: any): State => {
const { uri, page } = action.data; const { uri, page } = action.data;
const fetchingChannelClaims = Object.assign({}, state.fetchingChannelClaims); const fetchingChannelClaims = Object.assign({}, state.fetchingChannelClaims);
@ -265,7 +431,8 @@ reducers[ACTIONS.FETCH_CHANNEL_CLAIMS_COMPLETED] = (state: State, action: any):
const paginatedClaimsByChannel = Object.assign({}, state.paginatedClaimsByChannel); const paginatedClaimsByChannel = Object.assign({}, state.paginatedClaimsByChannel);
// check if count has changed - that means cached pagination will be wrong, so clear it // check if count has changed - that means cached pagination will be wrong, so clear it
const previousCount = paginatedClaimsByChannel[uri] && paginatedClaimsByChannel[uri]['itemCount']; const previousCount = paginatedClaimsByChannel[uri] && paginatedClaimsByChannel[uri]['itemCount'];
const byChannel = (claimsInChannel === previousCount) ? Object.assign({}, paginatedClaimsByChannel[uri]) : {}; const byChannel =
claimsInChannel === previousCount ? Object.assign({}, paginatedClaimsByChannel[uri]) : {};
const allClaimIds = new Set(byChannel.all); const allClaimIds = new Set(byChannel.all);
const currentPageClaimIds = []; const currentPageClaimIds = [];
const byId = Object.assign({}, state.byId); const byId = Object.assign({}, state.byId);
@ -309,27 +476,104 @@ reducers[ACTIONS.ABANDON_CLAIM_STARTED] = (state: State, action: any): State =>
}); });
}; };
reducers[ACTIONS.UPDATE_PENDING_CLAIMS] = (state: State, action: any): State => {
const { claims: pendingClaims }: { claims: Array<Claim> } = action.data;
const byId = Object.assign({}, state.byId);
const pendingById = Object.assign({}, state.pendingById);
const byUri = Object.assign({}, state.claimsByUri);
let myClaimIds = new Set(state.myClaims);
const myChannelClaims = new Set(state.myChannelClaims);
// $FlowFixMe
pendingClaims.forEach((claim: Claim) => {
let newClaim;
const { permanent_url: uri, claim_id: claimId, type, value_type: valueType } = claim;
pendingById[claimId] = claim; // make sure we don't need to merge?
const oldClaim = byId[claimId];
if (oldClaim && oldClaim.canonical_url) {
newClaim = mergeClaim(oldClaim, claim);
} else {
newClaim = claim;
}
if (valueType === 'channel') {
myChannelClaims.add(claimId);
}
if (type && type.match(/claim|update/)) {
byId[claimId] = newClaim;
byUri[uri] = claimId;
}
myClaimIds.add(claimId);
});
return Object.assign({}, state, {
myClaims: Array.from(myClaimIds),
byId,
pendingById,
myChannelClaims: Array.from(myChannelClaims),
claimsByUri: byUri,
});
};
reducers[ACTIONS.UPDATE_CONFIRMED_CLAIMS] = (state: State, action: any): State => {
const {
claims: confirmedClaims,
pending: pendingClaims,
}: { claims: Array<Claim>, pending: { [string]: Claim } } = action.data;
const byId = Object.assign({}, state.byId);
const byUri = Object.assign({}, state.claimsByUri);
//
confirmedClaims.forEach((claim: GenericClaim) => {
const { claim_id: claimId, type } = claim;
let newClaim = claim;
const oldClaim = byId[claimId];
if (oldClaim && oldClaim.canonical_url) {
newClaim = mergeClaim(oldClaim, claim);
}
if (type && type.match(/claim|update|channel/)) {
byId[claimId] = newClaim;
}
});
return Object.assign({}, state, {
pendingById: pendingClaims,
byId,
claimsByUri: byUri,
});
};
reducers[ACTIONS.ABANDON_CLAIM_SUCCEEDED] = (state: State, action: any): State => { reducers[ACTIONS.ABANDON_CLAIM_SUCCEEDED] = (state: State, action: any): State => {
const { claimId }: { claimId: string } = action.data; const { claimId }: { claimId: string } = action.data;
const byId = Object.assign({}, state.byId); const byId = Object.assign({}, state.byId);
const newMyClaims = state.myClaims ? state.myClaims.slice() : []; const newMyClaims = state.myClaims ? state.myClaims.slice() : [];
const newMyChannelClaims = state.myChannelClaims ? state.myChannelClaims.slice() : [];
const claimsByUri = Object.assign({}, state.claimsByUri); const claimsByUri = Object.assign({}, state.claimsByUri);
const newMyCollectionClaims = state.myCollectionClaims ? state.myCollectionClaims.slice() : [];
Object.keys(claimsByUri).forEach(uri => { Object.keys(claimsByUri).forEach(uri => {
if (claimsByUri[uri] === claimId) { if (claimsByUri[uri] === claimId) {
delete claimsByUri[uri]; delete claimsByUri[uri];
} }
}); });
const myClaims = newMyClaims.filter(i => i.claim_id && i.claim_id !== claimId); const myClaims = newMyClaims.filter(i => i !== claimId);
const myChannelClaims = newMyChannelClaims.filter(i => i !== claimId);
const myCollectionClaims = newMyCollectionClaims.filter(i => i !== claimId);
delete byId[claimId]; delete byId[claimId];
return Object.assign({}, state, { return Object.assign({}, state, {
myClaims, myClaims,
myChannelClaims,
myCollectionClaims,
byId, byId,
claimsByUri, claimsByUri,
}); });
}; };
reducers[ACTIONS.CLEAR_CHANNEL_ERRORS] = (state: State): State => ({
...state,
createChannelError: null,
updateChannelError: null,
});
reducers[ACTIONS.CREATE_CHANNEL_STARTED] = (state: State): State => ({ reducers[ACTIONS.CREATE_CHANNEL_STARTED] = (state: State): State => ({
...state, ...state,
creatingChannel: true, creatingChannel: true,
@ -337,19 +581,7 @@ reducers[ACTIONS.CREATE_CHANNEL_STARTED] = (state: State): State => ({
}); });
reducers[ACTIONS.CREATE_CHANNEL_COMPLETED] = (state: State, action: any): State => { reducers[ACTIONS.CREATE_CHANNEL_COMPLETED] = (state: State, action: any): State => {
const channelClaim: ChannelClaim = action.data.channelClaim;
const byId = Object.assign({}, state.byId);
const pendingById = Object.assign({}, state.pendingById);
const myChannelClaims = new Set(state.myChannelClaims);
byId[channelClaim.claim_id] = channelClaim;
pendingById[channelClaim.claim_id] = channelClaim;
myChannelClaims.add(channelClaim.claim_id);
return Object.assign({}, state, { return Object.assign({}, state, {
byId,
pendingById,
myChannelClaims,
creatingChannel: false, creatingChannel: false,
}); });
}; };
@ -369,13 +601,7 @@ reducers[ACTIONS.UPDATE_CHANNEL_STARTED] = (state: State, action: any): State =>
}; };
reducers[ACTIONS.UPDATE_CHANNEL_COMPLETED] = (state: State, action: any): State => { reducers[ACTIONS.UPDATE_CHANNEL_COMPLETED] = (state: State, action: any): State => {
const channelClaim: ChannelClaim = action.data.channelClaim;
const byId = Object.assign({}, state.byId);
byId[channelClaim.claim_id] = channelClaim;
return Object.assign({}, state, { return Object.assign({}, state, {
byId,
updateChannelError: '', updateChannelError: '',
updatingChannel: false, updatingChannel: false,
}); });
@ -388,6 +614,61 @@ reducers[ACTIONS.UPDATE_CHANNEL_FAILED] = (state: State, action: any): State =>
}); });
}; };
reducers[ACTIONS.CLEAR_COLLECTION_ERRORS] = (state: State): State => ({
...state,
createCollectionError: null,
updateCollectionError: null,
});
reducers[ACTIONS.COLLECTION_PUBLISH_STARTED] = (state: State): State => ({
...state,
creatingCollection: true,
createCollectionError: null,
});
reducers[ACTIONS.COLLECTION_PUBLISH_COMPLETED] = (state: State, action: any): State => {
const myCollections = state.myCollectionClaims || [];
const myClaims = state.myClaims || [];
const { claimId } = action.data;
let myClaimIds = new Set(myClaims);
let myCollectionClaimsSet = new Set(myCollections);
myClaimIds.add(claimId);
myCollectionClaimsSet.add(claimId);
return Object.assign({}, state, {
creatingCollection: false,
myClaims: Array.from(myClaimIds),
myCollectionClaims: Array.from(myCollectionClaimsSet),
});
};
reducers[ACTIONS.COLLECTION_PUBLISH_FAILED] = (state: State, action: any): State => {
return Object.assign({}, state, {
creatingCollection: false,
createCollectionError: action.data.error,
});
};
reducers[ACTIONS.COLLECTION_PUBLISH_UPDATE_STARTED] = (state: State, action: any): State => {
return Object.assign({}, state, {
updateCollectionError: '',
updatingCollection: true,
});
};
reducers[ACTIONS.COLLECTION_PUBLISH_UPDATE_COMPLETED] = (state: State, action: any): State => {
return Object.assign({}, state, {
updateCollectionError: '',
updatingCollection: false,
});
};
reducers[ACTIONS.COLLECTION_PUBLISH_UPDATE_FAILED] = (state: State, action: any): State => {
return Object.assign({}, state, {
updateCollectionError: action.data.error,
updatingCollection: false,
});
};
reducers[ACTIONS.IMPORT_CHANNEL_STARTED] = (state: State): State => reducers[ACTIONS.IMPORT_CHANNEL_STARTED] = (state: State): State =>
Object.assign({}, state, { pendingChannelImports: true }); Object.assign({}, state, { pendingChannelImports: true });
@ -436,14 +717,197 @@ reducers[ACTIONS.CLAIM_SEARCH_COMPLETED] = (state: State, action: any): State =>
}; };
reducers[ACTIONS.CLAIM_SEARCH_FAILED] = (state: State, action: any): State => { reducers[ACTIONS.CLAIM_SEARCH_FAILED] = (state: State, action: any): State => {
const { query } = action.data;
const claimSearchByQuery = Object.assign({}, state.claimSearchByQuery);
const fetchingClaimSearchByQuery = Object.assign({}, state.fetchingClaimSearchByQuery); const fetchingClaimSearchByQuery = Object.assign({}, state.fetchingClaimSearchByQuery);
delete fetchingClaimSearchByQuery[action.data.query]; const claimSearchByQueryLastPageReached = Object.assign(
{},
state.claimSearchByQueryLastPageReached
);
delete fetchingClaimSearchByQuery[query];
if (claimSearchByQuery[query] && claimSearchByQuery[query].length !== 0) {
claimSearchByQueryLastPageReached[query] = true;
} else {
claimSearchByQuery[query] = null;
}
return Object.assign({}, state, { return Object.assign({}, state, {
fetchingClaimSearchByQuery, fetchingClaimSearchByQuery,
claimSearchByQuery,
claimSearchByQueryLastPageReached,
}); });
}; };
reducers[ACTIONS.CLAIM_REPOST_STARTED] = (state: State): State => {
return {
...state,
repostLoading: true,
repostError: null,
};
};
reducers[ACTIONS.CLAIM_REPOST_COMPLETED] = (state: State, action: any): State => {
const { originalClaimId, repostClaim } = action.data;
const byId = { ...state.byId };
const claimsByUri = { ...state.claimsByUri };
const claimThatWasReposted = byId[originalClaimId];
const repostStub = { ...repostClaim, reposted_claim: claimThatWasReposted };
byId[repostStub.claim_id] = repostStub;
claimsByUri[repostStub.permanent_url] = repostStub.claim_id;
return {
...state,
byId,
claimsByUri,
repostLoading: false,
repostError: null,
};
};
reducers[ACTIONS.CLAIM_REPOST_FAILED] = (state: State, action: any): State => {
const { error } = action.data;
return {
...state,
repostLoading: false,
repostError: error,
};
};
reducers[ACTIONS.CLEAR_REPOST_ERROR] = (state: State): State => {
return {
...state,
repostError: null,
};
};
reducers[ACTIONS.ADD_FILES_REFLECTING] = (state: State, action): State => {
const pendingClaim = action.data;
const { reflectingById } = state;
const claimId = pendingClaim && pendingClaim.claim_id;
reflectingById[claimId] = { fileListItem: pendingClaim, progress: 0, stalled: false };
return Object.assign({}, state, {
...state,
reflectingById: reflectingById,
});
};
reducers[ACTIONS.UPDATE_FILES_REFLECTING] = (state: State, action): State => {
const newReflectingById = action.data;
return Object.assign({}, state, {
...state,
reflectingById: newReflectingById,
});
};
reducers[ACTIONS.TOGGLE_CHECKING_REFLECTING] = (state: State, action): State => {
const checkingReflecting = action.data;
return Object.assign({}, state, {
...state,
checkingReflecting,
});
};
reducers[ACTIONS.TOGGLE_CHECKING_PENDING] = (state: State, action): State => {
const checking = action.data;
return Object.assign({}, state, {
...state,
checkingPending: checking,
});
};
reducers[ACTIONS.PURCHASE_LIST_STARTED] = (state: State): State => {
return {
...state,
fetchingMyPurchases: true,
fetchingMyPurchasesError: null,
};
};
reducers[ACTIONS.PURCHASE_LIST_COMPLETED] = (state: State, action: any): State => {
const { result }: { result: PurchaseListResponse, resolve: boolean } = action.data;
const page = result.page;
const totalItems = result.total_items;
let byId = Object.assign({}, state.byId);
let byUri = Object.assign({}, state.claimsByUri);
let urlsForCurrentPage = [];
result.items.forEach(item => {
if (!item.claim) {
// Abandoned claim
return;
}
const { claim, ...purchaseInfo } = item;
claim.purchase_receipt = purchaseInfo;
const claimId = claim.claim_id;
const uri = claim.canonical_url;
byId[claimId] = claim;
byUri[uri] = claimId;
urlsForCurrentPage.push(uri);
});
return Object.assign({}, state, {
byId,
claimsByUri: byUri,
myPurchases: urlsForCurrentPage,
myPurchasesPageNumber: page,
myPurchasesPageTotalResults: totalItems,
fetchingMyPurchases: false,
});
};
reducers[ACTIONS.PURCHASE_LIST_FAILED] = (state: State, action: any): State => {
const { error } = action.data;
return {
...state,
fetchingMyPurchases: false,
fetchingMyPurchasesError: error,
};
};
reducers[ACTIONS.PURCHASE_URI_COMPLETED] = (state: State, action: any): State => {
const { uri, purchaseReceipt } = action.data;
let byId = Object.assign({}, state.byId);
let byUri = Object.assign({}, state.claimsByUri);
let myPurchases = state.myPurchases ? state.myPurchases.slice() : [];
let urlsForCurrentPage = [];
const claimId = byUri[uri];
if (claimId) {
let claim = byId[claimId];
claim.purchase_receipt = purchaseReceipt;
}
myPurchases.push(uri);
return {
...state,
byId,
myPurchases,
purchaseUriSuccess: true,
};
};
reducers[ACTIONS.PURCHASE_URI_FAILED] = (state: State): State => {
return {
...state,
purchaseUriSuccess: false,
};
};
reducers[ACTIONS.CLEAR_PURCHASED_URI_SUCCESS] = (state: State): State => {
return {
...state,
purchaseUriSuccess: false,
};
};
export function claimsReducer(state: State = defaultState, action: any) { export function claimsReducer(state: State = defaultState, action: any) {
const handler = reducers[action.type]; const handler = reducers[action.type];
if (handler) return handler(state, action); if (handler) return handler(state, action);

View file

@ -0,0 +1,239 @@
// @flow
import { handleActions } from 'util/redux-utils';
import * as ACTIONS from 'constants/action_types';
import * as COLS from 'constants/collections';
// Current unix time in whole seconds (collections store `updatedAt` in seconds).
const getTimestamp = () => Math.floor(Date.now() / 1000);
// Initial collections slice. Collections move between buckets over their
// lifecycle: `unpublished` (local only) -> `pending` (publish in flight) ->
// `resolved` (claim confirmed), with local edits of published collections
// parked in `edited` until they are republished or superseded.
const defaultState: CollectionState = {
  builtin: {
    watchlater: {
      items: [],
      id: COLS.WATCH_LATER_ID,
      name: 'Watch Later',
      updatedAt: getTimestamp(),
      type: COLS.COL_TYPE_PLAYLIST,
    },
    favorites: {
      items: [],
      id: COLS.FAVORITES_ID,
      name: 'Favorites',
      type: COLS.COL_TYPE_PLAYLIST,
      updatedAt: getTimestamp(),
    },
  },
  resolved: {}, // collections resolved from claims, keyed by claim id
  unpublished: {}, // sync -- local-only collections, keyed by local id
  edited: {}, // local edits of published collections, keyed by claim id
  pending: {}, // collections with a publish/update in flight, keyed by claim id
  saved: [], // saved collection ids (presumably others' collections) — TODO confirm
  isResolvingCollectionById: {},
  error: null,
};
// Reducer for the collections slice; `handleActions` dispatches each action
// type to its handler with (state, action).
const collectionsReducer = handleActions(
  {
    // Create a new local (unpublished) collection. The template supplies
    // defaults; any caller-provided fields in `params` win.
    [ACTIONS.COLLECTION_NEW]: (state, action) => {
      const { entry: params } = action.data; // { id, name, items?, type?, ... }
      const newListTemplate = {
        id: params.id,
        name: params.name,
        items: [],
        updatedAt: getTimestamp(),
        type: params.type,
      };
      const newList = Object.assign({}, newListTemplate, { ...params });
      const { unpublished: lists } = state;
      const newLists = Object.assign({}, lists, { [params.id]: newList });
      return {
        ...state,
        unpublished: newLists,
      };
    },
    // Delete a collection: from the named bucket when `collectionKey` is
    // given and holds the id, otherwise from the first of
    // edited/unpublished/pending that contains it.
    [ACTIONS.COLLECTION_DELETE]: (state, action) => {
      const { id, collectionKey } = action.data;
      const { edited: editList, unpublished: unpublishedList, pending: pendingList } = state;
      const newEditList = Object.assign({}, editList);
      const newUnpublishedList = Object.assign({}, unpublishedList);
      const newPendingList = Object.assign({}, pendingList);
      if (collectionKey && state[collectionKey] && state[collectionKey][id]) {
        // Caller named the bucket: remove only from there.
        const newList = Object.assign({}, state[collectionKey]);
        delete newList[id];
        return {
          ...state,
          [collectionKey]: newList,
        };
      } else {
        if (newEditList[id]) {
          delete newEditList[id];
        } else if (newUnpublishedList[id]) {
          delete newUnpublishedList[id];
        } else if (newPendingList[id]) {
          delete newPendingList[id];
        }
      }
      return {
        ...state,
        edited: newEditList,
        unpublished: newUnpublishedList,
        pending: newPendingList,
      };
    },
    // A collection publish was submitted: move the collection into `pending`
    // keyed by its claim id. A `localId` means this is a brand-new publish
    // (sourced from `unpublished`); otherwise it is an update of an existing
    // claim (sourced from `edited`, falling back to `resolved`).
    [ACTIONS.COLLECTION_PENDING]: (state, action) => {
      const { localId, claimId } = action.data;
      const {
        resolved: resolvedList,
        edited: editList,
        unpublished: unpublishedList,
        pending: pendingList,
      } = state;
      const newEditList = Object.assign({}, editList);
      const newResolvedList = Object.assign({}, resolvedList);
      const newUnpublishedList = Object.assign({}, unpublishedList);
      const newPendingList = Object.assign({}, pendingList);
      if (localId) {
        // new publish
        newPendingList[claimId] = Object.assign({}, newUnpublishedList[localId] || {});
        delete newUnpublishedList[localId];
      } else {
        // edit update
        newPendingList[claimId] = Object.assign(
          {},
          newEditList[claimId] || newResolvedList[claimId]
        );
        delete newEditList[claimId];
      }
      return {
        ...state,
        edited: newEditList,
        unpublished: newUnpublishedList,
        pending: newPendingList,
      };
    },
    // Store an edited copy of a collection in the bucket it belongs to:
    // builtins stay under the provided collectionKey, published collections
    // go under `edited`, everything else under `unpublished`.
    [ACTIONS.COLLECTION_EDIT]: (state, action) => {
      const { id, collectionKey, collection } = action.data;
      if (COLS.BUILTIN_LISTS.includes(id)) {
        const { builtin: lists } = state;
        return {
          ...state,
          [collectionKey]: { ...lists, [id]: collection },
        };
      }
      if (collectionKey === 'edited') {
        const { edited: lists } = state;
        return {
          ...state,
          edited: { ...lists, [id]: collection },
        };
      }
      const { unpublished: lists } = state;
      return {
        ...state,
        unpublished: { ...lists, [id]: collection },
      };
    },
    // Surface a collection-level error message.
    [ACTIONS.COLLECTION_ERROR]: (state, action) => {
      return Object.assign({}, state, {
        error: action.data.message,
      });
    },
    // Mark the given collection ids as resolving and clear any stale error.
    [ACTIONS.COLLECTION_ITEMS_RESOLVE_STARTED]: (state, action) => {
      const { ids } = action.data;
      const { isResolvingCollectionById } = state;
      const newResolving = Object.assign({}, isResolvingCollectionById);
      ids.forEach(id => {
        newResolving[id] = true;
      });
      return Object.assign({}, state, {
        ...state,
        error: '',
        isResolvingCollectionById: newResolving,
      });
    },
    // Hydrate collection buckets from synced user state; keep the current
    // bucket when the payload omits it.
    [ACTIONS.USER_STATE_POPULATE]: (state, action) => {
      const {
        builtinCollections,
        savedCollections,
        unpublishedCollections,
        editedCollections,
      } = action.data;
      return {
        ...state,
        edited: editedCollections || state.edited,
        unpublished: unpublishedCollections || state.unpublished,
        builtin: builtinCollections || state.builtin,
        saved: savedCollections || state.saved,
      };
    },
    // Resolution finished: merge the resolved collections, drop local edits
    // that are older than the resolved copy, and clear pending/resolving
    // bookkeeping for every id that came back (successfully or not).
    [ACTIONS.COLLECTION_ITEMS_RESOLVE_COMPLETED]: (state, action) => {
      const { resolvedCollections, failedCollectionIds } = action.data;
      const { pending, edited, isResolvingCollectionById, resolved } = state;
      const newPending = Object.assign({}, pending);
      const newEdited = Object.assign({}, edited);
      const newResolved = Object.assign({}, resolved, resolvedCollections);
      const resolvedIds = Object.keys(resolvedCollections);
      const newResolving = Object.assign({}, isResolvingCollectionById);
      if (resolvedCollections && Object.keys(resolvedCollections).length) {
        resolvedIds.forEach(resolvedId => {
          if (newEdited[resolvedId]) {
            // The resolved copy is newer than the local edit; discard the edit.
            if (newEdited[resolvedId]['updatedAt'] < resolvedCollections[resolvedId]['updatedAt']) {
              delete newEdited[resolvedId];
            }
          }
          delete newResolving[resolvedId];
          if (newPending[resolvedId]) {
            delete newPending[resolvedId];
          }
        });
      }
      if (failedCollectionIds && Object.keys(failedCollectionIds).length) {
        failedCollectionIds.forEach(failedId => {
          delete newResolving[failedId];
        });
      }
      return Object.assign({}, state, {
        ...state,
        pending: newPending,
        resolved: newResolved,
        edited: newEdited,
        isResolvingCollectionById: newResolving,
      });
    },
    // Resolution failed outright for these ids: stop marking them as
    // resolving and surface the error message.
    [ACTIONS.COLLECTION_ITEMS_RESOLVE_FAILED]: (state, action) => {
      const { ids } = action.data;
      const { isResolvingCollectionById } = state;
      const newResolving = Object.assign({}, isResolvingCollectionById);
      ids.forEach(id => {
        delete newResolving[id];
      });
      return Object.assign({}, state, {
        ...state,
        isResolvingCollectionById: newResolving,
        error: action.data.message,
      });
    },
  },
  defaultState
);

export { collectionsReducer };

View file

@ -1,63 +0,0 @@
// @flow
import * as ACTIONS from 'constants/action_types';
import { handleActions } from 'util/redux-utils';
// Initial comments slice: comment lists keyed by claim id, plus a
// uri -> claim id index and a single shared loading flag.
const defaultState: CommentsState = {
  byId: {}, // claimId -> Array of comment objects
  commentsByUri: {}, // uri -> claimId
  isLoading: false,
};
export const commentReducer = handleActions(
{
[ACTIONS.COMMENT_CREATE_STARTED]: (state: CommentsState, action: any): CommentsState => ({
...state,
isLoading: true,
}),
[ACTIONS.COMMENT_CREATE_FAILED]: (state: CommentsState, action: any) => ({
...state,
isLoading: false,
}),
[ACTIONS.COMMENT_CREATE_COMPLETED]: (state: CommentsState, action: any): CommentsState => {
const { comment, claimId }: any = action.data;
const byId = Object.assign({}, state.byId);
const comments = byId[claimId];
const newComments = comments.slice();
newComments.unshift(comment);
byId[claimId] = newComments;
return {
...state,
byId,
};
},
[ACTIONS.COMMENT_LIST_STARTED]: state => ({ ...state, isLoading: true }),
[ACTIONS.COMMENT_LIST_COMPLETED]: (state: CommentsState, action: any) => {
const { comments, claimId, uri } = action.data;
const byId = Object.assign({}, state.byId);
const commentsByUri = Object.assign({}, state.commentsByUri);
if (comments) {
byId[claimId] = comments;
commentsByUri[uri] = claimId;
}
return {
...state,
byId,
commentsByUri,
isLoading: false,
};
},
[ACTIONS.COMMENT_LIST_FAILED]: (state: CommentsState, action: any) => ({
...state,
isLoading: false,
}),
},
defaultState
);

View file

@ -1,89 +0,0 @@
// @flow
import * as ACTIONS from 'constants/action_types';
const reducers = {};
// Initial file slice: which uris were purchased, which purchase attempts
// failed, and the most recent purchase error message.
const defaultState = {
  failedPurchaseUris: [], // uris whose purchase attempt failed
  purchasedUris: [], // uris successfully purchased
  purchaseUriErrorMessage: '',
};
reducers[ACTIONS.PURCHASE_URI_STARTED] = (
state: FileState,
action: PurchaseUriStarted
): FileState => {
const { uri } = action.data;
const newFailedPurchaseUris = state.failedPurchaseUris.slice();
if (newFailedPurchaseUris.includes(uri)) {
newFailedPurchaseUris.splice(newFailedPurchaseUris.indexOf(uri), 1);
}
return {
...state,
failedPurchaseUris: newFailedPurchaseUris,
purchaseUriErrorMessage: '',
};
};
reducers[ACTIONS.PURCHASE_URI_COMPLETED] = (
state: FileState,
action: PurchaseUriCompleted
): FileState => {
const { uri } = action.data;
const newPurchasedUris = state.purchasedUris.slice();
const newFailedPurchaseUris = state.failedPurchaseUris.slice();
if (!newPurchasedUris.includes(uri)) {
newPurchasedUris.push(uri);
}
if (newFailedPurchaseUris.includes(uri)) {
newFailedPurchaseUris.splice(newFailedPurchaseUris.indexOf(uri), 1);
}
return {
...state,
failedPurchaseUris: newFailedPurchaseUris,
purchasedUris: newPurchasedUris,
purchaseUriErrorMessage: '',
};
};
reducers[ACTIONS.PURCHASE_URI_FAILED] = (
state: FileState,
action: PurchaseUriFailed
): FileState => {
const { uri, error } = action.data;
const newFailedPurchaseUris = state.failedPurchaseUris.slice();
if (!newFailedPurchaseUris.includes(uri)) {
newFailedPurchaseUris.push(uri);
}
return {
...state,
failedPurchaseUris: newFailedPurchaseUris,
purchaseUriErrorMessage: error,
};
};
reducers[ACTIONS.DELETE_PURCHASED_URI] = (
state: FileState,
action: DeletePurchasedUri
): FileState => {
const { uri } = action.data;
const newPurchasedUris = state.purchasedUris.slice();
if (newPurchasedUris.includes(uri)) {
newPurchasedUris.splice(newPurchasedUris.indexOf(uri), 1);
}
return {
...state,
purchasedUris: newPurchasedUris,
};
};
export function fileReducer(state: FileState = defaultState, action: any) {
const handler = reducers[action.type];
if (handler) return handler(state, action);
return state;
}

View file

@ -7,8 +7,13 @@ import { CHANNEL_ANONYMOUS } from 'constants/claim';
type PublishState = { type PublishState = {
editingURI: ?string, editingURI: ?string,
fileText: ?string,
filePath: ?string, filePath: ?string,
remoteFileUrl: ?string,
contentIsFree: boolean, contentIsFree: boolean,
fileDur: number,
fileSize: number,
fileVid: boolean,
fee: { fee: {
amount: number, amount: number,
currency: string, currency: string,
@ -17,8 +22,11 @@ type PublishState = {
thumbnail_url: string, thumbnail_url: string,
thumbnailPath: string, thumbnailPath: string,
uploadThumbnailStatus: string, uploadThumbnailStatus: string,
thumbnailError: ?boolean,
description: string, description: string,
language: string, language: string,
releaseTime: ?number,
releaseTimeEdited: ?number,
channel: string, channel: string,
channelId: ?string, channelId: ?string,
name: string, name: string,
@ -28,11 +36,18 @@ type PublishState = {
otherLicenseDescription: string, otherLicenseDescription: string,
licenseUrl: string, licenseUrl: string,
tags: Array<string>, tags: Array<string>,
optimize: boolean,
useLBRYUploader: boolean,
}; };
const defaultState: PublishState = { const defaultState: PublishState = {
editingURI: undefined, editingURI: undefined,
fileText: '',
filePath: undefined, filePath: undefined,
fileDur: 0,
fileSize: 0,
fileVid: false,
remoteFileUrl: undefined,
contentIsFree: true, contentIsFree: true,
fee: { fee: {
amount: 1, amount: 1,
@ -42,14 +57,17 @@ const defaultState: PublishState = {
thumbnail_url: '', thumbnail_url: '',
thumbnailPath: '', thumbnailPath: '',
uploadThumbnailStatus: THUMBNAIL_STATUSES.API_DOWN, uploadThumbnailStatus: THUMBNAIL_STATUSES.API_DOWN,
thumbnailError: undefined,
description: '', description: '',
language: 'en', language: '',
releaseTime: undefined,
releaseTimeEdited: undefined,
nsfw: false, nsfw: false,
channel: CHANNEL_ANONYMOUS, channel: CHANNEL_ANONYMOUS,
channelId: '', channelId: '',
name: '', name: '',
nameError: undefined, nameError: undefined,
bid: 0.1, bid: 0.01,
bidError: undefined, bidError: undefined,
licenseType: 'None', licenseType: 'None',
otherLicenseDescription: 'All rights reserved', otherLicenseDescription: 'All rights reserved',
@ -58,6 +76,8 @@ const defaultState: PublishState = {
publishing: false, publishing: false,
publishSuccess: false, publishSuccess: false,
publishError: undefined, publishError: undefined,
optimize: false,
useLBRYUploader: false,
}; };
export const publishReducer = handleActions( export const publishReducer = handleActions(
@ -69,8 +89,13 @@ export const publishReducer = handleActions(
...data, ...data,
}; };
}, },
[ACTIONS.CLEAR_PUBLISH]: (): PublishState => ({ [ACTIONS.CLEAR_PUBLISH]: (state: PublishState): PublishState => ({
...defaultState, ...defaultState,
uri: undefined,
channel: state.channel,
bid: state.bid,
optimize: state.optimize,
language: state.language,
}), }),
[ACTIONS.PUBLISH_START]: (state: PublishState): PublishState => ({ [ACTIONS.PUBLISH_START]: (state: PublishState): PublishState => ({
...state, ...state,

View file

@ -1,94 +0,0 @@
// @flow
import * as ACTIONS from 'constants/action_types';
import { handleActions } from 'util/redux-utils';
import { SEARCH_OPTIONS } from 'constants/search';
// Initial search slice. `options` holds the filter settings sent with each
// search request.
const defaultState = {
  isActive: false, // does the user have any typed text in the search input
  focused: false, // is the search input focused
  searchQuery: '', // needs to be an empty string for input focusing
  options: {
    [SEARCH_OPTIONS.RESULT_COUNT]: 30,
    [SEARCH_OPTIONS.CLAIM_TYPE]: SEARCH_OPTIONS.INCLUDE_FILES_AND_CHANNELS,
    [SEARCH_OPTIONS.MEDIA_AUDIO]: true,
    [SEARCH_OPTIONS.MEDIA_VIDEO]: true,
    [SEARCH_OPTIONS.MEDIA_TEXT]: true,
    [SEARCH_OPTIONS.MEDIA_IMAGE]: true,
    [SEARCH_OPTIONS.MEDIA_APPLICATION]: true,
  },
  suggestions: {}, // query -> suggestions payload
  urisByQuery: {}, // query -> Array of result uris
};
export const searchReducer = handleActions(
{
[ACTIONS.SEARCH_START]: (state: SearchState): SearchState => ({
...state,
searching: true,
}),
[ACTIONS.SEARCH_SUCCESS]: (state: SearchState, action: SearchSuccess): SearchState => {
const { query, uris } = action.data;
return {
...state,
searching: false,
urisByQuery: Object.assign({}, state.urisByQuery, { [query]: uris }),
};
},
[ACTIONS.SEARCH_FAIL]: (state: SearchState): SearchState => ({
...state,
searching: false,
}),
[ACTIONS.UPDATE_SEARCH_QUERY]: (
state: SearchState,
action: UpdateSearchQuery
): SearchState => ({
...state,
searchQuery: action.data.query,
isActive: true,
}),
[ACTIONS.UPDATE_SEARCH_SUGGESTIONS]: (
state: SearchState,
action: UpdateSearchSuggestions
): SearchState => ({
...state,
suggestions: {
...state.suggestions,
[action.data.query]: action.data.suggestions,
},
}),
// sets isActive to false so the uri will be populated correctly if the
// user is on a file page. The search query will still be present on any
// other page
[ACTIONS.DISMISS_NOTIFICATION]: (state: SearchState): SearchState => ({
...state,
isActive: false,
}),
[ACTIONS.SEARCH_FOCUS]: (state: SearchState): SearchState => ({
...state,
focused: true,
}),
[ACTIONS.SEARCH_BLUR]: (state: SearchState): SearchState => ({
...state,
focused: false,
}),
[ACTIONS.UPDATE_SEARCH_OPTIONS]: (
state: SearchState,
action: UpdateSearchOptions
): SearchState => {
const { options: oldOptions } = state;
const newOptions = action.data;
const options = { ...oldOptions, ...newOptions };
return {
...state,
options,
};
},
},
defaultState
);

View file

@ -1,80 +0,0 @@
// @flow
import * as ACTIONS from 'constants/action_types';
import { handleActions } from 'util/redux-utils';
import { DEFAULT_KNOWN_TAGS, DEFAULT_FOLLOWED_TAGS } from 'constants/tags';
// Build the default { tag: { name: tag } } map of known tags.
// Fix: the previous reduce spread the whole accumulator on every iteration,
// making this accidentally O(n²); a plain accumulation loop is O(n) and
// equally readable. (Deliberately avoids Object.fromEntries, which crashed
// on some browsers elsewhere in this codebase.)
function getDefaultKnownTags() {
  const tagsMap = {};
  for (const tag of DEFAULT_FOLLOWED_TAGS.concat(DEFAULT_KNOWN_TAGS)) {
    tagsMap[tag] = { name: tag };
  }
  return tagsMap;
}
// Initial tags slice: the default follow list plus the built-in set of known
// tags, each stored as { name } keyed by tag name.
const defaultState: TagState = {
  followedTags: DEFAULT_FOLLOWED_TAGS,
  knownTags: getDefaultKnownTags(),
};
export const tagsReducer = handleActions(
{
[ACTIONS.TOGGLE_TAG_FOLLOW]: (state: TagState, action: TagAction): TagState => {
const { followedTags } = state;
const { name } = action.data;
let newFollowedTags = followedTags.slice();
if (newFollowedTags.includes(name)) {
newFollowedTags = newFollowedTags.filter(tag => tag !== name);
} else {
newFollowedTags.push(name);
}
return {
...state,
followedTags: newFollowedTags,
};
},
[ACTIONS.TAG_ADD]: (state: TagState, action: TagAction) => {
const { knownTags } = state;
const { name } = action.data;
let newKnownTags = { ...knownTags };
newKnownTags[name] = { name };
return {
...state,
knownTags: newKnownTags,
};
},
[ACTIONS.TAG_DELETE]: (state: TagState, action: TagAction) => {
const { knownTags, followedTags } = state;
const { name } = action.data;
let newKnownTags = { ...knownTags };
delete newKnownTags[name];
const newFollowedTags = followedTags.filter(tag => tag !== name);
return {
...state,
knownTags: newKnownTags,
followedTags: newFollowedTags,
};
},
[ACTIONS.USER_STATE_POPULATE]: (
state: TagState,
action: { data: { tags: ?Array<string> } }
) => {
const { tags } = action.data;
return {
...state,
followedTags: tags && tags.length ? tags : DEFAULT_FOLLOWED_TAGS,
};
},
},
defaultState
);

View file

@ -26,6 +26,7 @@ type WalletState = {
supports: { [string]: Support }, supports: { [string]: Support },
abandoningSupportsByOutpoint: { [string]: boolean }, abandoningSupportsByOutpoint: { [string]: boolean },
fetchingTransactions: boolean, fetchingTransactions: boolean,
fetchingTransactionsError: string,
gettingNewAddress: boolean, gettingNewAddress: boolean,
draftTransaction: any, draftTransaction: any,
sendingSupport: boolean, sendingSupport: boolean,
@ -43,6 +44,19 @@ type WalletState = {
walletLockSucceded: ?boolean, walletLockSucceded: ?boolean,
walletLockResult: ?boolean, walletLockResult: ?boolean,
walletReconnecting: boolean, walletReconnecting: boolean,
txoFetchParams: {},
utxoCounts: {},
txoPage: any,
fetchId: string,
fetchingTxos: boolean,
fetchingTxosError?: string,
consolidatingUtxos: boolean,
pendingConsolidateTxid?: string,
massClaimingTips: boolean,
pendingMassClaimTxid?: string,
pendingSupportTransactions: {}, // { claimId: {txid: 123, amount 12.3}, }
pendingTxos: Array<string>,
abandonClaimSupportError?: string,
}; };
const defaultState = { const defaultState = {
@ -55,6 +69,7 @@ const defaultState = {
latestBlock: undefined, latestBlock: undefined,
transactions: {}, transactions: {},
fetchingTransactions: false, fetchingTransactions: false,
fetchingTransactionsError: undefined,
supports: {}, supports: {},
fetchingSupports: false, fetchingSupports: false,
abandoningSupportsByOutpoint: {}, abandoningSupportsByOutpoint: {},
@ -76,6 +91,22 @@ const defaultState = {
walletLockResult: null, walletLockResult: null,
transactionListFilter: 'all', transactionListFilter: 'all',
walletReconnecting: false, walletReconnecting: false,
txoFetchParams: {},
utxoCounts: {},
fetchingUtxoCounts: false,
fetchingUtxoError: undefined,
consolidatingUtxos: false,
pendingConsolidateTxid: null,
massClaimingTips: false,
pendingMassClaimTxid: null,
txoPage: {},
fetchId: '',
fetchingTxos: false,
fetchingTxosError: undefined,
pendingSupportTransactions: {},
pendingTxos: [],
abandonClaimSupportError: undefined,
}; };
export const walletReducer = handleActions( export const walletReducer = handleActions(
@ -100,6 +131,140 @@ export const walletReducer = handleActions(
}; };
}, },
[ACTIONS.FETCH_TXO_PAGE_STARTED]: (state: WalletState, action) => {
return {
...state,
fetchId: action.data,
fetchingTxos: true,
fetchingTxosError: undefined,
};
},
[ACTIONS.FETCH_TXO_PAGE_COMPLETED]: (state: WalletState, action) => {
if (state.fetchId !== action.data.fetchId) {
// Leave 'state' and 'fetchingTxos' alone. The latter would ensure
// the spiner would continue spinning for the latest transaction.
return { ...state };
}
return {
...state,
txoPage: action.data.result,
fetchId: '',
fetchingTxos: false,
};
},
[ACTIONS.FETCH_TXO_PAGE_FAILED]: (state: WalletState, action) => {
return {
...state,
txoPage: {},
fetchId: '',
fetchingTxos: false,
fetchingTxosError: action.data,
};
},
[ACTIONS.FETCH_UTXO_COUNT_STARTED]: (state: WalletState) => {
return {
...state,
fetchingUtxoCounts: true,
fetchingUtxoError: undefined,
};
},
[ACTIONS.FETCH_UTXO_COUNT_COMPLETED]: (state: WalletState, action) => {
return {
...state,
utxoCounts: action.data,
fetchingUtxoCounts: false,
};
},
[ACTIONS.FETCH_UTXO_COUNT_FAILED]: (state: WalletState, action) => {
return {
...state,
utxoCounts: {},
fetchingUtxoCounts: false,
fetchingUtxoError: action.data,
};
},
[ACTIONS.DO_UTXO_CONSOLIDATE_STARTED]: (state: WalletState) => {
return {
...state,
consolidatingUtxos: true,
};
},
[ACTIONS.DO_UTXO_CONSOLIDATE_COMPLETED]: (state: WalletState, action) => {
const { txid } = action.data;
return {
...state,
consolidatingUtxos: false,
pendingConsolidateTxid: txid,
};
},
[ACTIONS.DO_UTXO_CONSOLIDATE_FAILED]: (state: WalletState, action) => {
return {
...state,
consolidatingUtxos: false,
};
},
[ACTIONS.TIP_CLAIM_MASS_STARTED]: (state: WalletState) => {
return {
...state,
massClaimingTips: true,
};
},
[ACTIONS.TIP_CLAIM_MASS_COMPLETED]: (state: WalletState, action) => {
const { txid } = action.data;
return {
...state,
massClaimingTips: false,
pendingMassClaimTxid: txid,
};
},
[ACTIONS.TIP_CLAIM_MASS_FAILED]: (state: WalletState, action) => {
return {
...state,
massClaimingTips: false,
};
},
[ACTIONS.PENDING_CONSOLIDATED_TXOS_UPDATED]: (state: WalletState, action) => {
const { pendingTxos, pendingMassClaimTxid, pendingConsolidateTxid } = state;
const { txids, remove } = action.data;
if (remove) {
const newTxos = pendingTxos.filter(txo => !txids.includes(txo));
const newPendingMassClaimTxid = txids.includes(pendingMassClaimTxid)
? undefined
: pendingMassClaimTxid;
const newPendingConsolidateTxid = txids.includes(pendingConsolidateTxid)
? undefined
: pendingConsolidateTxid;
return {
...state,
pendingTxos: newTxos,
pendingMassClaimTxid: newPendingMassClaimTxid,
pendingConsolidateTxid: newPendingConsolidateTxid,
};
} else {
const newPendingSet = new Set([...pendingTxos, ...txids]);
return { ...state, pendingTxos: Array.from(newPendingSet) };
}
},
[ACTIONS.UPDATE_TXO_FETCH_PARAMS]: (state: WalletState, action) => {
return {
...state,
txoFetchParams: action.data,
};
},
[ACTIONS.FETCH_SUPPORTS_STARTED]: (state: WalletState) => ({ [ACTIONS.FETCH_SUPPORTS_STARTED]: (state: WalletState) => ({
...state, ...state,
fetchingSupports: true, fetchingSupports: true,
@ -140,7 +305,53 @@ export const walletReducer = handleActions(
return { return {
...state, ...state,
supports: byOutpoint, supports: byOutpoint,
abandoningSupportsById: currentlyAbandoning, abandoningSupportsByOutpoint: currentlyAbandoning,
};
},
[ACTIONS.ABANDON_CLAIM_SUPPORT_STARTED]: (state: WalletState, action: any): WalletState => {
return {
...state,
abandonClaimSupportError: undefined,
};
},
[ACTIONS.ABANDON_CLAIM_SUPPORT_PREVIEW]: (state: WalletState, action: any): WalletState => {
return {
...state,
abandonClaimSupportError: undefined,
};
},
[ACTIONS.ABANDON_CLAIM_SUPPORT_COMPLETED]: (state: WalletState, action: any): WalletState => {
const {
claimId,
type,
txid,
effective,
}: { claimId: string, type: string, txid: string, effective: string } = action.data;
const pendingtxs = Object.assign({}, state.pendingSupportTransactions);
pendingtxs[claimId] = { txid, type, effective };
return {
...state,
pendingSupportTransactions: pendingtxs,
abandonClaimSupportError: undefined,
};
},
[ACTIONS.ABANDON_CLAIM_SUPPORT_FAILED]: (state: WalletState, action: any): WalletState => {
return {
...state,
abandonClaimSupportError: action.data,
};
},
[ACTIONS.PENDING_SUPPORTS_UPDATED]: (state: WalletState, action: any): WalletState => {
return {
...state,
pendingSupportTransactions: action.data,
}; };
}, },

View file

@ -1,18 +1,35 @@
// @flow // @flow
import { normalizeURI, buildURI, parseURI } from 'lbryURI'; import { normalizeURI, parseURI } from 'lbryURI';
import { selectSearchUrisByQuery } from 'redux/selectors/search';
import { selectSupportsByOutpoint } from 'redux/selectors/wallet'; import { selectSupportsByOutpoint } from 'redux/selectors/wallet';
import { createSelector } from 'reselect'; import { createSelector } from 'reselect';
import { isClaimNsfw, createNormalizedClaimSearchKey } from 'util/claim'; import { isClaimNsfw, filterClaims } from 'util/claim';
import { getSearchQueryString } from 'util/query-params'; import * as CLAIM from 'constants/claim';
import { PAGE_SIZE } from 'constants/claim';
const selectState = state => state.claims || {}; const selectState = state => state.claims || {};
export const selectClaimsById = createSelector( export const selectById = createSelector(
selectState, selectState,
state => state.byId || {} state => state.byId || {}
); );
export const selectPendingClaimsById = createSelector(
selectState,
state => state.pendingById || {}
);
export const selectClaimsById = createSelector(
selectById,
selectPendingClaimsById,
(byId, pendingById) => {
return Object.assign(byId, pendingById); // do I need merged to keep metadata?
}
);
export const selectClaimIdsByUri = createSelector(
selectState,
state => state.claimsByUri || {}
);
export const selectCurrentChannelPage = createSelector( export const selectCurrentChannelPage = createSelector(
selectState, selectState,
state => state.currentChannelPage || 1 state => state.currentChannelPage || 1
@ -28,11 +45,20 @@ export const selectCreateChannelError = createSelector(
state => state.createChannelError state => state.createChannelError
); );
export const selectClaimsByUri = createSelector( export const selectRepostLoading = createSelector(
selectState, selectState,
state => state.repostLoading
);
export const selectRepostError = createSelector(
selectState,
state => state.repostError
);
export const selectClaimsByUri = createSelector(
selectClaimIdsByUri,
selectClaimsById, selectClaimsById,
(state, byId) => { (byUri, byId) => {
const byUri = state.claimsByUri || {};
const claims = {}; const claims = {};
Object.keys(byUri).forEach(uri => { Object.keys(byUri).forEach(uri => {
@ -57,76 +83,117 @@ export const selectAllClaimsByChannel = createSelector(
state => state.paginatedClaimsByChannel || {} state => state.paginatedClaimsByChannel || {}
); );
export const selectPendingById = createSelector( export const selectPendingIds = createSelector(
selectState, selectState,
state => state.pendingById || {} state => Object.keys(state.pendingById) || []
); );
export const selectPendingClaims = createSelector( export const selectPendingClaims = createSelector(
selectState, selectPendingClaimsById,
state => Object.values(state.pendingById || []) pendingById => Object.values(pendingById)
); );
export const makeSelectClaimIsPending = (uri: string) => export const makeSelectClaimIsPending = (uri: string) =>
createSelector( createSelector(
selectPendingById, selectClaimIdsByUri,
pendingById => { selectPendingClaimsById,
let claimId; (idsByUri, pendingById) => {
const claimId = idsByUri[normalizeURI(uri)];
try {
const { isChannel, channelClaimId, streamClaimId } = parseURI(uri);
claimId = isChannel ? channelClaimId : streamClaimId;
} catch (e) {}
if (claimId) { if (claimId) {
return Boolean(pendingById[claimId]); return Boolean(pendingById[claimId]);
} }
return false;
} }
); );
export const makeSelectPendingByUri = (uri: string) => export const makeSelectClaimIdIsPending = (claimId: string) =>
createSelector( createSelector(
selectPendingById, selectPendingClaimsById,
pendingById => { pendingById => {
const { isChannel, channelClaimId, streamClaimId } = parseURI(uri); return Boolean(pendingById[claimId]);
const claimId = isChannel ? channelClaimId : streamClaimId;
return pendingById[claimId];
} }
); );
export const makeSelectClaimForUri = (uri: string) => export const makeSelectClaimIdForUri = (uri: string) =>
createSelector( createSelector(
selectClaimsByUri, selectClaimIdsByUri,
selectPendingById, claimIds => claimIds[uri]
(byUri, pendingById) => { );
// Check if a claim is pending first
// It won't be in claimsByUri because resolving it will return nothing
let valid; export const selectReflectingById = createSelector(
selectState,
state => state.reflectingById
);
export const makeSelectClaimForClaimId = (claimId: string) =>
createSelector(
selectClaimsById,
byId => byId[claimId]
);
export const makeSelectClaimForUri = (uri: string, returnRepost: boolean = true) =>
createSelector(
selectClaimIdsByUri,
selectClaimsById,
(byUri, byId) => {
let validUri;
let channelClaimId; let channelClaimId;
let streamClaimId; let streamClaimId;
let isChannel; let isChannel;
try { try {
({ isChannel, channelClaimId, streamClaimId } = parseURI(uri)); ({ isChannel, channelClaimId, streamClaimId } = parseURI(uri));
valid = true; validUri = true;
} catch (e) {} } catch (e) {}
if (valid) { if (validUri && byUri) {
const claimId = isChannel ? channelClaimId : streamClaimId; const claimId = uri && byUri[normalizeURI(uri)];
const pendingClaim = pendingById[claimId]; const claim = byId[claimId];
if (pendingClaim) { // Make sure to return the claim as is so apps can check if it's been resolved before (null) or still needs to be resolved (undefined)
return pendingClaim; if (claimId === null) {
return null;
} else if (claimId === undefined) {
return undefined;
} }
return byUri && byUri[normalizeURI(uri)]; const repostedClaim = claim && claim.reposted_claim;
if (repostedClaim && returnRepost) {
const channelUrl =
claim.signing_channel &&
(claim.signing_channel.canonical_url || claim.signing_channel.permanent_url);
return {
...repostedClaim,
repost_url: normalizeURI(uri),
repost_channel_url: channelUrl,
repost_bid_amount: claim && claim.meta && claim.meta.effective_amount,
};
} else {
return claim;
}
} }
} }
); );
export const selectMyClaimsRaw = createSelector( export const selectMyClaimsRaw = createSelector(
selectState, selectState,
state => state.myClaims selectClaimsById,
(state, byId) => {
const ids = state.myClaims;
if (!ids) {
return ids;
}
const claims = [];
ids.forEach(id => {
if (byId[id]) {
// I'm not sure why this check is necessary, but it ought to be a quick fix for https://github.com/lbryio/lbry-desktop/issues/544
claims.push(byId[id]);
}
});
return claims;
}
); );
export const selectAbandoningIds = createSelector( export const selectAbandoningIds = createSelector(
@ -134,6 +201,22 @@ export const selectAbandoningIds = createSelector(
state => Object.keys(state.abandoningById || {}) state => Object.keys(state.abandoningById || {})
); );
export const makeSelectAbandoningClaimById = (claimId: string) =>
createSelector(
selectAbandoningIds,
ids => ids.includes(claimId)
);
export const makeSelectIsAbandoningClaimForUri = (uri: string) =>
createSelector(
selectClaimIdsByUri,
selectAbandoningIds,
(claimIdsByUri, abandoningById) => {
const claimId = claimIdsByUri[normalizeURI(uri)];
return abandoningById.indexOf(claimId) >= 0;
}
);
export const selectMyActiveClaims = createSelector( export const selectMyActiveClaims = createSelector(
selectMyClaimsRaw, selectMyClaimsRaw,
selectAbandoningIds, selectAbandoningIds,
@ -162,11 +245,74 @@ export const makeSelectClaimIsMine = (rawUri: string) => {
return false; return false;
} }
return claims && claims[uri] && claims[uri].claim_id && myClaims.has(claims[uri].claim_id); return (
claims &&
claims[uri] &&
(claims[uri].is_my_output || (claims[uri].claim_id && myClaims.has(claims[uri].claim_id)))
);
} }
); );
}; };
export const selectMyPurchases = createSelector(
selectState,
state => state.myPurchases
);
export const selectPurchaseUriSuccess = createSelector(
selectState,
state => state.purchaseUriSuccess
);
export const selectMyPurchasesCount = createSelector(
selectState,
state => state.myPurchasesPageTotalResults
);
export const selectIsFetchingMyPurchases = createSelector(
selectState,
state => state.fetchingMyPurchases
);
export const selectFetchingMyPurchasesError = createSelector(
selectState,
state => state.fetchingMyPurchasesError
);
export const makeSelectMyPurchasesForPage = (query: ?string, page: number = 1) =>
createSelector(
selectMyPurchases,
selectClaimsByUri,
(myPurchases: Array<string>, claimsByUri: { [string]: Claim }) => {
if (!myPurchases) {
return undefined;
}
if (!query) {
// ensure no duplicates from double purchase bugs
return [...new Set(myPurchases)];
}
const fileInfos = myPurchases.map(uri => claimsByUri[uri]);
const matchingFileInfos = filterClaims(fileInfos, query);
const start = (Number(page) - 1) * Number(CLAIM.PAGE_SIZE);
const end = Number(page) * Number(CLAIM.PAGE_SIZE);
return matchingFileInfos && matchingFileInfos.length
? matchingFileInfos
.slice(start, end)
.map(fileInfo => fileInfo.canonical_url || fileInfo.permanent_url)
: [];
}
);
export const makeSelectClaimWasPurchased = (uri: string) =>
createSelector(
makeSelectClaimForUri(uri),
claim => {
return claim && claim.purchase_receipt !== undefined;
}
);
export const selectAllFetchingChannelClaims = createSelector( export const selectAllFetchingChannelClaims = createSelector(
selectState, selectState,
state => state.fetchingChannelClaims || {} state => state.fetchingChannelClaims || {}
@ -192,6 +338,7 @@ export const makeSelectClaimsInChannelForPage = (uri: string, page?: number) =>
} }
); );
// THIS IS LEFT OVER FROM ONE TAB CHANNEL_CONTENT
export const makeSelectTotalClaimsInChannelSearch = (uri: string) => export const makeSelectTotalClaimsInChannelSearch = (uri: string) =>
createSelector( createSelector(
selectClaimsById, selectClaimsById,
@ -202,6 +349,7 @@ export const makeSelectTotalClaimsInChannelSearch = (uri: string) =>
} }
); );
// THIS IS LEFT OVER FROM ONE_TAB CHANNEL CONTENT
export const makeSelectTotalPagesInChannelSearch = (uri: string) => export const makeSelectTotalPagesInChannelSearch = (uri: string) =>
createSelector( createSelector(
selectClaimsById, selectClaimsById,
@ -212,22 +360,6 @@ export const makeSelectTotalPagesInChannelSearch = (uri: string) =>
} }
); );
export const makeSelectClaimsInChannelForCurrentPageState = (uri: string) =>
createSelector(
selectClaimsById,
selectAllClaimsByChannel,
selectCurrentChannelPage,
(byId, allClaims, page) => {
const byChannel = allClaims[uri] || {};
const claimIds = byChannel[page || 1];
if (!claimIds) return claimIds;
return claimIds.map(claimId => byId[claimId]);
}
);
export const makeSelectMetadataForUri = (uri: string) => export const makeSelectMetadataForUri = (uri: string) =>
createSelector( createSelector(
makeSelectClaimForUri(uri), makeSelectClaimForUri(uri),
@ -279,6 +411,19 @@ export const makeSelectAmountForUri = (uri: string) =>
} }
); );
export const makeSelectEffectiveAmountForUri = (uri: string) =>
createSelector(
makeSelectClaimForUri(uri, false),
claim => {
return (
claim &&
claim.meta &&
typeof claim.meta.effective_amount === 'string' &&
Number(claim.meta.effective_amount)
);
}
);
export const makeSelectContentTypeForUri = (uri: string) => export const makeSelectContentTypeForUri = (uri: string) =>
createSelector( createSelector(
makeSelectClaimForUri(uri), makeSelectClaimForUri(uri),
@ -293,7 +438,9 @@ export const makeSelectThumbnailForUri = (uri: string) =>
makeSelectClaimForUri(uri), makeSelectClaimForUri(uri),
claim => { claim => {
const thumbnail = claim && claim.value && claim.value.thumbnail; const thumbnail = claim && claim.value && claim.value.thumbnail;
return thumbnail && thumbnail.url ? thumbnail.url.trim() : undefined; return thumbnail && thumbnail.url
? thumbnail.url.trim().replace(/^http:\/\//i, 'https://')
: undefined;
} }
); );
@ -302,7 +449,7 @@ export const makeSelectCoverForUri = (uri: string) =>
makeSelectClaimForUri(uri), makeSelectClaimForUri(uri),
claim => { claim => {
const cover = claim && claim.value && claim.value.cover; const cover = claim && claim.value && claim.value.cover;
return cover && cover.url ? cover.url.trim() : undefined; return cover && cover.url ? cover.url.trim().replace(/^http:\/\//i, 'https://') : undefined;
} }
); );
@ -311,12 +458,33 @@ export const selectIsFetchingClaimListMine = createSelector(
state => state.isFetchingClaimListMine state => state.isFetchingClaimListMine
); );
export const selectMyClaimsPage = createSelector(
selectState,
state => state.myClaimsPageResults || []
);
export const selectMyClaimsPageNumber = createSelector(
selectState,
state => (state.claimListMinePage && state.claimListMinePage.items) || [],
state => (state.txoPage && state.txoPage.page) || 1
);
export const selectMyClaimsPageItemCount = createSelector(
selectState,
state => state.myClaimsPageTotalResults || 1
);
export const selectFetchingMyClaimsPageError = createSelector(
selectState,
state => state.fetchingClaimListMinePageError
);
export const selectMyClaims = createSelector( export const selectMyClaims = createSelector(
selectMyActiveClaims, selectMyActiveClaims,
selectClaimsById, selectClaimsById,
selectAbandoningIds, selectAbandoningIds,
selectPendingClaims, (myClaimIds, byId, abandoningIds) => {
(myClaimIds, byId, abandoningIds, pendingClaims) => {
const claims = []; const claims = [];
myClaimIds.forEach(id => { myClaimIds.forEach(id => {
@ -325,30 +493,35 @@ export const selectMyClaims = createSelector(
if (claim && abandoningIds.indexOf(id) === -1) claims.push(claim); if (claim && abandoningIds.indexOf(id) === -1) claims.push(claim);
}); });
return [...claims, ...pendingClaims]; return [...claims];
} }
); );
export const selectMyClaimsWithoutChannels = createSelector( export const selectMyClaimsWithoutChannels = createSelector(
selectMyClaims, selectMyClaims,
myClaims => myClaims =>
myClaims.filter(claim => !claim.name.match(/^@/)).sort((a, b) => a.timestamp - b.timestamp) myClaims
.filter(claim => claim && !claim.name.match(/^@/))
.sort((a, b) => a.timestamp - b.timestamp)
); );
export const selectMyClaimUrisWithoutChannels = createSelector( export const selectMyClaimUrisWithoutChannels = createSelector(
selectMyClaimsWithoutChannels, selectMyClaimsWithoutChannels,
myClaims => myClaims => {
myClaims return myClaims
.sort((a, b) => { .sort((a, b) => {
if (!a.timestamp) { if (a.height < 1) {
return -1; return -1;
} else if (!b.timestamp) { } else if (b.height < 1) {
return 1; return 1;
} else { } else {
return b.timestamp - a.timestamp; return b.timestamp - a.timestamp;
} }
}) })
.map(claim => `lbry://${claim.name}#${claim.claim_id}`) .map(claim => {
return claim.canonical_url || claim.permanent_url;
});
}
); );
export const selectAllMyClaimsByOutpoint = createSelector( export const selectAllMyClaimsByOutpoint = createSelector(
@ -373,6 +546,11 @@ export const selectFetchingMyChannels = createSelector(
state => state.fetchingMyChannels state => state.fetchingMyChannels
); );
export const selectFetchingMyCollections = createSelector(
selectState,
state => state.fetchingMyCollections
);
export const selectMyChannelClaims = createSelector( export const selectMyChannelClaims = createSelector(
selectState, selectState,
selectClaimsById, selectClaimsById,
@ -394,6 +572,16 @@ export const selectMyChannelClaims = createSelector(
} }
); );
export const selectMyChannelUrls = createSelector(
selectMyChannelClaims,
claims => (claims ? claims.map(claim => claim.canonical_url || claim.permanent_url) : undefined)
);
export const selectMyCollectionIds = createSelector(
selectState,
state => state.myCollectionClaims
);
export const selectResolvingUris = createSelector( export const selectResolvingUris = createSelector(
selectState, selectState,
state => state.resolvingUris || [] state => state.resolvingUris || []
@ -420,16 +608,35 @@ export const selectChannelClaimCounts = createSelector(
state => state.channelClaimCounts || {} state => state.channelClaimCounts || {}
); );
export const makeSelectPendingClaimForUri = (uri: string) =>
createSelector(
selectPendingClaimsById,
pendingById => {
let uriStreamName;
let uriChannelName;
try {
({ streamName: uriStreamName, channelName: uriChannelName } = parseURI(uri));
} catch (e) {
return null;
}
const pendingClaims = (Object.values(pendingById): any);
const matchingClaim = pendingClaims.find((claim: GenericClaim) => {
return claim.normalized_name === uriChannelName || claim.normalized_name === uriStreamName;
});
return matchingClaim || null;
}
);
export const makeSelectTotalItemsForChannel = (uri: string) => export const makeSelectTotalItemsForChannel = (uri: string) =>
createSelector( createSelector(
selectChannelClaimCounts, selectChannelClaimCounts,
byUri => byUri && byUri[uri] byUri => byUri && byUri[normalizeURI(uri)]
); );
export const makeSelectTotalPagesForChannel = (uri: string, pageSize: number = 10) => export const makeSelectTotalPagesForChannel = (uri: string, pageSize: number = 10) =>
createSelector( createSelector(
selectChannelClaimCounts, selectChannelClaimCounts,
byUri => byUri && byUri[uri] && Math.ceil(byUri[uri] / pageSize) byUri => byUri && byUri[uri] && Math.ceil(byUri[normalizeURI(uri)] / pageSize)
); );
export const makeSelectNsfwCountFromUris = (uris: Array<string>) => export const makeSelectNsfwCountFromUris = (uris: Array<string>) =>
@ -445,27 +652,6 @@ export const makeSelectNsfwCountFromUris = (uris: Array<string>) =>
}, 0) }, 0)
); );
export const makeSelectNsfwCountForChannel = (uri: string) =>
createSelector(
selectClaimsById,
selectAllClaimsByChannel,
selectCurrentChannelPage,
(byId, allClaims, page) => {
const byChannel = allClaims[uri] || {};
const claimIds = byChannel[page || 1];
if (!claimIds) return 0;
return claimIds.reduce((acc, claimId) => {
const claim = byId[claimId];
if (isClaimNsfw(claim)) {
return acc + 1;
}
return acc;
}, 0);
}
);
export const makeSelectOmittedCountForChannel = (uri: string) => export const makeSelectOmittedCountForChannel = (uri: string) =>
createSelector( createSelector(
makeSelectTotalItemsForChannel(uri), makeSelectTotalItemsForChannel(uri),
@ -473,8 +659,7 @@ export const makeSelectOmittedCountForChannel = (uri: string) =>
(claimsInChannel, claimsInSearch) => { (claimsInChannel, claimsInSearch) => {
if (claimsInChannel && typeof claimsInSearch === 'number' && claimsInSearch >= 0) { if (claimsInChannel && typeof claimsInSearch === 'number' && claimsInSearch >= 0) {
return claimsInChannel - claimsInSearch; return claimsInChannel - claimsInSearch;
} } else return 0;
else return 0;
} }
); );
@ -494,39 +679,6 @@ export const makeSelectClaimIsNsfw = (uri: string): boolean =>
} }
); );
export const makeSelectRecommendedContentForUri = (uri: string) =>
createSelector(
makeSelectClaimForUri(uri),
selectSearchUrisByQuery,
(claim, searchUrisByQuery) => {
const atVanityURI = !uri.includes('#');
let recommendedContent;
if (claim) {
// always grab full URL - this can change once search returns canonical
const currentUri = buildURI({ streamClaimId: claim.claim_id, streamName: claim.name });
const { title } = claim.value;
const searchQuery = getSearchQueryString(title ? title.replace(/\//, ' ') : '');
let searchUris = searchUrisByQuery[searchQuery];
if (searchUris) {
searchUris = searchUris.filter(searchUri => searchUri !== currentUri);
recommendedContent = searchUris;
}
}
return recommendedContent;
}
);
export const makeSelectFirstRecommendedFileForUri = (uri: string) =>
createSelector(
makeSelectRecommendedContentForUri(uri),
recommendedContent => (recommendedContent ? recommendedContent[0] : null)
);
// Returns the associated channel uri for a given claim uri // Returns the associated channel uri for a given claim uri
// accepts a regular claim uri lbry://something // accepts a regular claim uri lbry://something
// returns the channel uri that created this claim lbry://@channel // returns the channel uri that created this claim lbry://@channel
@ -534,12 +686,40 @@ export const makeSelectChannelForClaimUri = (uri: string, includePrefix: boolean
createSelector( createSelector(
makeSelectClaimForUri(uri), makeSelectClaimForUri(uri),
(claim: ?Claim) => { (claim: ?Claim) => {
if (!claim || !claim.signing_channel || !claim.signing_channel.canonical_url) { if (!claim || !claim.signing_channel || !claim.is_channel_signature_valid) {
return null; return null;
} }
const { canonical_url: canonicalUrl } = claim.signing_channel; const { canonical_url: canonicalUrl, permanent_url: permanentUrl } = claim.signing_channel;
if (canonicalUrl) {
return includePrefix ? canonicalUrl : canonicalUrl.slice('lbry://'.length); return includePrefix ? canonicalUrl : canonicalUrl.slice('lbry://'.length);
} else {
return includePrefix ? permanentUrl : permanentUrl.slice('lbry://'.length);
}
}
);
export const makeSelectChannelPermUrlForClaimUri = (uri: string, includePrefix: boolean = false) =>
createSelector(
makeSelectClaimForUri(uri),
(claim: ?Claim) => {
if (claim && claim.value_type === 'channel') {
return claim.permanent_url;
}
if (!claim || !claim.signing_channel || !claim.is_channel_signature_valid) {
return null;
}
return claim.signing_channel.permanent_url;
}
);
export const makeSelectMyChannelPermUrlForName = (name: string) =>
createSelector(
selectMyChannelClaims,
claims => {
const matchingClaim = claims && claims.find(claim => claim.name === name);
return matchingClaim ? matchingClaim.permanent_url : null;
} }
); );
@ -594,7 +774,7 @@ export const makeSelectSupportsForUri = (uri: string) =>
selectSupportsByOutpoint, selectSupportsByOutpoint,
makeSelectClaimForUri(uri), makeSelectClaimForUri(uri),
(byOutpoint, claim: ?StreamClaim) => { (byOutpoint, claim: ?StreamClaim) => {
if (!claim || !claim.is_mine) { if (!claim || !claim.is_my_output) {
return null; return null;
} }
@ -621,15 +801,24 @@ export const selectUpdateChannelError = createSelector(
state => state.updateChannelError state => state.updateChannelError
); );
export const makeSelectReflectingClaimForUri = (uri: string) =>
createSelector(
selectClaimIdsByUri,
selectReflectingById,
(claimIdsByUri, reflectingById) => {
const claimId = claimIdsByUri[normalizeURI(uri)];
return reflectingById[claimId];
}
);
export const makeSelectMyStreamUrlsForPage = (page: number = 1) => export const makeSelectMyStreamUrlsForPage = (page: number = 1) =>
createSelector( createSelector(
selectMyClaimUrisWithoutChannels, selectMyClaimUrisWithoutChannels,
urls => { urls => {
const start = ((Number(page) - 1) * Number(PAGE_SIZE)); const start = (Number(page) - 1) * Number(CLAIM.PAGE_SIZE);
const end = (Number(page) * Number(PAGE_SIZE)); const end = Number(page) * Number(CLAIM.PAGE_SIZE);
return (urls && urls.length)
? urls.slice(start, end) return urls && urls.length ? urls.slice(start, end) : [];
: [];
} }
); );
@ -637,3 +826,97 @@ export const selectMyStreamUrlsCount = createSelector(
selectMyClaimUrisWithoutChannels, selectMyClaimUrisWithoutChannels,
channels => channels.length channels => channels.length
); );
export const makeSelectTagInClaimOrChannelForUri = (uri: string, tag: string) =>
createSelector(
makeSelectClaimForUri(uri),
claim => {
const claimTags = (claim && claim.value && claim.value.tags) || [];
const channelTags =
(claim &&
claim.signing_channel &&
claim.signing_channel.value &&
claim.signing_channel.value.tags) ||
[];
return claimTags.includes(tag) || channelTags.includes(tag);
}
);
export const makeSelectClaimHasSource = (uri: string) =>
createSelector(
makeSelectClaimForUri(uri),
claim => {
if (!claim) {
return false;
}
return Boolean(claim.value.source);
}
);
export const makeSelectClaimIsStreamPlaceholder = (uri: string) =>
createSelector(
makeSelectClaimForUri(uri),
claim => {
if (!claim) {
return false;
}
return Boolean(claim.value_type === 'stream' && !claim.value.source);
}
);
export const makeSelectTotalStakedAmountForChannelUri = (uri: string) =>
createSelector(
makeSelectClaimForUri(uri),
claim => {
if (!claim || !claim.amount || !claim.meta || !claim.meta.support_amount) {
return 0;
}
return parseFloat(claim.amount) + parseFloat(claim.meta.support_amount) || 0;
}
);
export const makeSelectStakedLevelForChannelUri = (uri: string) =>
createSelector(
makeSelectTotalStakedAmountForChannelUri(uri),
amount => {
let level = 1;
switch (true) {
case amount >= CLAIM.LEVEL_2_STAKED_AMOUNT && amount < CLAIM.LEVEL_3_STAKED_AMOUNT:
level = 2;
break;
case amount >= CLAIM.LEVEL_3_STAKED_AMOUNT && amount < CLAIM.LEVEL_4_STAKED_AMOUNT:
level = 3;
break;
case amount >= CLAIM.LEVEL_4_STAKED_AMOUNT && amount < CLAIM.LEVEL_5_STAKED_AMOUNT:
level = 4;
break;
case amount >= CLAIM.LEVEL_5_STAKED_AMOUNT:
level = 5;
break;
}
return level;
}
);
export const selectUpdatingCollection = createSelector(
selectState,
state => state.updatingCollection
);
export const selectUpdateCollectionError = createSelector(
selectState,
state => state.updateCollectionError
);
export const selectCreatingCollection = createSelector(
selectState,
state => state.creatingCollection
);
export const selectCreateCollectionError = createSelector(
selectState,
state => state.createCollectionError
);

View file

@ -0,0 +1,311 @@
// @flow
import fromEntries from '@ungap/from-entries';
import { createSelector } from 'reselect';
import {
selectMyCollectionIds,
makeSelectClaimForUri,
selectClaimsByUri,
} from 'redux/selectors/claims';
import { parseURI } from 'lbryURI';
// Root accessor for the collections slice of the redux store.
const selectState = (state: { collections: CollectionState }) => state.collections;
// Ids stored under the `saved` key of the collections slice.
export const selectSavedCollectionIds = createSelector(
  selectState,
  (collections) => collections.saved
);
// Built-in (app-provided) collections, keyed by collection id.
export const selectBuiltinCollections = createSelector(
  selectState,
  (collections) => collections.builtin
);
// Collections that have been resolved from claims, keyed by collection id.
export const selectResolvedCollections = createSelector(
  selectState,
  (collections) => collections.resolved
);
// Locally-created collections that have not been published as claims.
export const selectMyUnpublishedCollections = createSelector(
  selectState,
  (collections) => collections.unpublished
);
// Published collections with local, not-yet-published edits.
export const selectMyEditedCollections = createSelector(
  selectState,
  (collections) => collections.edited
);
// Collections whose publish/update transaction is still pending confirmation.
export const selectPendingCollections = createSelector(
  selectState,
  (collections) => collections.pending
);
export const makeSelectEditedCollectionForId = (id: string) =>
createSelector(
selectMyEditedCollections,
eLists => eLists[id]
);
export const makeSelectPendingCollectionForId = (id: string) =>
createSelector(
selectPendingCollections,
pending => pending[id]
);
export const makeSelectPublishedCollectionForId = (id: string) =>
createSelector(
selectResolvedCollections,
rLists => rLists[id]
);
export const makeSelectUnpublishedCollectionForId = (id: string) =>
createSelector(
selectMyUnpublishedCollections,
rLists => rLists[id]
);
export const makeSelectCollectionIsMine = (id: string) =>
createSelector(
selectMyCollectionIds,
selectMyUnpublishedCollections,
selectBuiltinCollections,
(publicIds, privateIds, builtinIds) => {
return Boolean(publicIds.includes(id) || privateIds[id] || builtinIds[id]);
}
);
// My published collections: every pending collection, plus resolved
// collections whose id is mine and not already pending, with local edits
// taking precedence over both.
export const selectMyPublishedCollections = createSelector(
  selectResolvedCollections,
  selectPendingCollections,
  selectMyEditedCollections,
  selectMyCollectionIds,
  (resolved, pending, edited, myIds) => {
    const result = {};
    Object.entries(pending).forEach(([id, collection]) => {
      result[id] = collection;
    });
    Object.entries(resolved).forEach(([id, collection]) => {
      // $FlowFixMe
      if (myIds.includes(id) && !pending[id]) {
        result[id] = collection;
      }
    });
    // Edited entries win over pending/resolved versions of the same id.
    Object.entries(edited).forEach(([id, collection]) => {
      result[id] = collection;
    });
    return result;
  }
);
// Subset of my published collections whose type is the generic 'collection'.
export const selectMyPublishedMixedCollections = createSelector(
  selectMyPublishedCollections,
  (published) => {
    const entries = Object.entries(published).filter(
      // $FlowFixMe
      ([, collection]) => collection.type === 'collection'
    );
    return fromEntries(entries);
  }
);
// Subset of my published collections whose type is 'playlist'.
export const selectMyPublishedPlaylistCollections = createSelector(
  selectMyPublishedCollections,
  (published) => {
    const entries = Object.entries(published).filter(
      // $FlowFixMe
      ([, collection]) => collection.type === 'playlist'
    );
    return fromEntries(entries);
  }
);
export const makeSelectMyPublishedCollectionForId = (id: string) =>
createSelector(
selectMyPublishedCollections,
myPublishedCollections => myPublishedCollections[id]
);
// export const selectSavedCollections = createSelector(
// selectResolvedCollections,
// selectSavedCollectionIds,
// (resolved, myIds) => {
// const mySavedCollections = fromEntries(
// Object.entries(resolved).filter(([key, val]) => myIds.includes(key))
// );
// return mySavedCollections;
// }
// );
export const makeSelectIsResolvingCollectionForId = (id: string) =>
createSelector(
selectState,
state => {
return state.isResolvingCollectionById[id];
}
);
// Look up a collection by id across every store.
// Precedence (first non-falsy wins): builtin > unpublished > edited >
// pending > resolved — i.e. local/in-flight versions shadow the resolved
// on-chain version of the same id.
export const makeSelectCollectionForId = (id: string) =>
  createSelector(
    selectBuiltinCollections,
    selectResolvedCollections,
    selectMyUnpublishedCollections,
    selectMyEditedCollections,
    selectPendingCollections,
    (bLists, rLists, uLists, eLists, pLists) => {
      const collection = bLists[id] || uLists[id] || eLists[id] || pLists[id] || rLists[id];
      return collection;
    }
  );
export const makeSelectClaimUrlInCollection = (url: string) =>
createSelector(
selectBuiltinCollections,
selectMyPublishedCollections,
selectMyUnpublishedCollections,
selectMyEditedCollections,
selectPendingCollections,
(bLists, myRLists, uLists, eLists, pLists) => {
const collections = [bLists, uLists, eLists, myRLists, pLists];
const itemsInCollections = [];
collections.map(list => {
Object.entries(list).forEach(([key, value]) => {
// $FlowFixMe
value.items.map(item => {
itemsInCollections.push(item);
});
});
});
return itemsInCollections.includes(url);
}
);
export const makeSelectCollectionForIdHasClaimUrl = (id: string, url: string) =>
createSelector(
makeSelectCollectionForId(id),
collection => collection && collection.items.includes(url)
);
export const makeSelectUrlsForCollectionId = (id: string) =>
createSelector(
makeSelectCollectionForId(id),
collection => collection && collection.items
);
export const makeSelectClaimIdsForCollectionId = (id: string) =>
createSelector(
makeSelectCollectionForId(id),
collection => {
const items = (collection && collection.items) || [];
const ids = items.map(item => {
const { claimId } = parseURI(item);
return claimId;
});
return ids;
}
);
// Position of `url` inside collection `id`, honoring an active shuffle
// order from content.shuffleList when it targets this collection.
export const makeSelectIndexForUrlInCollection = (url: string, id: string) =>
  createSelector(
    state => state.content.shuffleList,
    makeSelectUrlsForCollectionId(id),
    makeSelectClaimForUri(url),
    (shuffleState, urls, claim) => {
      // Use the shuffled order only when the shuffle belongs to this collection.
      const shuffleUrls = shuffleState && shuffleState.collectionId === id && shuffleState.newUrls;
      const listUrls = shuffleUrls || urls;
      const index = listUrls && listUrls.findIndex(u => u === url);
      if (index > -1) {
        return index;
      } else if (claim) {
        // Fall back to matching by the claim's permanent url (the list may
        // store a different url form than the one we were handed).
        const index = listUrls && listUrls.findIndex(u => u === claim.permanent_url);
        if (index > -1) return index;
        // NOTE(review): returns the claim OBJECT, not an index, when the url
        // resolves but isn't in the list — callers compare with `> -1`, so
        // this behaves as "not found" there, but it looks suspect. Confirm
        // whether `null` was intended.
        return claim;
      }
      return null;
    }
  );
export const makeSelectPreviousUrlForCollectionAndUrl = (id: string, url: string) =>
createSelector(
state => state.content.shuffleList,
state => state.content.loopList,
makeSelectIndexForUrlInCollection(url, id),
makeSelectUrlsForCollectionId(id),
(shuffleState, loopState, index, urls) => {
const loopList = loopState && loopState.collectionId === id && loopState.loop;
const shuffleUrls = shuffleState && shuffleState.collectionId === id && shuffleState.newUrls;
if (index > -1) {
const listUrls = shuffleUrls || urls;
let nextUrl;
if (index === 0 && loopList) {
nextUrl = listUrls[listUrls.length - 1];
} else {
nextUrl = listUrls[index - 1];
}
return nextUrl || null;
} else {
return null;
}
}
);
export const makeSelectNextUrlForCollectionAndUrl = (id: string, url: string) =>
createSelector(
state => state.content.shuffleList,
state => state.content.loopList,
makeSelectIndexForUrlInCollection(url, id),
makeSelectUrlsForCollectionId(id),
(shuffleState, loopState, index, urls) => {
const loopList = loopState && loopState.collectionId === id && loopState.loop;
const shuffleUrls = shuffleState && shuffleState.collectionId === id && shuffleState.newUrls;
if (index > -1) {
const listUrls = shuffleUrls || urls;
// We'll get the next playble url
let remainingUrls = listUrls.slice(index + 1);
if (!remainingUrls.length && loopList) {
remainingUrls = listUrls.slice(0);
}
const nextUrl = remainingUrls && remainingUrls[0];
return nextUrl || null;
} else {
return null;
}
}
);
export const makeSelectNameForCollectionId = (id: string) =>
createSelector(
makeSelectCollectionForId(id),
collection => {
return (collection && collection.name) || '';
}
);
export const makeSelectCountForCollectionId = (id: string) =>
createSelector(
makeSelectCollectionForId(id),
collection => {
if (collection) {
if (collection.itemCount !== undefined) {
return collection.itemCount;
}
let itemCount = 0;
collection.items.map(item => {
if (item) {
itemCount += 1;
}
});
return itemCount;
}
return null;
}
);

View file

@ -1,36 +0,0 @@
// @flow
import { createSelector } from 'reselect';
const selectState = state => state.comments || {};
// Comments keyed by claim id.
export const selectCommentsById = createSelector(
  selectState,
  (comments) => comments.byId || {}
);
// Map of uri -> claim id for comment lookups.
// The original looped over the keys with a `claimId === null` conditional
// whose two branches assigned the identical value, so the whole body was
// just a shallow copy of `commentsByUri`.
export const selectCommentsByUri = createSelector(
  selectState,
  state => {
    const byUri = state.commentsByUri || {};
    return { ...byUri };
  }
);
export const makeSelectCommentsForUri = (uri: string) =>
createSelector(
selectCommentsById,
selectCommentsByUri,
(byId, byUri) => {
const claimId = byUri[uri];
return byId && byId[claimId];
}
);

View file

@ -1,14 +1,19 @@
// @flow
import { createSelector } from 'reselect'; import { createSelector } from 'reselect';
import { makeSelectClaimForUri } from 'redux/selectors/claims'; import { makeSelectClaimForUri } from 'redux/selectors/claims';
export const selectState = (state: any) => state.content || {}; export const selectState = (state: any) => state.content || {};
export const makeSelectContentPositionForUri = (uri: string) => export const makeSelectContentPositionForUri = (uri: string) =>
createSelector(selectState, makeSelectClaimForUri(uri), (state, claim) => { createSelector(
selectState,
makeSelectClaimForUri(uri),
(state, claim) => {
if (!claim) { if (!claim) {
return null; return null;
} }
const outpoint = `${claim.txid}:${claim.nout}`; const outpoint = `${claim.txid}:${claim.nout}`;
const id = claim.claim_id; const id = claim.claim_id;
return state.positions[id] ? state.positions[id][outpoint] : null; return state.positions[id] ? state.positions[id][outpoint] : null;
}); }
);

View file

@ -1,36 +0,0 @@
// @flow
import { createSelector } from 'reselect';
import { makeSelectFileInfoForUri } from 'redux/selectors/file_info';
// Shape of the root state slice consumed by these selectors.
type State = { file: FileState };

// Root slice for the file reducer; defaults to {} before init.
export const selectState = (state: State): FileState => state.file || {};

// Error message from the most recent failed purchase attempt.
export const selectPurchaseUriErrorMessage: (state: State) => string = createSelector(
selectState,
state => state.purchaseUriErrorMessage
);

// Uris whose purchase attempt failed.
export const selectFailedPurchaseUris: (state: State) => Array<string> = createSelector(
selectState,
state => state.failedPurchaseUris
);

// Uris purchased so far in this session.
export const selectPurchasedUris: (state: State) => Array<string> = createSelector(
selectState,
state => state.purchasedUris
);

// Most recently purchased uri.
// NOTE(review): returns null when the list is empty, so the flow return
// type is arguably ?string — confirm before tightening.
export const selectLastPurchasedUri: (state: State) => string = createSelector(
selectState,
state =>
state.purchasedUris.length > 0 ? state.purchasedUris[state.purchasedUris.length - 1] : null
);
export const makeSelectStreamingUrlForUri = (uri: string) =>
createSelector(
makeSelectFileInfoForUri(uri),
fileInfo => {
return fileInfo && fileInfo.streaming_url;
}
);

View file

@ -212,6 +212,7 @@ function filterFileInfos(fileInfos, query) {
const queryMatchRegExp = new RegExp(query, 'i'); const queryMatchRegExp = new RegExp(query, 'i');
return fileInfos.filter(fileInfo => { return fileInfos.filter(fileInfo => {
const { metadata } = fileInfo; const { metadata } = fileInfo;
return ( return (
(metadata.title && metadata.title.match(queryMatchRegExp)) || (metadata.title && metadata.title.match(queryMatchRegExp)) ||
(fileInfo.channel_name && fileInfo.channel_name.match(queryMatchRegExp)) || (fileInfo.channel_name && fileInfo.channel_name.match(queryMatchRegExp)) ||
@ -250,3 +251,11 @@ export const makeSelectSearchDownloadUrlsCount = query =>
return fileInfos && fileInfos.length ? filterFileInfos(fileInfos, query).length : 0; return fileInfos && fileInfos.length ? filterFileInfos(fileInfos, query).length : 0;
} }
); );
// Factory: selector for the streaming_url of the file info behind `uri`;
// yields a falsy value while no file info is available.
export const makeSelectStreamingUrlForUri = uri =>
  createSelector(
    makeSelectFileInfoForUri(uri),
    fileInfo => fileInfo && fileInfo.streaming_url
  );

View file

@ -1,8 +1,10 @@
import { createSelector } from 'reselect'; import { createSelector } from 'reselect';
export const selectState = (state) => state.notifications || {}; export const selectState = state => state.notifications || {};
export const selectToast = createSelector(selectState, (state) => { export const selectToast = createSelector(
selectState,
state => {
if (state.toasts.length) { if (state.toasts.length) {
const { id, params } = state.toasts[0]; const { id, params } = state.toasts[0];
return { return {
@ -12,9 +14,12 @@ export const selectToast = createSelector(selectState, (state) => {
} }
return null; return null;
}); }
);
export const selectError = createSelector(selectState, (state) => { export const selectError = createSelector(
selectState,
state => {
if (state.errors.length) { if (state.errors.length) {
const { error } = state.errors[0]; const { error } = state.errors[0];
return { return {
@ -23,4 +28,5 @@ export const selectError = createSelector(selectState, (state) => {
} }
return null; return null;
}); }
);

View file

@ -9,23 +9,9 @@ import {
const selectState = state => state.publish || {}; const selectState = state => state.publish || {};
export const selectPublishFormValues = createSelector(
selectState,
state => {
const { pendingPublish, ...formValues } = state;
return formValues;
}
);
export const makeSelectPublishFormValue = item =>
createSelector(
selectState,
state => state[item]
);
// Is the current uri the same as the uri they clicked "edit" on // Is the current uri the same as the uri they clicked "edit" on
export const selectIsStillEditing = createSelector( export const selectIsStillEditing = createSelector(
selectPublishFormValues, selectState,
publishState => { publishState => {
const { editingURI, uri } = publishState; const { editingURI, uri } = publishState;
@ -52,6 +38,34 @@ export const selectIsStillEditing = createSelector(
} }
); );
// Publish form values with the `languages` array collapsed to a single
// `language` field, falling back to the user's client-settings language
// and finally to 'en'.
export const selectPublishFormValues = createSelector(
selectState,
state => state.settings,
selectIsStillEditing,
(publishState, settingsState, isStillEditing) => {
const { languages, ...formValues } = publishState;
const language = languages && languages.length && languages[0];
const { clientSettings } = settingsState;
const { language: languageSet } = clientSettings;
let actualLanguage;
// Sets default if editing a claim with a set language
// NOTE(review): when `language` is unset and `languageSet` is truthy, both
// branches yield `languageSet`, so this whole conditional reduces to
// `language || languageSet || 'en'` — confirm before simplifying.
if (!language && isStillEditing && languageSet) {
actualLanguage = languageSet;
} else {
actualLanguage = language || languageSet || 'en';
}
return { ...formValues, language: actualLanguage };
}
);
// Factory: selector reading a single named field from the publish form state.
export const makeSelectPublishFormValue = item =>
createSelector(
selectState,
state => state[item]
);
export const selectMyClaimForUri = createSelector( export const selectMyClaimForUri = createSelector(
selectPublishFormValues, selectPublishFormValues,
selectIsStillEditing, selectIsStillEditing,

View file

@ -1,156 +0,0 @@
// @flow
import { SEARCH_TYPES, SEARCH_OPTIONS } from 'constants/search';
import { getSearchQueryString } from 'util/query-params';
import { normalizeURI, parseURI } from 'lbryURI';
import { createSelector } from 'reselect';
// Shape of the root state slice consumed by these selectors.
type State = { search: SearchState };

// Root slice for the search reducer.
export const selectState = (state: State): SearchState => state.search;

// Current text in the search box.
export const selectSearchValue: (state: State) => string = createSelector(
selectState,
state => state.searchQuery
);

// User-configurable search options (result count, media filters, ...).
export const selectSearchOptions: (state: State) => SearchOptions = createSelector(
selectState,
state => state.options
);

// Autocomplete suggestions fetched so far, keyed by query string.
export const selectSuggestions: (
state: State
) => { [string]: Array<SearchSuggestion> } = createSelector(
selectState,
state => state.suggestions
);

// True while a search request is in flight.
export const selectIsSearching: (state: State) => boolean = createSelector(
selectState,
state => state.searching
);

// Result uri lists for completed searches, keyed by normalized query.
export const selectSearchUrisByQuery: (
state: State
) => { [string]: Array<string> } = createSelector(
selectState,
state => state.urisByQuery
);
export const makeSelectSearchUris = (query: string): ((state: State) => Array<string>) =>
// replace statement below is kind of ugly, and repeated in doSearch action
createSelector(
selectSearchUrisByQuery,
byQuery => byQuery[query ? query.replace(/^lbry:\/\//i, '').replace(/\//, ' ') : query]
);
export const selectSearchBarFocused: boolean = createSelector(
selectState,
state => state.focused
);
export const selectSearchSuggestions: Array<SearchSuggestion> = createSelector(
selectSearchValue,
selectSuggestions,
(query: string, suggestions: { [string]: Array<string> }) => {
if (!query) {
return [];
}
const queryIsPrefix =
query === 'lbry:' || query === 'lbry:/' || query === 'lbry://' || query === 'lbry://@';
if (queryIsPrefix) {
// If it is a prefix, wait until something else comes to figure out what to do
return [];
} else if (query.startsWith('lbry://')) {
// If it starts with a prefix, don't show any autocomplete results
// They are probably typing/pasting in a lbry uri
return [
{
value: query,
type: query[7] === '@' ? SEARCH_TYPES.CHANNEL : SEARCH_TYPES.FILE,
},
];
}
let searchSuggestions = [];
try {
const uri = normalizeURI(query);
const { channelName, streamName, isChannel } = parseURI(uri);
searchSuggestions.push(
{
value: query,
type: SEARCH_TYPES.SEARCH,
},
{
value: uri,
shorthand: isChannel ? channelName : streamName,
type: isChannel ? SEARCH_TYPES.CHANNEL : SEARCH_TYPES.FILE,
}
);
} catch (e) {
searchSuggestions.push({
value: query,
type: SEARCH_TYPES.SEARCH,
});
}
searchSuggestions.push({
value: query,
type: SEARCH_TYPES.TAG,
});
const apiSuggestions = suggestions[query] || [];
if (apiSuggestions.length) {
searchSuggestions = searchSuggestions.concat(
apiSuggestions
.filter(suggestion => suggestion !== query)
.map(suggestion => {
// determine if it's a channel
try {
const uri = normalizeURI(suggestion);
const { channelName, streamName, isChannel } = parseURI(uri);
return {
value: uri,
shorthand: isChannel ? channelName : streamName,
type: isChannel ? SEARCH_TYPES.CHANNEL : SEARCH_TYPES.FILE,
};
} catch (e) {
// search result includes some character that isn't valid in claim names
return {
value: suggestion,
type: SEARCH_TYPES.SEARCH,
};
}
})
);
}
return searchSuggestions;
}
);
// Creates a query string based on the state in the search reducer.
// Can be overridden by passing in custom size/from values for other areas' pagination.
export const makeSelectQueryWithOptions = (
customQuery: ?string,
customSize: ?number,
customFrom: ?number,
isBackgroundSearch: boolean = false // If it's a background search, don't use the users settings
) =>
createSelector(
selectSearchValue,
selectSearchOptions,
(query, options) => {
// customSize wins over the user's configured result count.
const size = customSize || options[SEARCH_OPTIONS.RESULT_COUNT];
const queryString = getSearchQueryString(
customQuery || query,
{ ...options, size, from: customFrom },
!isBackgroundSearch
);
return queryString;
}
);

View file

@ -1,47 +0,0 @@
// @flow
import { createSelector } from 'reselect';
const selectState = (state: { tags: TagState }) => state.tags || {};
export const selectKnownTagsByName = createSelector(
selectState,
(state: TagState): KnownTags => state.knownTags
);
export const selectFollowedTagsList = createSelector(
selectState,
(state: TagState): Array<string> => state.followedTags.filter(tag => typeof tag === 'string')
);
export const selectFollowedTags = createSelector(
selectFollowedTagsList,
(followedTags: Array<string>): Array<Tag> =>
followedTags
.map(tag => ({ name: tag.toLowerCase() }))
.sort((a, b) => a.name.localeCompare(b.name))
);
export const selectUnfollowedTags = createSelector(
selectKnownTagsByName,
selectFollowedTagsList,
(tagsByName: KnownTags, followedTags: Array<string>): Array<Tag> => {
const followedTagsSet = new Set(followedTags);
let tagsToReturn = [];
Object.keys(tagsByName).forEach(key => {
if (!followedTagsSet.has(key)) {
const { name } = tagsByName[key];
tagsToReturn.push({ name: name.toLowerCase() });
}
});
return tagsToReturn;
}
);
export const makeSelectIsFollowingTag = (tag: string) =>
createSelector(
selectFollowedTags,
followedTags => {
return followedTags.some(followedTag => followedTag.name === tag.toLowerCase());
}
);

View file

@ -1,7 +1,8 @@
import { createSelector } from 'reselect'; import { createSelector } from 'reselect';
import * as TRANSACTIONS from 'constants/transaction_types'; import * as TRANSACTIONS from 'constants/transaction_types';
import { PAGE_SIZE, LATEST_PAGE_SIZE } from 'constants/transaction_list'; import { PAGE_SIZE, LATEST_PAGE_SIZE } from 'constants/transaction_list';
import { selectClaimIdsByUri } from 'redux/selectors/claims';
import parseData from 'util/parse-data';
export const selectState = state => state.wallet || {}; export const selectState = state => state.wallet || {};
export const selectWalletState = selectState; export const selectWalletState = selectState;
@ -21,6 +22,33 @@ export const selectWalletEncryptSucceeded = createSelector(
state => state.walletEncryptSucceded state => state.walletEncryptSucceded
); );
// Pending (unconfirmed) support transactions, keyed by claim id.
export const selectPendingSupportTransactions = createSelector(
selectState,
state => state.pendingSupportTransactions
);

// Other pending txos awaiting confirmation.
export const selectPendingOtherTransactions = createSelector(
selectState,
state => state.pendingTxos
);

// Error from the most recent abandon claim/support attempt, if any.
export const selectAbandonClaimSupportError = createSelector(
selectState,
state => state.abandonClaimSupportError
);
// Factory: selector for the pending (unconfirmed) effective support amount
// on the claim behind `uri`; undefined when nothing is pending for it.
export const makeSelectPendingAmountByUri = uri =>
  createSelector(
    selectClaimIdsByUri,
    selectPendingSupportTransactions,
    (claimIdsByUri, pendingSupports) => {
      // FIX: direct own-property lookup instead of the previous O(n)
      // Object.entries().find() scan over every known uri.
      const claimId = Object.prototype.hasOwnProperty.call(claimIdsByUri, uri)
        ? claimIdsByUri[uri]
        : undefined;
      const pendingSupport = claimId && pendingSupports[claimId];
      return pendingSupport ? pendingSupport.effective : undefined;
    }
  );
export const selectWalletEncryptResult = createSelector( export const selectWalletEncryptResult = createSelector(
selectState, selectState,
state => state.walletEncryptResult state => state.walletEncryptResult
@ -240,6 +268,27 @@ export const selectIsFetchingTransactions = createSelector(
state => state.fetchingTransactions state => state.fetchingTransactions
); );
/**
 * CSV export of 'selectTransactionItems'.
 * Returns undefined when there are no transactions, null when the data
 * could not be serialized, otherwise the CSV string.
 */
export const selectTransactionsFile = createSelector(
  selectTransactionItems,
  transactions => {
    const hasData = Boolean(transactions) && transactions.length > 0;
    if (!hasData) {
      // No data.
      return undefined;
    }
    // parseData yields a falsy value on invalid data; normalize to null.
    return parseData(transactions, 'csv') || null;
  }
);
export const selectIsSendingSupport = createSelector( export const selectIsSendingSupport = createSelector(
selectState, selectState,
state => state.sendingSupport state => state.sendingSupport
@ -300,6 +349,36 @@ export const selectFilteredTransactions = createSelector(
} }
); );
// Last-used txo fetch parameters (page, filters, ...).
export const selectTxoPageParams = createSelector(
selectState,
state => state.txoFetchParams
);

// Items on the current txo page; [] until fetched.
export const selectTxoPage = createSelector(
selectState,
state => (state.txoPage && state.txoPage.items) || []
);

// Current txo page number; defaults to 1.
export const selectTxoPageNumber = createSelector(
selectState,
state => (state.txoPage && state.txoPage.page) || 1
);

// Total txo item count; defaults to 1 before any fetch.
export const selectTxoItemCount = createSelector(
selectState,
state => (state.txoPage && state.txoPage.total_items) || 1
);

// Error from the most recent txo fetch, if any.
export const selectFetchingTxosError = createSelector(
selectState,
state => state.fetchingTxosError
);

// True while a txo fetch is in flight.
export const selectIsFetchingTxos = createSelector(
selectState,
state => state.fetchingTxos
);
export const makeSelectFilteredTransactionsForPage = (page = 1) => export const makeSelectFilteredTransactionsForPage = (page = 1) =>
createSelector( createSelector(
selectFilteredTransactions, selectFilteredTransactions,
@ -328,3 +407,33 @@ export const selectIsWalletReconnecting = createSelector(
selectState, selectState,
state => state.walletReconnecting state => state.walletReconnecting
); );
// True while utxo counts are being fetched.
export const selectIsFetchingUtxoCounts = createSelector(
selectState,
state => state.fetchingUtxoCounts
);

// True while a consolidate-utxos transaction is in progress.
export const selectIsConsolidatingUtxos = createSelector(
selectState,
state => state.consolidatingUtxos
);

// True while a mass tip-claiming transaction is in progress.
export const selectIsMassClaimingTips = createSelector(
selectState,
state => state.massClaimingTips
);

// Txid of the in-flight consolidate transaction, if any.
export const selectPendingConsolidateTxid = createSelector(
selectState,
state => state.pendingConsolidateTxid
);

// Txid of the in-flight mass-claim transaction, if any.
export const selectPendingMassClaimTxid = createSelector(
selectState,
state => state.pendingMassClaimTxid
);

// Utxo counts as reported by the wallet.
export const selectUtxoCounts = createSelector(
selectState,
state => state.utxoCounts
);

View file

@ -51,3 +51,20 @@ export function concatClaims(
return claims; return claims;
} }
export function filterClaims(claims: Array<Claim>, query: ?string): Array<Claim> {
if (query) {
const queryMatchRegExp = new RegExp(query, 'i');
return claims.filter(claim => {
const { value } = claim;
return (
(value.title && value.title.match(queryMatchRegExp)) ||
(claim.signing_channel && claim.signing_channel.name.match(queryMatchRegExp)) ||
(claim.name && claim.name.match(queryMatchRegExp))
);
});
}
return claims;
}

7
src/util/merge-claim.js Normal file
View file

@ -0,0 +1,7 @@
/*
 * Merge a pending claim over a (maybe) resolved claim. Pending fields win,
 * except `meta`, which is always taken from the resolved claim:
 * new claim = { ...maybeResolvedClaim, ...pendingClaim, meta: maybeResolvedClaim['meta'] }
 */
export default function mergeClaims(maybeResolved, pending) {
  const merged = Object.assign({}, maybeResolved, pending);
  merged.meta = maybeResolved.meta;
  return merged;
}

61
src/util/parse-data.js Normal file
View file

@ -0,0 +1,61 @@
// JSON serializer: strip filtered keys from every item and pretty-print.
const parseJson = (data, filters = []) => {
  const list = data.map(item => {
    const temp = {};
    // Apply filters
    Object.entries(item).forEach(([key, value]) => {
      if (!filters.includes(key)) temp[key] = value;
    });
    return temp;
  });
  // Beautify JSON
  return JSON.stringify(list, null, '\t');
};

// CSV Parser
// No need for an external module:
// https://gist.github.com/btzr-io/55c3450ea3d709fc57540e762899fb85
// NOTE(review): values are not quoted/escaped, so a value containing a comma
// or newline will break the row layout — confirm inputs are flat scalars.
const parseCsv = (data, filters = []) => {
  // Header row: keys of the first item, minus filtered keys.
  const getHeaders = item => {
    const list = [];
    // Apply filters
    Object.entries(item).forEach(([key]) => {
      if (!filters.includes(key)) list.push(key);
    });
    // return headers
    return list.join(',');
  };
  // Data rows: one comma-joined line per item.
  const getData = list =>
    list
      .map(item => {
        const row = [];
        // Apply filters
        Object.entries(item).forEach(([key, value]) => {
          if (!filters.includes(key)) row.push(value);
        });
        // return rows
        return row.join(',');
      })
      .join('\n');
  // FIX: the previous template (`${headers} \n ${rows}`) left a stray space
  // at the end of the header line and at the start of the first data row,
  // corrupting the first/last columns when the CSV was parsed back.
  return `${getHeaders(data[0])}\n${getData(data)}`;
};

// Serialize `data` (array of flat objects) to 'csv' or 'json'.
// Returns undefined for empty/invalid input or an unknown format.
const parseData = (data, format, filters = []) => {
  // Check for validation
  const valid = data && data[0] && format;
  // Pick a format
  const formats = {
    csv: list => parseCsv(list, filters),
    json: list => parseJson(list, filters),
  };
  // Return parsed data: JSON || CSV
  return valid && formats[format] ? formats[format](data) : undefined;
};

export default parseData;

View file

@ -1,8 +1,4 @@
// @flow // @flow
import { SEARCH_OPTIONS } from 'constants/search';
const DEFAULT_SEARCH_RESULT_FROM = 0;
const DEFAULT_SEARCH_SIZE = 20;
export function parseQueryParams(queryString: string) { export function parseQueryParams(queryString: string) {
if (queryString === '') return {}; if (queryString === '') return {};
@ -32,40 +28,3 @@ export function toQueryString(params: { [string]: string | number }) {
return parts.join('&'); return parts.join('&');
} }
export const getSearchQueryString = (
query: string,
options: any = {},
includeUserOptions: boolean = false
) => {
const encodedQuery = encodeURIComponent(query);
const queryParams = [
`s=${encodedQuery}`,
`size=${options.size || DEFAULT_SEARCH_SIZE}`,
`from=${options.from || DEFAULT_SEARCH_RESULT_FROM}`,
];
if (includeUserOptions) {
const claimType = options[SEARCH_OPTIONS.CLAIM_TYPE];
queryParams.push(`claimType=${claimType}`);
// If they are only searching for channels, strip out the media info
if (!claimType.includes(SEARCH_OPTIONS.INCLUDE_CHANNELS)) {
queryParams.push(
`mediaType=${[
SEARCH_OPTIONS.MEDIA_FILE,
SEARCH_OPTIONS.MEDIA_AUDIO,
SEARCH_OPTIONS.MEDIA_VIDEO,
SEARCH_OPTIONS.MEDIA_TEXT,
SEARCH_OPTIONS.MEDIA_IMAGE,
SEARCH_OPTIONS.MEDIA_APPLICATION,
].reduce(
(acc, currentOption) => (options[currentOption] ? `${acc}${currentOption},` : acc),
''
)}`
);
}
}
return queryParams.join('&');
};

View file

@ -0,0 +1,19 @@
// Jest transform: run test sources through babel with the project's presets.
// `modules: false` keeps ES modules in @babel/env; the commonjs transform
// plugin then converts them for the jest runtime.
const babelConfig = {
  babelrc: false,
  presets: [
    [
      "@babel/env",
      {
        modules: false
      }
    ],
    "@babel/react"
  ],
  plugins: [
    ["@babel/plugin-proposal-decorators", { legacy: true }],
    ["@babel/plugin-proposal-class-properties", { loose: true }],
    "@babel/plugin-transform-flow-strip-types",
    "transform-es2015-modules-commonjs"
  ]
};

module.exports = require("babel-jest").createTransformer(babelConfig);

44
tests/parseURI.test.js Normal file
View file

@ -0,0 +1,44 @@
import * as lbryURI from '../src/lbryURI.js';
import {describe, test} from "@jest/globals";

// Unit tests for lbryURI.parseURI covering channel, channel/stream,
// protocol-less, invalid-protocol, and plain-search inputs.
// FIX: removed stray doubled semicolons and switched never-reassigned
// results from `let` to `const`.
describe('parseURI tests', () => {
  test('Correctly parses channel URI', () => {
    const result = lbryURI.parseURI('lbry://@ChannelName');

    expect(result.isChannel).toBeTruthy();
    expect(result.path).toStrictEqual("@ChannelName");
    expect(result.channelName).toStrictEqual("ChannelName");
    expect(result.claimName).toStrictEqual("@ChannelName");
  });

  test('Correctly parses test case channel/stream lbry URI', () => {
    const result = lbryURI.parseURI('lbry://@CryptoGnome#1/whale-pool-how-to#e');

    expect(result.isChannel).toStrictEqual(false);
    expect(result.path).toStrictEqual("@CryptoGnome#1/whale-pool-how-to#e");
    expect(result.claimId).toStrictEqual("1");
    expect(result.streamClaimId).toStrictEqual("e");
    expect(result.streamName).toStrictEqual("whale-pool-how-to");
    expect(result.channelName).toStrictEqual("CryptoGnome");
    expect(result.contentName).toStrictEqual("whale-pool-how-to");
  });

  test('Correctly parses lbry URI without protocol', () => {
    const result = lbryURI.parseURI('@CryptoGnome#1/whale-pool-how-to#e');

    expect(result.isChannel).toStrictEqual(false);
    expect(result.streamName).toStrictEqual("whale-pool-how-to");
    expect(result.channelName).toStrictEqual("CryptoGnome");
  });

  test('Throws error for http protocol', () => {
    // TODO - this catches wrong type of error..
    const uri = 'http://@CryptoGnome#1/whale-pool-how-to#e';
    expect(() => lbryURI.parseURI(uri)).toThrowError();
  });

  test('Correctly parses search', () => {
    const result = lbryURI.parseURI('CryptoGn%ome');

    expect(result.isChannel).toStrictEqual(false);
    expect(result.path).toStrictEqual("CryptoGn%ome");
    expect(result.contentName).toStrictEqual("CryptoGn%ome");
  });
});

3331
yarn.lock

File diff suppressed because it is too large Load diff