Compare commits: master...channel_si
1 commit: b2cd195b14
38 changed files with 626 additions and 4586 deletions
LICENSE (2 lines changed)
@@ -1,6 +1,6 @@
The MIT License (MIT)

Copyright (c) 2017-2021 LBRY Inc
Copyright (c) 2017-2020 LBRY Inc

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish,
dist/bundle.es.js (2255 lines changed, vendored)
File diff suppressed because one or more lines are too long
dist/flow-typed/Claim.js (80 lines changed, vendored; identical to flow-typed/Claim.js below)
dist/flow-typed/CoinSwap.js (29 lines changed, vendored; identical to flow-typed/CoinSwap.js below)
dist/flow-typed/Collections.js (34 lines changed, vendored; identical to flow-typed/Collections.js below)
dist/flow-typed/Lbry.js (53 lines changed, vendored; identical to flow-typed/Lbry.js below)
dist/flow-typed/npm/from-entries.js (5 lines changed, vendored; identical to flow-typed/npm/from-entries.js below)
flow-typed/Claim.js (80 lines changed, vendored)
@@ -1,15 +1,11 @@
// @flow

declare type Claim = StreamClaim | ChannelClaim | CollectionClaim;
declare type Claim = StreamClaim | ChannelClaim;

declare type ChannelClaim = GenericClaim & {
  value: ChannelMetadata,
};

declare type CollectionClaim = GenericClaim & {
  value: CollectionMetadata,
};

declare type StreamClaim = GenericClaim & {
  value: StreamMetadata,
};

@@ -34,7 +30,7 @@ declare type GenericClaim = {
  short_url: string, // permanent_url with short id, no channel
  txid: string, // unique tx id
  type: 'claim' | 'update' | 'support',
  value_type: 'stream' | 'channel' | 'collection',
  value_type: 'stream' | 'channel',
  signing_channel?: ChannelClaim,
  reposted_claim?: GenericClaim,
  repost_channel_url?: string,

@@ -78,10 +74,6 @@ declare type ChannelMetadata = GenericMetadata & {
  featured?: Array<string>,
};

declare type CollectionMetadata = GenericMetadata & {
  claims: Array<string>,
}

declare type StreamMetadata = GenericMetadata & {
  license?: string, // License "title" ex: Creative Commons, Custom copyright
  license_url?: string, // Link to full license

@@ -144,71 +136,3 @@ declare type PurchaseReceipt = {
  txid: string,
  type: 'purchase',
};

declare type ClaimActionResolveInfo = {
  [string]: {
    stream: ?StreamClaim,
    channel: ?ChannelClaim,
    claimsInChannel: ?number,
    collection: ?CollectionClaim,
  },
}

declare type ChannelUpdateParams = {
  claim_id: string,
  bid?: string,
  title?: string,
  cover_url?: string,
  thumbnail_url?: string,
  description?: string,
  website_url?: string,
  email?: string,
  tags?: Array<string>,
  replace?: boolean,
  languages?: Array<string>,
  locations?: Array<string>,
  blocking?: boolean,
}

declare type ChannelPublishParams = {
  name: string,
  bid: string,
  blocking?: true,
  title?: string,
  cover_url?: string,
  thumbnail_url?: string,
  description?: string,
  website_url?: string,
  email?: string,
  tags?: Array<string>,
  languages?: Array<string>,
}

declare type CollectionUpdateParams = {
  claim_id: string,
  claim_ids?: Array<string>,
  bid?: string,
  title?: string,
  cover_url?: string,
  thumbnail_url?: string,
  description?: string,
  website_url?: string,
  email?: string,
  tags?: Array<string>,
  replace?: boolean,
  languages?: Array<string>,
  locations?: Array<string>,
  blocking?: boolean,
}

declare type CollectionPublishParams = {
  name: string,
  bid: string,
  claim_ids: Array<string>,
  blocking?: true,
  title?: string,
  thumbnail_url?: string,
  description?: string,
  tags?: Array<string>,
  languages?: Array<string>,
}
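Note added for illustration (not part of the diff): the Claim union declared above is distinguished by the value_type field on GenericClaim. A minimal sketch of narrowing it in consuming code; the helper name is hypothetical, and the cast reflects that these intersection types are not exact disjoint unions:

// @flow
function channelOf(claim: Claim): ?ChannelClaim {
  if (claim.value_type === 'channel') {
    // value_type === 'channel' implies claim.value is ChannelMetadata
    return ((claim: any): ChannelClaim);
  }
  // otherwise fall back to the channel that signed the claim, if any
  return claim.signing_channel || null;
}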
flow-typed/CoinSwap.js (29 lines changed, vendored)
@@ -1,29 +0,0 @@
declare type CoinSwapInfo = {
  chargeCode: string,
  coins: Array<string>,
  sendAddresses: { [string]: string},
  sendAmounts: { [string]: any },
  lbcAmount: number,
  status?: {
    status: string,
    receiptCurrency: string,
    receiptTxid: string,
    lbcTxid: string,
  },
}

declare type CoinSwapState = {
  coinSwaps: Array<CoinSwapInfo>,
};

declare type CoinSwapAddAction = {
  type: string,
  data: CoinSwapInfo,
};

declare type CoinSwapRemoveAction = {
  type: string,
  data: {
    chargeCode: string,
  },
};
flow-typed/Collections.js (34 lines changed, vendored)
@@ -1,34 +0,0 @@
declare type Collection = {
  id: string,
  items: Array<?string>,
  name: string,
  type: string,
  updatedAt: number,
  totalItems?: number,
  sourceId?: string, // if copied, claimId of original collection
};

declare type CollectionState = {
  unpublished: CollectionGroup,
  resolved: CollectionGroup,
  pending: CollectionGroup,
  edited: CollectionGroup,
  builtin: CollectionGroup,
  saved: Array<string>,
  isResolvingCollectionById: { [string]: boolean },
  error?: string | null,
};

declare type CollectionGroup = {
  [string]: Collection,
}

declare type CollectionEditParams = {
  claims?: Array<Claim>,
  remove?: boolean,
  claimIds?: Array<string>,
  replace?: boolean,
  order?: { from: number, to: number },
  type?: string,
  name?: string,
}
flow-typed/Lbry.js (53 lines changed, vendored)
@@ -7,6 +7,10 @@ declare type StatusResponse = {
    download_progress: number,
    downloading_headers: boolean,
  },
  connection_status: {
    code: string,
    message: string,
  },
  dht: {
    node_id: string,
    peers_in_routing_table: number,

@@ -41,7 +45,6 @@ declare type StatusResponse = {
    redirects: {},
  },
  wallet: ?{
    connected: string,
    best_blockhash: string,
    blocks: number,
    blocks_behind: number,

@@ -75,7 +78,7 @@ declare type BalanceResponse = {

declare type ResolveResponse = {
  // Keys are the url(s) passed to resolve
  [string]: { error?: {}, stream?: StreamClaim, channel?: ChannelClaim, collection?: CollectionClaim, claimsInChannel?: number },
  [string]: { error?: {}, stream?: StreamClaim, channel?: ChannelClaim, claimsInChannel?: number },
};

declare type GetResponse = FileListItem & { error?: string };

@@ -124,6 +127,14 @@ declare type ChannelUpdateResponse = GenericTxResponse & {
declare type CommentCreateResponse = Comment;
declare type CommentUpdateResponse = Comment;

declare type CommentListResponse = {
  items: Array<Comment>,
  page: number,
  page_size: number,
  total_items: number,
  total_pages: number,
};

declare type MyReactions = {
  // Keys are the commentId
  [string]: Array<string>,

@@ -170,37 +181,6 @@ declare type ChannelSignResponse = {
  signing_ts: string,
};

declare type CollectionCreateResponse = {
  outputs: Array<Claim>,
  page: number,
  page_size: number,
  total_items: number,
  total_pages: number,
}

declare type CollectionListResponse = {
  items: Array<Claim>,
  page: number,
  page_size: number,
  total_items: number,
  total_pages: number,
};

declare type CollectionResolveResponse = {
  items: Array<Claim>,
  total_items: number,
};

declare type CollectionResolveOptions = {
  claim_id: string,
};

declare type CollectionListOptions = {
  page: number,
  page_size: number,
  resolve?: boolean,
};

declare type FileListResponse = {
  items: Array<FileListItem>,
  page: number,

@@ -319,10 +299,6 @@ declare type LbryTypes = {
  support_abandon: (params: {}) => Promise<SupportAbandonResponse>,
  stream_repost: (params: StreamRepostOptions) => Promise<StreamRepostResponse>,
  purchase_list: (params: PurchaseListOptions) => Promise<PurchaseListResponse>,
  collection_resolve: (params: CollectionResolveOptions) => Promise<CollectionResolveResponse>,
  collection_list: (params: CollectionListOptions) => Promise<CollectionListResponse>,
  collection_create: (params: {}) => Promise<CollectionCreateResponse>,
  collection_update: (params: {}) => Promise<CollectionCreateResponse>,

  // File fetching and manipulation
  file_list: (params: {}) => Promise<FileListResponse>,

@@ -335,6 +311,8 @@ declare type LbryTypes = {
  preference_set: (params: {}) => Promise<any>,

  // Commenting
  comment_list: (params: {}) => Promise<CommentListResponse>,
  comment_create: (params: {}) => Promise<CommentCreateResponse>,
  comment_update: (params: {}) => Promise<CommentUpdateResponse>,
  comment_hide: (params: {}) => Promise<CommentHideResponse>,
  comment_abandon: (params: {}) => Promise<CommentAbandonResponse>,

@@ -351,7 +329,6 @@ declare type LbryTypes = {
  address_unused: (params: {}) => Promise<string>, // New address
  address_list: (params: {}) => Promise<string>,
  transaction_list: (params: {}) => Promise<TxListResponse>,
  txo_list: (params: {}) => Promise<any>,

  // Sync
  sync_hash: (params: {}) => Promise<string>,
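Note added for illustration (not part of the diff): the LbryTypes methods declared above are thin typed wrappers over daemon JSON-RPC calls (see src/lbry.js later in this diff), and the response shapes they promise are the types declared earlier in this file. A minimal sketch of a call site, using a placeholder URL:

// @flow
Lbry.resolve({ urls: ['lbry://example'] }).then((response: ResolveResponse) => {
  // ResolveResponse is keyed by the url(s) passed to resolve
  const entry = response['lbry://example'];
  if (entry && entry.channel) {
    // entry.channel is a ChannelClaim; entry.claimsInChannel may also be present
  }
});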
flow-typed/npm/from-entries.js (5 lines changed, vendored)
@@ -1,5 +0,0 @@
// @flow

declare module '@ungap/from-entries' {
  declare module.exports: any;
}
package.json
@@ -29,7 +29,6 @@
    "test": "jest"
  },
  "dependencies": {
    "@ungap/from-entries": "^0.2.1",
    "proxy-polyfill": "0.1.6",
    "reselect": "^3.0.0",
    "uuid": "^8.3.1"
src/constants/action_types.js
@@ -82,9 +82,6 @@ export const SET_DRAFT_TRANSACTION_ADDRESS = 'SET_DRAFT_TRANSACTION_ADDRESS';
export const FETCH_UTXO_COUNT_STARTED = 'FETCH_UTXO_COUNT_STARTED';
export const FETCH_UTXO_COUNT_COMPLETED = 'FETCH_UTXO_COUNT_COMPLETED';
export const FETCH_UTXO_COUNT_FAILED = 'FETCH_UTXO_COUNT_FAILED';
export const TIP_CLAIM_MASS_STARTED = 'TIP_CLAIM_MASS_STARTED';
export const TIP_CLAIM_MASS_COMPLETED = 'TIP_CLAIM_MASS_COMPLETED';
export const TIP_CLAIM_MASS_FAILED = 'TIP_CLAIM_MASS_FAILED';
export const DO_UTXO_CONSOLIDATE_STARTED = 'DO_UTXO_CONSOLIDATE_STARTED';
export const DO_UTXO_CONSOLIDATE_COMPLETED = 'DO_UTXO_CONSOLIDATE_COMPLETED';
export const DO_UTXO_CONSOLIDATE_FAILED = 'DO_UTXO_CONSOLIDATE_FAILED';

@@ -102,9 +99,6 @@ export const ABANDON_CLAIM_SUCCEEDED = 'ABANDON_CLAIM_SUCCEEDED';
export const FETCH_CHANNEL_LIST_STARTED = 'FETCH_CHANNEL_LIST_STARTED';
export const FETCH_CHANNEL_LIST_COMPLETED = 'FETCH_CHANNEL_LIST_COMPLETED';
export const FETCH_CHANNEL_LIST_FAILED = 'FETCH_CHANNEL_LIST_FAILED';
export const FETCH_COLLECTION_LIST_STARTED = 'FETCH_COLLECTION_LIST_STARTED';
export const FETCH_COLLECTION_LIST_COMPLETED = 'FETCH_COLLECTION_LIST_COMPLETED';
export const FETCH_COLLECTION_LIST_FAILED = 'FETCH_COLLECTION_LIST_FAILED';
export const CREATE_CHANNEL_STARTED = 'CREATE_CHANNEL_STARTED';
export const CREATE_CHANNEL_COMPLETED = 'CREATE_CHANNEL_COMPLETED';
export const CREATE_CHANNEL_FAILED = 'CREATE_CHANNEL_FAILED';

@@ -114,7 +108,6 @@ export const UPDATE_CHANNEL_FAILED = 'UPDATE_CHANNEL_FAILED';
export const IMPORT_CHANNEL_STARTED = 'IMPORT_CHANNEL_STARTED';
export const IMPORT_CHANNEL_COMPLETED = 'IMPORT_CHANNEL_COMPLETED';
export const IMPORT_CHANNEL_FAILED = 'IMPORT_CHANNEL_FAILED';
export const CLEAR_CHANNEL_ERRORS = 'CLEAR_CHANNEL_ERRORS';
export const PUBLISH_STARTED = 'PUBLISH_STARTED';
export const PUBLISH_COMPLETED = 'PUBLISH_COMPLETED';
export const PUBLISH_FAILED = 'PUBLISH_FAILED';

@@ -133,6 +126,7 @@ export const CLAIM_REPOST_STARTED = 'CLAIM_REPOST_STARTED';
export const CLAIM_REPOST_COMPLETED = 'CLAIM_REPOST_COMPLETED';
export const CLAIM_REPOST_FAILED = 'CLAIM_REPOST_FAILED';
export const CLEAR_REPOST_ERROR = 'CLEAR_REPOST_ERROR';
export const CLEAR_CHANNEL_ERRORS = 'CLEAR_CHANNEL_ERRORS';
export const CHECK_PUBLISH_NAME_STARTED = 'CHECK_PUBLISH_NAME_STARTED';
export const CHECK_PUBLISH_NAME_COMPLETED = 'CHECK_PUBLISH_NAME_COMPLETED';
export const UPDATE_PENDING_CLAIMS = 'UPDATE_PENDING_CLAIMS';

@@ -145,27 +139,6 @@ export const PURCHASE_LIST_STARTED = 'PURCHASE_LIST_STARTED';
export const PURCHASE_LIST_COMPLETED = 'PURCHASE_LIST_COMPLETED';
export const PURCHASE_LIST_FAILED = 'PURCHASE_LIST_FAILED';

export const COLLECTION_PUBLISH_STARTED = 'COLLECTION_PUBLISH_STARTED';
export const COLLECTION_PUBLISH_COMPLETED = 'COLLECTION_PUBLISH_COMPLETED';
export const COLLECTION_PUBLISH_FAILED = 'COLLECTION_PUBLISH_FAILED';
export const COLLECTION_PUBLISH_UPDATE_STARTED = 'COLLECTION_PUBLISH_UPDATE_STARTED';
export const COLLECTION_PUBLISH_UPDATE_COMPLETED = 'COLLECTION_PUBLISH_UPDATE_COMPLETED';
export const COLLECTION_PUBLISH_UPDATE_FAILED = 'COLLECTION_PUBLISH_UPDATE_FAILED';
export const COLLECTION_PUBLISH_ABANDON_STARTED = 'COLLECTION_PUBLISH_ABANDON_STARTED';
export const COLLECTION_PUBLISH_ABANDON_COMPLETED = 'COLLECTION_PUBLISH_ABANDON_COMPLETED';
export const COLLECTION_PUBLISH_ABANDON_FAILED = 'COLLECTION_PUBLISH_ABANDON_FAILED';
export const CLEAR_COLLECTION_ERRORS = 'CLEAR_COLLECTION_ERRORS';
export const COLLECTION_ITEMS_RESOLVE_STARTED = 'COLLECTION_ITEMS_RESOLVE_STARTED';
export const COLLECTION_ITEMS_RESOLVE_COMPLETED = 'COLLECTION_ITEMS_RESOLVE_COMPLETED';
export const COLLECTION_ITEMS_RESOLVE_FAILED = 'COLLECTION_ITEMS_RESOLVE_FAILED';
export const COLLECTION_NEW = 'COLLECTION_NEW';
export const COLLECTION_DELETE = 'COLLECTION_DELETE';
export const COLLECTION_PENDING = 'COLLECTION_PENDING';
export const COLLECTION_EDIT = 'COLLECTION_EDIT';
export const COLLECTION_COPY = 'COLLECTION_COPY';
export const COLLECTION_SAVE = 'COLLECTION_SAVE';
export const COLLECTION_ERROR = 'COLLECTION_ERROR';

// Comments
export const COMMENT_LIST_STARTED = 'COMMENT_LIST_STARTED';
export const COMMENT_LIST_COMPLETED = 'COMMENT_LIST_COMPLETED';
src/constants/claim.js
@@ -3,9 +3,3 @@ export const MINIMUM_PUBLISH_BID = 0.00000001;
export const CHANNEL_ANONYMOUS = 'anonymous';
export const CHANNEL_NEW = 'new';
export const PAGE_SIZE = 20;

export const LEVEL_1_STAKED_AMOUNT = 0;
export const LEVEL_2_STAKED_AMOUNT = 1;
export const LEVEL_3_STAKED_AMOUNT = 50;
export const LEVEL_4_STAKED_AMOUNT = 250;
export const LEVEL_5_STAKED_AMOUNT = 1000;
src/constants/collections.js
@@ -1,15 +0,0 @@
export const COLLECTION_ID = 'lid';
export const COLLECTION_INDEX = 'linx';

export const COL_TYPE_PLAYLIST = 'playlist';
export const COL_TYPE_CHANNELS = 'channelList';

export const WATCH_LATER_ID = 'watchlater';
export const FAVORITES_ID = 'favorites';
export const FAVORITE_CHANNELS_ID = 'favoriteChannels';
export const BUILTIN_LISTS = [WATCH_LATER_ID, FAVORITES_ID, FAVORITE_CHANNELS_ID];

export const COL_KEY_EDITED = 'edited';
export const COL_KEY_UNPUBLISHED = 'unpublished';
export const COL_KEY_PENDING = 'pending';
export const COL_KEY_SAVED = 'saved';
src/constants/settings.js
@@ -23,7 +23,7 @@ export const INSTANT_PURCHASE_MAX = 'instant_purchase_max';
export const THEME = 'theme';
export const THEMES = 'themes';
export const AUTOMATIC_DARK_MODE_ENABLED = 'automatic_dark_mode_enabled';
export const AUTOPLAY_MEDIA = 'autoplay';
export const AUTOPLAY = 'autoplay';
export const AUTOPLAY_NEXT = 'autoplay_next';
export const OS_NOTIFICATIONS_ENABLED = 'os_notifications_enabled';
export const AUTO_DOWNLOAD = 'auto_download';

@@ -39,8 +39,6 @@ export const ENABLE_PUBLISH_PREVIEW = 'enable-publish-preview';
export const TILE_LAYOUT = 'tile_layout';
export const VIDEO_THEATER_MODE = 'video_theater_mode';
export const VIDEO_PLAYBACK_RATE = 'video_playback_rate';
export const CUSTOM_COMMENTS_SERVER_ENABLED = 'custom_comments_server_enabled';
export const CUSTOM_COMMENTS_SERVER_URL = 'custom_comments_server_url';

// mobile settings
export const BACKGROUND_PLAY_ENABLED = 'backgroundPlayEnabled';
src/constants/shared_preferences.js
@@ -21,12 +21,10 @@ export const CLIENT_SYNC_KEYS = [
  SETTINGS.INSTANT_PURCHASE_ENABLED,
  SETTINGS.INSTANT_PURCHASE_MAX,
  SETTINGS.THEME,
  SETTINGS.AUTOPLAY_MEDIA,
  SETTINGS.AUTOPLAY_NEXT,
  SETTINGS.AUTOPLAY,
  SETTINGS.HIDE_BALANCE,
  SETTINGS.HIDE_SPLASH_ANIMATION,
  SETTINGS.FLOATING_PLAYER,
  SETTINGS.DARK_MODE_TIMES,
  SETTINGS.AUTOMATIC_DARK_MODE_ENABLED,
  SETTINGS.LANGUAGE,
];
src/constants/tags.js
@@ -520,8 +520,7 @@ const DEFAULT_ENGLISH_KNOWN_TAGS = [
  '2020protests',
  'covidcuts',
  'covid-19',
  'LBRYFoundationBoardCandidacy',
  'helplbrysavecrypto'
  'LBRYFoundationBoardCandidacy'
];

const DEFAULT_SPANISH_KNOWN_TAGS = [
src/index.js (63 lines changed)
@@ -12,7 +12,6 @@ import * as TXO_LIST from 'constants/txo_list';
import * as SPEECH_URLS from 'constants/speech_urls';
import * as DAEMON_SETTINGS from 'constants/daemon_settings';
import * as SHARED_PREFERENCES from 'constants/shared_preferences';
import * as COLLECTIONS_CONSTS from 'constants/collections';
import { DEFAULT_KNOWN_TAGS, DEFAULT_FOLLOWED_TAGS, MATURE_TAGS } from 'constants/tags';
import Lbry, { apiCall } from 'lbry';
import LbryFirst from 'lbry-first';

@@ -36,7 +35,6 @@ export {
  MATURE_TAGS,
  SPEECH_URLS,
  SHARED_PREFERENCES,
  COLLECTIONS_CONSTS,
};

// common

@@ -52,8 +50,6 @@ export {
  isURIClaimable,
  isNameValid,
  convertToShareLink,
  splitBySeparator,
  isURIEqual,
} from 'lbryURI';

// middlware

@@ -61,13 +57,6 @@ export { buildSharedStateMiddleware } from 'redux/middleware/shared-state';

// actions
export { doToast, doDismissToast, doError, doDismissError } from 'redux/actions/notifications';
export {
  doLocalCollectionCreate,
  doFetchItemsInCollection,
  doFetchItemsInCollections,
  doCollectionEdit,
  doCollectionDelete,
} from 'redux/actions/collections';

export {
  doFetchClaimsByChannel,

@@ -77,7 +66,6 @@ export {
  doResolveUris,
  doResolveUri,
  doFetchChannelListMine,
  doFetchCollectionListMine,
  doCreateChannel,
  doUpdateChannel,
  doClaimSearch,

@@ -88,8 +76,6 @@ export {
  doCheckPublishNameAvailability,
  doPurchaseList,
  doCheckPendingClaims,
  doCollectionPublish,
  doCollectionPublishUpdate,
} from 'redux/actions/claims';

export { doClearPurchasedUriSuccess, doPurchaseUri, doFileGet } from 'redux/actions/file';

@@ -136,7 +122,6 @@ export {
  doSupportAbandonForClaim,
  doFetchUtxoCounts,
  doUtxoConsolidate,
  doTipClaimMass,
} from 'redux/actions/wallet';

export { doPopulateSharedUserState, doPreferenceGet, doPreferenceSet } from 'redux/actions/sync';

@@ -154,39 +139,11 @@ export { fileInfoReducer } from 'redux/reducers/file_info';
export { notificationsReducer } from 'redux/reducers/notifications';
export { publishReducer } from 'redux/reducers/publish';
export { walletReducer } from 'redux/reducers/wallet';
export { collectionsReducer } from 'redux/reducers/collections';

// selectors
export { makeSelectContentPositionForUri } from 'redux/selectors/content';

export { selectToast, selectError } from 'redux/selectors/notifications';
export {
  selectSavedCollectionIds,
  selectBuiltinCollections,
  selectResolvedCollections,
  selectMyUnpublishedCollections,
  selectMyEditedCollections,
  selectMyPublishedCollections,
  selectMyPublishedMixedCollections,
  selectMyPublishedPlaylistCollections,
  makeSelectEditedCollectionForId,
  makeSelectPendingCollectionForId,
  makeSelectPublishedCollectionForId,
  makeSelectCollectionIsMine,
  makeSelectMyPublishedCollectionForId,
  makeSelectUnpublishedCollectionForId,
  makeSelectCollectionForId,
  makeSelectClaimUrlInCollection,
  makeSelectUrlsForCollectionId,
  makeSelectClaimIdsForCollectionId,
  makeSelectNameForCollectionId,
  makeSelectCountForCollectionId,
  makeSelectIsResolvingCollectionForId,
  makeSelectIndexForUrlInCollection,
  makeSelectPreviousUrlForCollectionAndUrl,
  makeSelectNextUrlForCollectionAndUrl,
  makeSelectCollectionForIdHasClaimUrl,
} from 'redux/selectors/collections';

export {
  makeSelectClaimForUri,

@@ -205,14 +162,13 @@ export {
  makeSelectEffectiveAmountForUri,
  makeSelectTagsForUri,
  makeSelectTagInClaimOrChannelForUri,
  makeSelectTotalStakedAmountForChannelUri,
  makeSelectStakedLevelForChannelUri,
  makeSelectContentTypeForUri,
  makeSelectIsUriResolving,
  makeSelectPendingClaimForUri,
  makeSelectTotalItemsForChannel,
  makeSelectTotalPagesForChannel,
  makeSelectNsfwCountFromUris,
  makeSelectNsfwCountForChannel,
  makeSelectOmittedCountForChannel,
  makeSelectClaimIsNsfw,
  makeSelectChannelForClaimUri,

@@ -220,6 +176,7 @@ export {
  makeSelectMyChannelPermUrlForName,
  makeSelectClaimIsPending,
  makeSelectReflectingClaimForUri,
  makeSelectClaimsInChannelForCurrentPageState,
  makeSelectShortUrlForUri,
  makeSelectCanonicalUrlForUri,
  makeSelectPermanentUrlForUri,

@@ -228,11 +185,8 @@ export {
  makeSelectClaimWasPurchased,
  makeSelectAbandoningClaimById,
  makeSelectIsAbandoningClaimForUri,
  makeSelectClaimHasSource,
  makeSelectClaimIsStreamPlaceholder,
  selectPendingIds,
  selectReflectingById,
  makeSelectClaimForClaimId,
  selectClaimsById,
  selectClaimsByUri,
  selectAllClaimsByChannel,

@@ -242,15 +196,12 @@ export {
  selectAllFetchingChannelClaims,
  selectIsFetchingClaimListMine,
  selectMyClaims,
  selectPendingClaims,
  selectMyClaimsWithoutChannels,
  selectMyChannelUrls,
  selectMyClaimUrisWithoutChannels,
  selectAllMyClaimsByOutpoint,
  selectMyClaimsOutpoints,
  selectFetchingMyChannels,
  selectFetchingMyCollections,
  selectMyCollectionIds,
  selectMyChannelClaims,
  selectResolvingUris,
  selectPlayingUri,

@@ -279,12 +230,6 @@ export {
  selectFetchingMyPurchasesError,
  selectMyPurchasesCount,
  selectPurchaseUriSuccess,
  makeSelectClaimIdForUri,
  selectUpdatingCollection,
  selectUpdateCollectionError,
  selectCreatingCollection,
  selectCreateCollectionError,
  makeSelectClaimIdIsPending,
} from 'redux/selectors/claims';

export {

@@ -333,7 +278,6 @@ export {
  selectSupportsByOutpoint,
  selectTotalSupports,
  selectTransactionItems,
  selectTransactionsFile,
  selectRecentTransactions,
  selectHasTransactions,
  selectIsFetchingTransactions,

@@ -373,9 +317,6 @@ export {
  makeSelectPendingAmountByUri,
  selectIsFetchingUtxoCounts,
  selectIsConsolidatingUtxos,
  selectIsMassClaimingTips,
  selectUtxoCounts,
  selectPendingOtherTransactions,
  selectPendingConsolidateTxid,
  selectPendingMassClaimTxid,
} from 'redux/selectors/wallet';
src/lbry.js
@@ -90,10 +90,6 @@ const Lbry: LbryTypes = {
  support_create: params => daemonCallWithResult('support_create', params),
  support_list: params => daemonCallWithResult('support_list', params),
  stream_repost: params => daemonCallWithResult('stream_repost', params),
  collection_resolve: params => daemonCallWithResult('collection_resolve', params),
  collection_list: params => daemonCallWithResult('collection_list', params),
  collection_create: params => daemonCallWithResult('collection_create', params),
  collection_update: params => daemonCallWithResult('collection_update', params),

  // File fetching and manipulation
  file_list: (params = {}) => daemonCallWithResult('file_list', params),

@@ -117,7 +113,6 @@ const Lbry: LbryTypes = {
  utxo_release: (params = {}) => daemonCallWithResult('utxo_release', params),
  support_abandon: (params = {}) => daemonCallWithResult('support_abandon', params),
  purchase_list: (params = {}) => daemonCallWithResult('purchase_list', params),
  txo_list: (params = {}) => daemonCallWithResult('txo_list', params),

  sync_hash: (params = {}) => daemonCallWithResult('sync_hash', params),
  sync_apply: (params = {}) => daemonCallWithResult('sync_apply', params),
src/lbryURI.js
@@ -4,7 +4,7 @@ const channelNameMinLength = 1;
const claimIdMaxLength = 40;

// see https://spec.lbry.com/#urls
export const regexInvalidURI = /[ =&#:$@%?;/\\"<>%{}|^~[\]`\u{0000}-\u{0008}\u{000b}-\u{000c}\u{000e}-\u{001F}\u{D800}-\u{DFFF}\u{FFFE}-\u{FFFF}]/u;
export const regexInvalidURI = /[ =&#:$@%?;/\\"<>%\{\}|^~[\]`\u{0000}-\u{0008}\u{000b}-\u{000c}\u{000e}-\u{001F}\u{D800}-\u{DFFF}\u{FFFE}-\u{FFFF}]/u;
export const regexAddress = /^(b|r)(?=[^0OIl]{32,33})[0-9A-Za-z]{32,33}$/;
const regexPartProtocol = '^((?:lbry://)?)';
const regexPartStreamOrChannelName = '([^:$#/]*)';

@@ -12,11 +12,6 @@ const regexPartModifierSeparator = '([:$#]?)([^/]*)';
const queryStringBreaker = '^([\\S]+)([?][\\S]*)';
const separateQuerystring = new RegExp(queryStringBreaker);

const MOD_SEQUENCE_SEPARATOR = '*';
const MOD_CLAIM_ID_SEPARATOR_OLD = '#';
const MOD_CLAIM_ID_SEPARATOR = ':';
const MOD_BID_POSITION_SEPARATOR = '$';

/**
 * Parses a LBRY name into its component parts. Throws errors with user-friendly
 * messages for invalid names.

@@ -149,11 +144,11 @@ function parseURIModifier(modSeperator: ?string, modValue: ?string) {
    throw new Error(__(`No modifier provided after separator %modSeperator%.`, { modSeperator }));
  }

  if (modSeperator === MOD_CLAIM_ID_SEPARATOR || MOD_CLAIM_ID_SEPARATOR_OLD) {
  if (modSeperator === '#') {
    claimId = modValue;
  } else if (modSeperator === MOD_SEQUENCE_SEPARATOR) {
  } else if (modSeperator === ':') {
    claimSequence = modValue;
  } else if (modSeperator === MOD_BID_POSITION_SEPARATOR) {
  } else if (modSeperator === '$') {
    bidPosition = modValue;
  }
}

@@ -325,22 +320,3 @@ export function convertToShareLink(URL: string) {
    'https://open.lbry.com/'
  );
}

export function splitBySeparator(uri: string) {
  const protocolLength = 7;
  return uri.startsWith('lbry://') ? uri.slice(protocolLength).split(/[#:*]/) : uri.split(/#:\*\$/);
}

export function isURIEqual(uriA: string, uriB: string) {
  const parseA = parseURI(normalizeURI(uriA));
  const parseB = parseURI(normalizeURI(uriB));
  if (parseA.isChannel) {
    if (parseB.isChannel && parseA.channelClaimId === parseB.channelClaimId) {
      return true;
    }
  } else if (parseA.streamClaimId === parseB.streamClaimId) {
    return true;
  } else {
    return false;
  }
}
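Note added for illustration (not part of the diff): in the constant-based branch of parseURIModifier shown above, the test modSeperator === MOD_CLAIM_ID_SEPARATOR || MOD_CLAIM_ID_SEPARATOR_OLD is always truthy, because MOD_CLAIM_ID_SEPARATOR_OLD is the non-empty string '#'. A sketch of the comparison that would match both separators explicitly:

if (modSeperator === MOD_CLAIM_ID_SEPARATOR || modSeperator === MOD_CLAIM_ID_SEPARATOR_OLD) {
  claimId = modValue;
}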
src/redux/actions/claims.js
@@ -10,27 +10,16 @@ import {
  selectClaimsByUri,
  selectMyChannelClaims,
  selectPendingIds,
  selectPendingClaimsById,
  selectClaimsById,
} from 'redux/selectors/claims';

import { doFetchTxoPage } from 'redux/actions/wallet';
import { selectSupportsByOutpoint } from 'redux/selectors/wallet';
import { creditsToString } from 'util/format-credits';
import { batchActions } from 'util/batch-actions';
import { createNormalizedClaimSearchKey } from 'util/claim';
import { PAGE_SIZE } from 'constants/claim';
import {
  selectPendingCollections,
  makeSelectClaimIdsForCollectionId,
} from 'redux/selectors/collections';
import {
  doFetchItemsInCollection,
  doFetchItemsInCollections,
  doCollectionDelete,
} from 'redux/actions/collections';

let onChannelConfirmCallback;
let checkPendingInterval;
type ResolveEntries = Array<[string, GenericClaim]>;

export function doResolveUris(
  uris: Array<string>,

@@ -72,14 +61,11 @@ export function doResolveUris(
      stream: ?StreamClaim,
      channel: ?ChannelClaim,
      claimsInChannel: ?number,
      collection: ?CollectionClaim,
    },
  } = {};

  const collectionIds: Array<string> = [];

  return Lbry.resolve({ urls: urisToResolve, ...options }).then(
    async(result: ResolveResponse) => {
    async (result: ResolveResponse) => {
      let repostedResults = {};
      const repostsToResolve = [];
      const fallbackResolveInfo = {

@@ -94,7 +80,6 @@ export function doResolveUris(
        // https://github.com/facebook/flow/issues/2221
        if (uriResolveInfo) {
          if (uriResolveInfo.error) {
            // $FlowFixMe
            resolveInfo[uri] = { ...fallbackResolveInfo };
          } else {
            if (checkReposts) {

@@ -111,10 +96,6 @@ export function doResolveUris(
              result.channel = uriResolveInfo;
              // $FlowFixMe
              result.claimsInChannel = uriResolveInfo.meta.claims_in_channel;
            } else if (uriResolveInfo.value_type === 'collection') {
              result.collection = uriResolveInfo;
              // $FlowFixMe
              collectionIds.push(uriResolveInfo.claim_id);
            } else {
              result.stream = uriResolveInfo;
              if (uriResolveInfo.signing_channel) {

@@ -146,11 +127,6 @@ export function doResolveUris(
        type: ACTIONS.RESOLVE_URIS_COMPLETED,
        data: { resolveInfo },
      });

      if (collectionIds.length) {
        dispatch(doFetchItemsInCollections({ collectionIds: collectionIds, pageSize: 5 }));
      }

      return result;
    }
  );

@@ -164,24 +140,18 @@ export function doResolveUri(uri: string) {
export function doFetchClaimListMine(
  page: number = 1,
  pageSize: number = 99999,
  resolve: boolean = true,
  filterBy: Array<string> = []
  resolve: boolean = true
) {
  return (dispatch: Dispatch) => {
    dispatch({
      type: ACTIONS.FETCH_CLAIM_LIST_MINE_STARTED,
    });

    let claimTypes = ['stream', 'repost'];
    if (filterBy && filterBy.length !== 0) {
      claimTypes = claimTypes.filter(t => filterBy.includes(t));
    }

    // $FlowFixMe
    Lbry.claim_list({
      page: page,
      page_size: pageSize,
      claim_type: claimTypes,
      claim_type: ['stream', 'repost'],
      resolve,
    }).then((result: StreamListResponse) => {
      dispatch({

@@ -407,7 +377,7 @@ export function doClearChannelErrors() {
  };
}

export function doCreateChannel(name: string, amount: number, optionalParams: any, onConfirm: any) {
export function doCreateChannel(name: string, amount: number, optionalParams: any, cb: any) {
  return (dispatch: Dispatch) => {
    dispatch({
      type: ACTIONS.CREATE_CHANNEL_STARTED,

@@ -423,7 +393,7 @@ export function doCreateChannel(name: string, amount: number, optionalParams: an
      description?: string,
      website_url?: string,
      email?: string,
      tags?: Array<Tag>,
      tags?: Array<string>,
      languages?: Array<string>,
    } = {
      name,

@@ -474,7 +444,7 @@ export function doCreateChannel(name: string, amount: number, optionalParams: an
            claims: [channelClaim],
          },
        });
        dispatch(doCheckPendingClaims(onConfirm));
        dispatch(doCheckPendingClaims(cb));
        return channelClaim;
      })
      .catch(error => {

@@ -517,6 +487,7 @@ export function doUpdateChannel(params: any, cb: any) {
    }

    // we'll need to remove these once we add locations/channels to channel page edit/create options

    if (channelClaim && channelClaim.value && channelClaim.value.locations) {
      updateParams.locations = channelClaim.value.locations;
    }

@@ -554,7 +525,7 @@ export function doImportChannel(certificate: string) {
    });

    return Lbry.channel_import({ channel_data: certificate })
      .then(() => {
      .then((result: string) => {
        dispatch({
          type: ACTIONS.IMPORT_CHANNEL_COMPLETED,
        });

@@ -596,54 +567,17 @@ export function doFetchChannelListMine(
  };
}

export function doFetchCollectionListMine(page: number = 1, pageSize: number = 99999) {
  return (dispatch: Dispatch) => {
    dispatch({
      type: ACTIONS.FETCH_COLLECTION_LIST_STARTED,
    });

    const callback = (response: CollectionListResponse) => {
      const { items } = response;
      dispatch({
        type: ACTIONS.FETCH_COLLECTION_LIST_COMPLETED,
        data: { claims: items },
      });
      dispatch(
        doFetchItemsInCollections({
          collectionIds: items.map(claim => claim.claim_id),
          page_size: 5,
        })
      );
    };

    const failure = error => {
      dispatch({
        type: ACTIONS.FETCH_COLLECTION_LIST_FAILED,
        data: error,
      });
    };

    Lbry.collection_list({ page, page_size: pageSize, resolve_claims: 1, resolve: true }).then(
      callback,
      failure
    );
  };
}

export function doClaimSearch(
  options: {
    page_size: number,
    page: number,
    no_totals?: boolean,
    no_totals: boolean,
    any_tags?: Array<string>,
    claim_ids?: Array<string>,
    channel_ids?: Array<string>,
    not_channel_ids?: Array<string>,
    not_tags?: Array<string>,
    order_by?: Array<string>,
    release_time?: string,
    has_source?: boolean,
    has_no_souce?: boolean,
  } = {
    no_totals: true,
    page_size: 10,

@@ -651,7 +585,7 @@ export function doClaimSearch(
  }
) {
  const query = createNormalizedClaimSearchKey(options);
  return async(dispatch: Dispatch) => {
  return (dispatch: Dispatch) => {
    dispatch({
      type: ACTIONS.CLAIM_SEARCH_STARTED,
      data: { query: query },

@@ -675,7 +609,6 @@ export function doClaimSearch(
        pageSize: options.page_size,
      },
    });
    return resolveInfo;
  };

  const failure = err => {

@@ -684,10 +617,9 @@ export function doClaimSearch(
      data: { query },
      error: err,
    });
    return false;
  };

  return await Lbry.claim_search({
  Lbry.claim_search({
    ...options,
    include_purchase_receipt: true,
  }).then(success, failure);

@@ -695,7 +627,8 @@ export function doClaimSearch(
}

export function doRepost(options: StreamRepostOptions) {
  return (dispatch: Dispatch): Promise<any> => {
  return (dispatch: Dispatch) => {
    // $FlowFixMe
    return new Promise(resolve => {
      dispatch({
        type: ACTIONS.CLAIM_REPOST_STARTED,

@@ -735,209 +668,6 @@ export function doRepost(options: StreamRepostOptions) {
  };
}

export function doCollectionPublish(
  options: {
    name: string,
    bid: string,
    blocking: true,
    title?: string,
    channel_id?: string,
    thumbnail_url?: string,
    description?: string,
    tags?: Array<Tag>,
    languages?: Array<string>,
    claims: Array<string>,
  },
  localId: string
) {
  return (dispatch: Dispatch): Promise<any> => {
    // $FlowFixMe

    const params: {
      name: string,
      bid: string,
      channel_id?: string,
      blocking?: true,
      title?: string,
      thumbnail_url?: string,
      description?: string,
      tags?: Array<string>,
      languages?: Array<string>,
      claims: Array<string>,
    } = {
      name: options.name,
      bid: creditsToString(options.bid),
      title: options.title,
      thumbnail_url: options.thumbnail_url,
      description: options.description,
      tags: [],
      languages: options.languages || [],
      locations: [],
      blocking: true,
      claims: options.claims,
    };

    if (options.tags) {
      params['tags'] = options.tags.map(tag => tag.name);
    }

    if (options.channel_id) {
      params['channel_id'] = options.channel_id;
    }

    return new Promise(resolve => {
      dispatch({
        type: ACTIONS.COLLECTION_PUBLISH_STARTED,
      });

      function success(response) {
        const collectionClaim = response.outputs[0];
        dispatch(
          batchActions(
            {
              type: ACTIONS.COLLECTION_PUBLISH_COMPLETED,
              data: { claimId: collectionClaim.claim_id },
            },
            // move unpublished collection to pending collection with new publish id
            // recent publish won't resolve this second. handle it in checkPending
            {
              type: ACTIONS.UPDATE_PENDING_CLAIMS,
              data: {
                claims: [collectionClaim],
              },
            }
          )
        );
        dispatch({
          type: ACTIONS.COLLECTION_PENDING,
          data: { localId: localId, claimId: collectionClaim.claim_id },
        });
        dispatch(doCheckPendingClaims());
        dispatch(doFetchCollectionListMine(1, 10));
        return resolve(collectionClaim);
      }

      function failure(error) {
        dispatch({
          type: ACTIONS.COLLECTION_PUBLISH_FAILED,
          data: {
            error: error.message,
          },
        });
      }

      return Lbry.collection_create(params).then(success, failure);
    });
  };
}

export function doCollectionPublishUpdate(
  options: {
    bid?: string,
    blocking?: true,
    title?: string,
    thumbnail_url?: string,
    description?: string,
    claim_id: string,
    tags?: Array<Tag>,
    languages?: Array<string>,
    claims?: Array<string>,
    channel_id?: string,
  },
  isBackgroundUpdate?: boolean
) {
  return (dispatch: Dispatch, getState: GetState): Promise<any> => {
    // TODO: implement one click update

    const updateParams: {
      bid?: string,
      blocking?: true,
      title?: string,
      thumbnail_url?: string,
      channel_id?: string,
      description?: string,
      claim_id: string,
      tags?: Array<string>,
      languages?: Array<string>,
      claims?: Array<string>,
      clear_claims: boolean,
      replace?: boolean,
    } = isBackgroundUpdate
      ? {
          blocking: true,
          claim_id: options.claim_id,
          clear_claims: true,
        }
      : {
          bid: creditsToString(options.bid),
          title: options.title,
          thumbnail_url: options.thumbnail_url,
          description: options.description,
          tags: [],
          languages: options.languages || [],
          locations: [],
          blocking: true,
          claim_id: options.claim_id,
          clear_claims: true,
          replace: true,
        };

    if (isBackgroundUpdate && updateParams.claim_id) {
      const state = getState();
      updateParams['claims'] = makeSelectClaimIdsForCollectionId(updateParams.claim_id)(state);
    } else if (options.claims) {
      updateParams['claims'] = options.claims;
    }

    if (options.tags) {
      updateParams['tags'] = options.tags.map(tag => tag.name);
    }

    if (options.channel_id) {
      updateParams['channel_id'] = options.channel_id;
    }

    return new Promise(resolve => {
      dispatch({
        type: ACTIONS.COLLECTION_PUBLISH_UPDATE_STARTED,
      });

      function success(response) {
        const collectionClaim = response.outputs[0];
        dispatch({
          type: ACTIONS.COLLECTION_PUBLISH_UPDATE_COMPLETED,
          data: {
            collectionClaim,
          },
        });
        dispatch({
          type: ACTIONS.COLLECTION_PENDING,
          data: { claimId: collectionClaim.claim_id },
        });
        dispatch({
          type: ACTIONS.UPDATE_PENDING_CLAIMS,
          data: {
            claims: [collectionClaim],
          },
        });
        dispatch(doCheckPendingClaims());
        return resolve(collectionClaim);
      }

      function failure(error) {
        dispatch({
          type: ACTIONS.COLLECTION_PUBLISH_UPDATE_FAILED,
          data: {
            error: error.message,
          },
        });
      }

      return Lbry.collection_update(updateParams).then(success, failure);
    });
  };
}

export function doCheckPublishNameAvailability(name: string) {
  return (dispatch: Dispatch) => {
    dispatch({

@@ -1000,71 +730,47 @@ export function doPurchaseList(page: number = 1, pageSize: number = PAGE_SIZE) {
  };
}

export const doCheckPendingClaims = (onChannelConfirmed: Function) => (
export const doCheckPendingClaims = (onConfirmed: Function) => (
  dispatch: Dispatch,
  getState: GetState
) => {
  if (onChannelConfirmed) {
    onChannelConfirmCallback = onChannelConfirmed;
  }
  clearInterval(checkPendingInterval);
  const checkTxoList = () => {
  let claimCheckInterval;

  const checkClaimList = () => {
    const state = getState();
    const pendingById = Object.assign({}, selectPendingClaimsById(state));
    const pendingTxos = (Object.values(pendingById): any).map(p => p.txid);
    // use collections
    const pendingCollections = selectPendingCollections(state);
    if (pendingTxos.length) {
      Lbry.txo_list({ txid: pendingTxos })
        .then(result => {
          const txos = result.items;
          const idsToConfirm = [];
          txos.forEach(txo => {
            if (txo.claim_id && txo.confirmations > 0) {
              idsToConfirm.push(txo.claim_id);
              delete pendingById[txo.claim_id];
    const pendingIdSet = new Set(selectPendingIds(state));
    Lbry.claim_list({ page: 1, page_size: 10 })
      .then(result => {
        const claims = result.items;
        const claimsToConfirm = [];
        claims.forEach(claim => {
          const { claim_id: claimId } = claim;
          if (claim.confirmations > 0 && pendingIdSet.has(claimId)) {
            pendingIdSet.delete(claimId);
            claimsToConfirm.push(claim);
            if (onConfirmed) {
              onConfirmed(claim);
            }
          }
        });
        return { idsToConfirm, pendingById };
      })
      .then(results => {
        const { idsToConfirm, pendingById } = results;
        if (idsToConfirm.length) {
          return Lbry.claim_list({ claim_id: idsToConfirm, resolve: true }).then(results => {
            const claims = results.items;
            const collectionIds = claims
              .filter(c => c.value_type === 'collection')
              .map(c => c.claim_id);
            dispatch({
              type: ACTIONS.UPDATE_CONFIRMED_CLAIMS,
              data: {
                claims: claims,
                pending: pendingById,
              },
            });
            if (collectionIds.length) {
              dispatch(
                doFetchItemsInCollections({
                  collectionIds,
                })
              );
            }
            const channelClaims = claims.filter(claim => claim.value_type === 'channel');
            if (channelClaims.length && onChannelConfirmCallback) {
              channelClaims.forEach(claim => onChannelConfirmCallback(claim));
            }
            if (Object.keys(pendingById).length === 0) {
              clearInterval(checkPendingInterval);
            }
          });
        }
      });
    } else {
      clearInterval(checkPendingInterval);
    }
        if (claimsToConfirm.length) {
          dispatch({
            type: ACTIONS.UPDATE_CONFIRMED_CLAIMS,
            data: {
              claims: claimsToConfirm,
            },
          });
        }
        return pendingIdSet.size;
      })
      .then(len => {
        if (!len) {
          clearInterval(claimCheckInterval);
        }
      });
  };
  // do something with onConfirmed (typically get blocklist for channel)
  checkPendingInterval = setInterval(() => {
    checkTxoList();

  claimCheckInterval = setInterval(() => {
    checkClaimList();
  }, 30000);
};
@ -1,495 +0,0 @@
|
|||
// @flow
|
||||
import * as ACTIONS from 'constants/action_types';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
import Lbry from 'lbry';
|
||||
import { doClaimSearch, doAbandonClaim } from 'redux/actions/claims';
|
||||
import { makeSelectClaimForClaimId } from 'redux/selectors/claims';
|
||||
import {
|
||||
makeSelectCollectionForId,
|
||||
// makeSelectPublishedCollectionForId, // for "save" or "copy" action
|
||||
makeSelectMyPublishedCollectionForId,
|
||||
makeSelectPublishedCollectionForId,
|
||||
makeSelectUnpublishedCollectionForId,
|
||||
makeSelectEditedCollectionForId,
|
||||
} from 'redux/selectors/collections';
|
||||
import * as COLS from 'constants/collections';
|
||||
|
||||
const getTimestamp = () => {
|
||||
return Math.floor(Date.now() / 1000);
|
||||
};
|
||||
|
||||
const FETCH_BATCH_SIZE = 50;
|
||||
|
||||
export const doLocalCollectionCreate = (
|
||||
name: string,
|
||||
collectionItems: Array<string>,
|
||||
type: string,
|
||||
sourceId: string
|
||||
) => (dispatch: Dispatch) => {
|
||||
return dispatch({
|
||||
type: ACTIONS.COLLECTION_NEW,
|
||||
data: {
|
||||
entry: {
|
||||
id: uuid(), // start with a uuid, this becomes a claimId after publish
|
||||
name: name,
|
||||
updatedAt: getTimestamp(),
|
||||
items: collectionItems || [],
|
||||
sourceId: sourceId,
|
||||
type: type,
|
||||
},
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
export const doCollectionDelete = (id: string, colKey: ?string = undefined) => (
|
||||
dispatch: Dispatch,
|
||||
getState: GetState
|
||||
) => {
|
||||
const state = getState();
|
||||
const claim = makeSelectClaimForClaimId(id)(state);
|
||||
const collectionDelete = () =>
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_DELETE,
|
||||
data: {
|
||||
id: id,
|
||||
collectionKey: colKey,
|
||||
},
|
||||
});
|
||||
if (claim && !colKey) {
|
||||
// could support "abandon, but keep" later
|
||||
const { txid, nout } = claim;
|
||||
return dispatch(doAbandonClaim(txid, nout, collectionDelete));
|
||||
}
|
||||
return collectionDelete();
|
||||
};
|
||||
|
||||
// Given a collection, save its collectionId to be resolved and displayed in Library
|
||||
// export const doCollectionSave = (
|
||||
// id: string,
|
||||
// ) => (dispatch: Dispatch) => {
|
||||
// return dispatch({
|
||||
// type: ACTIONS.COLLECTION_SAVE,
|
||||
// data: {
|
||||
// id: id,
|
||||
// },
|
||||
// });
|
||||
// };
|
||||
|
||||
// Given a collection and name, copy it to a local private collection with a name
|
||||
// export const doCollectionCopy = (
|
||||
// id: string,
|
||||
// ) => (dispatch: Dispatch) => {
|
||||
// return dispatch({
|
||||
// type: ACTIONS.COLLECTION_COPY,
|
||||
// data: {
|
||||
// id: id,
|
||||
// },
|
||||
// });
|
||||
// };
|
||||
|
||||
export const doFetchItemsInCollections = (
|
||||
resolveItemsOptions: {
|
||||
collectionIds: Array<string>,
|
||||
pageSize?: number,
|
||||
},
|
||||
resolveStartedCallback?: () => void
|
||||
) => async(dispatch: Dispatch, getState: GetState) => {
|
||||
/*
1) Make sure all the collection claims are loaded into the claims reducer; search/resolve them if necessary.
2) Get the item claims for each collection.
3) Format them and make sure they are in the same order as in the collection claim.
4) Build the collection objects and update the collections reducer.
5) Update the redux claims reducer.
*/
|
||||
let state = getState();
|
||||
const { collectionIds, pageSize } = resolveItemsOptions;
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_ITEMS_RESOLVE_STARTED,
|
||||
data: { ids: collectionIds },
|
||||
});
|
||||
|
||||
if (resolveStartedCallback) resolveStartedCallback();
|
||||
|
||||
const collectionIdsToSearch = collectionIds.filter(claimId => !state.claims.byId[claimId]);
|
||||
|
||||
if (collectionIdsToSearch.length) {
|
||||
await dispatch(doClaimSearch({ claim_ids: collectionIdsToSearch, page: 1, page_size: 9999 }));
|
||||
}
|
||||
|
||||
const stateAfterClaimSearch = getState();
|
||||
|
||||
async function fetchItemsForCollectionClaim(claim: CollectionClaim, pageSize?: number) {
|
||||
const totalItems = claim.value.claims && claim.value.claims.length;
|
||||
const claimId = claim.claim_id;
|
||||
const itemOrder = claim.value.claims;
|
||||
|
||||
const sortResults = (items: Array<Claim>, claimList) => {
|
||||
const newItems: Array<Claim> = [];
|
||||
claimList.forEach(id => {
|
||||
const index = items.findIndex(i => i.claim_id === id);
|
||||
if (index >= 0) {
|
||||
newItems.push(items[index]);
|
||||
}
|
||||
});
|
||||
/*
This may return newItems[] with fewer entries than total_items below
if one or more of the claims has been abandoned. That's ok for now.
*/
|
||||
return newItems;
|
||||
};
|
||||
|
||||
const mergeBatches = (
|
||||
arrayOfResults: Array<{ items: Array<Claim>, total_items: number }>,
|
||||
claimList: Array<string>
|
||||
) => {
|
||||
const mergedResults: { items: Array<Claim>, total_items: number } = {
|
||||
items: [],
|
||||
total_items: 0,
|
||||
};
|
||||
arrayOfResults.forEach(result => {
|
||||
mergedResults.items = mergedResults.items.concat(result.items);
|
||||
mergedResults.total_items = result.total_items;
|
||||
});
|
||||
|
||||
mergedResults.items = sortResults(mergedResults.items, claimList);
|
||||
return mergedResults;
|
||||
};
|
||||
|
||||
try {
|
||||
const batchSize = pageSize || FETCH_BATCH_SIZE;
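// e.g. 120 items with the default FETCH_BATCH_SIZE of 50 gives Math.ceil(120 / 50) = 3 claim_search pages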
|
||||
const batches: Array<Promise<any>> = [];
|
||||
|
||||
for (let i = 0; i < Math.ceil(totalItems / batchSize); i++) {
|
||||
batches[i] = Lbry.claim_search({
|
||||
claim_ids: claim.value.claims,
|
||||
page: i + 1,
|
||||
page_size: batchSize,
|
||||
no_totals: true,
|
||||
});
|
||||
}
|
||||
const itemsInBatches = await Promise.all(batches);
|
||||
const result = mergeBatches(itemsInBatches, itemOrder);
|
||||
|
||||
// $FlowFixMe
|
||||
const itemsById: { claimId: string, items?: ?Array<GenericClaim> } = { claimId: claimId };
|
||||
if (result.items) {
|
||||
itemsById.items = result.items;
|
||||
} else {
|
||||
itemsById.items = null;
|
||||
}
|
||||
return itemsById;
|
||||
} catch (e) {
|
||||
return {
|
||||
claimId: claimId,
|
||||
items: null,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function formatForClaimActions(resultClaimsByUri) {
|
||||
const formattedClaims = {};
|
||||
Object.entries(resultClaimsByUri).forEach(([uri, uriResolveInfo]) => {
|
||||
// Flow has terrible Object.entries support
|
||||
// https://github.com/facebook/flow/issues/2221
|
||||
if (uriResolveInfo) {
|
||||
let result = {};
|
||||
if (uriResolveInfo.value_type === 'channel') {
|
||||
result.channel = uriResolveInfo;
|
||||
// $FlowFixMe
|
||||
result.claimsInChannel = uriResolveInfo.meta.claims_in_channel;
|
||||
// ALSO SKIP COLLECTIONS
|
||||
} else if (uriResolveInfo.value_type === 'collection') {
|
||||
result.collection = uriResolveInfo;
|
||||
} else {
|
||||
result.stream = uriResolveInfo;
|
||||
if (uriResolveInfo.signing_channel) {
|
||||
result.channel = uriResolveInfo.signing_channel;
|
||||
result.claimsInChannel =
|
||||
(uriResolveInfo.signing_channel.meta &&
|
||||
uriResolveInfo.signing_channel.meta.claims_in_channel) ||
|
||||
0;
|
||||
}
|
||||
}
|
||||
// $FlowFixMe
|
||||
formattedClaims[uri] = result;
|
||||
}
|
||||
});
|
||||
return formattedClaims;
|
||||
}
|
||||
|
||||
const invalidCollectionIds = [];
|
||||
const promisedCollectionItemFetches = [];
|
||||
collectionIds.forEach(collectionId => {
|
||||
const claim = makeSelectClaimForClaimId(collectionId)(stateAfterClaimSearch);
|
||||
if (!claim) {
|
||||
invalidCollectionIds.push(collectionId);
|
||||
} else {
|
||||
promisedCollectionItemFetches.push(fetchItemsForCollectionClaim(claim, pageSize));
|
||||
}
|
||||
});
|
||||
|
||||
// $FlowFixMe
|
||||
const collectionItemsById: Array<{
|
||||
claimId: string,
|
||||
items: ?Array<GenericClaim>,
|
||||
}> = await Promise.all(promisedCollectionItemFetches);
|
||||
|
||||
const newCollectionObjectsById = {};
|
||||
const resolvedItemsByUrl = {};
|
||||
collectionItemsById.forEach(entry => {
|
||||
// $FlowFixMe
|
||||
const collectionItems: Array<any> = entry.items;
|
||||
const collectionId = entry.claimId;
|
||||
if (collectionItems) {
|
||||
const claim = makeSelectClaimForClaimId(collectionId)(stateAfterClaimSearch);
|
||||
|
||||
const editedCollection = makeSelectEditedCollectionForId(collectionId)(stateAfterClaimSearch);
|
||||
const { name, timestamp, value } = claim || {};
|
||||
const { title } = value;
|
||||
const valueTypes = new Set();
|
||||
const streamTypes = new Set();
|
||||
|
||||
let newItems = [];
|
||||
let isPlaylist;
|
||||
|
||||
if (collectionItems) {
|
||||
collectionItems.forEach(collectionItem => {
|
||||
newItems.push(collectionItem.permanent_url);
|
||||
valueTypes.add(collectionItem.value_type);
|
||||
if (collectionItem.value.stream_type) {
|
||||
streamTypes.add(collectionItem.value.stream_type);
|
||||
}
|
||||
resolvedItemsByUrl[collectionItem.canonical_url] = collectionItem;
|
||||
});
|
||||
isPlaylist =
|
||||
valueTypes.size === 1 &&
|
||||
valueTypes.has('stream') &&
|
||||
((streamTypes.size === 1 && (streamTypes.has('audio') || streamTypes.has('video'))) ||
|
||||
(streamTypes.size === 2 && (streamTypes.has('audio') && streamTypes.has('video'))));
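// i.e. the list counts as a playlist when every item is a stream and the only stream types present are audio and/or video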
|
||||
}
|
||||
|
||||
newCollectionObjectsById[collectionId] = {
|
||||
items: newItems,
|
||||
id: collectionId,
|
||||
name: title || name,
|
||||
itemCount: claim.value.claims.length,
|
||||
type: isPlaylist ? 'playlist' : 'collection',
|
||||
updatedAt: timestamp,
|
||||
};
|
||||
|
||||
if (editedCollection && timestamp > editedCollection['updatedAt']) {
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_DELETE,
|
||||
data: {
|
||||
id: collectionId,
|
||||
collectionKey: 'edited',
|
||||
},
|
||||
});
|
||||
}
|
||||
} else {
|
||||
invalidCollectionIds.push(collectionId);
|
||||
}
|
||||
});
|
||||
const formattedClaimsByUri = formatForClaimActions(collectionItemsById);
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.RESOLVE_URIS_COMPLETED,
|
||||
data: { resolveInfo: formattedClaimsByUri },
|
||||
});
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_ITEMS_RESOLVE_COMPLETED,
|
||||
data: {
|
||||
resolvedCollections: newCollectionObjectsById,
|
||||
failedCollectionIds: invalidCollectionIds,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
export const doFetchItemsInCollection = (
|
||||
options: { collectionId: string, pageSize?: number },
|
||||
cb?: () => void
|
||||
) => {
|
||||
const { collectionId, pageSize } = options;
|
||||
const newOptions: { collectionIds: Array<string>, pageSize?: number } = {
|
||||
collectionIds: [collectionId],
|
||||
};
|
||||
if (pageSize) newOptions.pageSize = pageSize;
|
||||
return doFetchItemsInCollections(newOptions, cb);
|
||||
};
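// Usage sketch (illustrative; `store` and the claim id are assumptions, not part of this diff):
store.dispatch(
  doFetchItemsInCollection({ collectionId: '0123abcd0123abcd0123abcd0123abcd0123abcd', pageSize: 25 }, () =>
    console.log('collection resolve started')
  )
);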
|
||||
|
||||
export const doCollectionEdit = (collectionId: string, params: CollectionEditParams) => async(
|
||||
dispatch: Dispatch,
|
||||
getState: GetState
|
||||
) => {
|
||||
const state = getState();
|
||||
const collection: Collection = makeSelectCollectionForId(collectionId)(state);
|
||||
const editedCollection: Collection = makeSelectEditedCollectionForId(collectionId)(state);
|
||||
const unpublishedCollection: Collection = makeSelectUnpublishedCollectionForId(collectionId)(
|
||||
state
|
||||
);
|
||||
const publishedCollection: Collection = makeSelectPublishedCollectionForId(collectionId)(state); // needs to be published only
|
||||
|
||||
const generateCollectionItemsFromSearchResult = results => {
|
||||
return (
|
||||
Object.values(results)
|
||||
// $FlowFixMe
|
||||
.reduce(
|
||||
(
|
||||
acc,
|
||||
cur: {
|
||||
stream: ?StreamClaim,
|
||||
channel: ?ChannelClaim,
|
||||
claimsInChannel: ?number,
|
||||
collection: ?CollectionClaim,
|
||||
}
|
||||
) => {
|
||||
let url;
|
||||
if (cur.stream) {
|
||||
url = cur.stream.permanent_url;
|
||||
} else if (cur.channel) {
|
||||
url = cur.channel.permanent_url;
|
||||
} else if (cur.collection) {
|
||||
url = cur.collection.permanent_url;
|
||||
} else {
|
||||
return acc;
|
||||
}
|
||||
acc.push(url);
|
||||
return acc;
|
||||
},
|
||||
[]
|
||||
)
|
||||
);
|
||||
};
|
||||
|
||||
if (!collection) {
|
||||
return dispatch({
|
||||
type: ACTIONS.COLLECTION_ERROR,
|
||||
data: {
|
||||
message: 'collection does not exist',
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
let currentItems = collection.items ? collection.items.concat() : [];
|
||||
const { claims: passedClaims, order, claimIds, replace, remove, type } = params;
|
||||
|
||||
const collectionType = type || collection.type;
|
||||
let newItems: Array<?string> = currentItems;
|
||||
|
||||
if (passedClaims) {
|
||||
if (remove) {
|
||||
const passedUrls = passedClaims.map(claim => claim.permanent_url);
|
||||
// $FlowFixMe // need this?
|
||||
newItems = currentItems.filter((item: string) => !passedUrls.includes(item));
|
||||
} else {
|
||||
passedClaims.forEach(claim => newItems.push(claim.permanent_url));
|
||||
}
|
||||
}
|
||||
|
||||
if (claimIds) {
|
||||
const batches = [];
|
||||
if (claimIds.length > 50) {
|
||||
for (let i = 0; i < Math.ceil(claimIds.length / 50); i++) {
|
||||
batches[i] = claimIds.slice(i * 50, (i + 1) * 50);
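// e.g. 120 claim ids produce three claim_search batches of 50, 50 and 20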
|
||||
}
|
||||
} else {
|
||||
batches[0] = claimIds;
|
||||
}
|
||||
const resultArray = await Promise.all(
|
||||
batches.map(batch => {
|
||||
let options = { claim_ids: batch, page: 1, page_size: 50 };
|
||||
return dispatch(doClaimSearch(options));
|
||||
})
|
||||
);
|
||||
|
||||
const searchResults = Object.assign({}, ...resultArray);
|
||||
|
||||
if (replace) {
|
||||
newItems = generateCollectionItemsFromSearchResult(searchResults);
|
||||
} else {
|
||||
newItems = currentItems.concat(generateCollectionItemsFromSearchResult(searchResults));
|
||||
}
|
||||
}
|
||||
|
||||
if (order) {
|
||||
const [movedItem] = currentItems.splice(order.from, 1);
|
||||
currentItems.splice(order.to, 0, movedItem);
|
||||
}
|
||||
|
||||
// console.log('p&e', publishedCollection.items, newItems, publishedCollection.items.join(','), newItems.join(','))
|
||||
if (editedCollection) {
|
||||
// delete edited if newItems are the same as publishedItems
|
||||
if (publishedCollection.items.join(',') === newItems.join(',')) {
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_DELETE,
|
||||
data: {
|
||||
id: collectionId,
|
||||
collectionKey: 'edited',
|
||||
},
|
||||
});
|
||||
} else {
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_EDIT,
|
||||
data: {
|
||||
id: collectionId,
|
||||
collectionKey: 'edited',
|
||||
collection: {
|
||||
items: newItems,
|
||||
id: collectionId,
|
||||
name: params.name || collection.name,
|
||||
updatedAt: getTimestamp(),
|
||||
type: collectionType,
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
} else if (publishedCollection) {
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_EDIT,
|
||||
data: {
|
||||
id: collectionId,
|
||||
collectionKey: 'edited',
|
||||
collection: {
|
||||
items: newItems,
|
||||
id: collectionId,
|
||||
name: params.name || collection.name,
|
||||
updatedAt: getTimestamp(),
|
||||
type: collectionType,
|
||||
},
|
||||
},
|
||||
});
|
||||
} else if (COLS.BUILTIN_LISTS.includes(collectionId)) {
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_EDIT,
|
||||
data: {
|
||||
id: collectionId,
|
||||
collectionKey: 'builtin',
|
||||
collection: {
|
||||
items: newItems,
|
||||
id: collectionId,
|
||||
name: params.name || collection.name,
|
||||
updatedAt: getTimestamp(),
|
||||
type: collectionType,
|
||||
},
|
||||
},
|
||||
});
|
||||
} else if (unpublishedCollection) {
|
||||
dispatch({
|
||||
type: ACTIONS.COLLECTION_EDIT,
|
||||
data: {
|
||||
id: collectionId,
|
||||
collectionKey: 'unpublished',
|
||||
collection: {
|
||||
items: newItems,
|
||||
id: collectionId,
|
||||
name: params.name || collection.name,
|
||||
updatedAt: getTimestamp(),
|
||||
type: collectionType,
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
return true;
|
||||
};
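// Usage sketches (illustrative; the collection id, claim object and `store` are assumptions):
store.dispatch(doCollectionEdit('someCollectionClaimId', { claimIds: ['claimIdA', 'claimIdB'] })); // append two items
store.dispatch(doCollectionEdit('someCollectionClaimId', { claims: [someResolvedClaim], remove: true })); // drop an item
store.dispatch(doCollectionEdit('someCollectionClaimId', { order: { from: 0, to: 3 } })); // reorder in place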
@ -21,7 +21,6 @@ export const doResetThumbnailStatus = () => (dispatch: Dispatch) => {
type: ACTIONS.UPDATE_PUBLISH_FORM,
|
||||
data: {
|
||||
thumbnailPath: '',
|
||||
thumbnailError: undefined,
|
||||
},
|
||||
});
@ -69,8 +68,7 @@ export const doUploadThumbnail = (
thumbnailBlob?: File,
|
||||
fsAdapter?: any,
|
||||
fs?: any,
|
||||
path?: any,
|
||||
cb?: (string) => void
|
||||
path?: any
|
||||
) => (dispatch: Dispatch) => {
|
||||
const downMessage = __('Thumbnail upload service may be down, try again later.');
|
||||
let thumbnail, fileExt, fileName, fileType;
@ -98,13 +96,6 @@ export const doUploadThumbnail = (
);
|
||||
};
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.UPDATE_PUBLISH_FORM,
|
||||
data: {
|
||||
thumbnailError: undefined,
|
||||
},
|
||||
});
|
||||
|
||||
const doUpload = data => {
|
||||
return fetch(SPEECH_PUBLISH, {
|
||||
method: 'POST',
|
||||
|
@ -113,17 +104,15 @@ export const doUploadThumbnail = (
|
|||
.then(res => res.text())
|
||||
.then(text => (text.length ? JSON.parse(text) : {}))
|
||||
.then(json => {
|
||||
if (!json.success) return uploadError(json.message || downMessage);
|
||||
if (cb) {
|
||||
cb(json.data.serveUrl);
|
||||
}
|
||||
return dispatch({
|
||||
type: ACTIONS.UPDATE_PUBLISH_FORM,
|
||||
data: {
|
||||
uploadThumbnailStatus: THUMBNAIL_STATUSES.COMPLETE,
|
||||
thumbnail: json.data.serveUrl,
|
||||
},
|
||||
});
|
||||
return json.success
|
||||
? dispatch({
|
||||
type: ACTIONS.UPDATE_PUBLISH_FORM,
|
||||
data: {
|
||||
uploadThumbnailStatus: THUMBNAIL_STATUSES.COMPLETE,
|
||||
thumbnail: json.data.serveUrl,
|
||||
},
|
||||
})
|
||||
: uploadError(json.message || downMessage);
|
||||
})
|
||||
.catch(err => {
|
||||
let message = err.message;
|
||||
|
@ -195,7 +184,6 @@ export const doPrepareEdit = (claim: StreamClaim, uri: string, fileInfo: FileLis
|
|||
currency: 'LBC',
|
||||
},
|
||||
languages,
|
||||
release_time,
|
||||
license,
|
||||
license_url: licenseUrl,
|
||||
thumbnail,
|
||||
|
@ -211,8 +199,6 @@ export const doPrepareEdit = (claim: StreamClaim, uri: string, fileInfo: FileLis
|
|||
description,
|
||||
fee,
|
||||
languages,
|
||||
releaseTime: release_time,
|
||||
releaseTimeEdited: undefined,
|
||||
thumbnail: thumbnail ? thumbnail.url : null,
|
||||
title,
|
||||
uri,
|
||||
|
@ -266,7 +252,6 @@ export const doPublish = (success: Function, fail: Function, preview: Function)
|
|||
filePath,
|
||||
description,
|
||||
language,
|
||||
releaseTimeEdited,
|
||||
license,
|
||||
licenseUrl,
|
||||
useLBRYUploader,
|
||||
|
@ -282,7 +267,6 @@ export const doPublish = (success: Function, fail: Function, preview: Function)
|
|||
locations,
|
||||
optimize,
|
||||
isLivestreamPublish,
|
||||
remoteFileUrl,
|
||||
} = publishData;
|
||||
|
||||
// Handle scenario where we have a claim that has the same name as a channel we are publishing with.
|
||||
|
@ -322,7 +306,6 @@ export const doPublish = (success: Function, fail: Function, preview: Function)
|
|||
blocking: boolean,
|
||||
optimize_file?: boolean,
|
||||
preview?: boolean,
|
||||
remote_url?: string,
|
||||
} = {
|
||||
name,
|
||||
title,
|
||||
|
@ -338,9 +321,6 @@ export const doPublish = (success: Function, fail: Function, preview: Function)
|
|||
// Temporary solution to keep the same publish flow with the new tags api
|
||||
// Eventually we will allow users to enter their own tags on publish
|
||||
// `nsfw` will probably be removed
|
||||
if (remoteFileUrl) {
|
||||
publishPayload.remote_url = remoteFileUrl;
|
||||
}
|
||||
|
||||
if (publishingLicense) {
|
||||
publishPayload.license = publishingLicense;
|
||||
|
@ -359,9 +339,7 @@ export const doPublish = (success: Function, fail: Function, preview: Function)
|
|||
}
|
||||
|
||||
// Set release time to the current date. On edits, keep the original release/transaction time as release_time
|
||||
if (releaseTimeEdited) {
|
||||
publishPayload.release_time = releaseTimeEdited;
|
||||
} else if (myClaimForUriEditing && myClaimForUriEditing.value.release_time) {
|
||||
if (myClaimForUriEditing && myClaimForUriEditing.value.release_time) {
|
||||
publishPayload.release_time = Number(myClaimForUri.value.release_time);
|
||||
} else if (myClaimForUriEditing && myClaimForUriEditing.timestamp) {
|
||||
publishPayload.release_time = Number(myClaimForUriEditing.timestamp);
|
||||
|
@ -388,7 +366,34 @@ export const doPublish = (success: Function, fail: Function, preview: Function)
|
|||
|
||||
// Only pass the file on new uploads, not on metadata-only edits.
// The SDK will figure it out.
|
||||
if (filePath && !isLivestreamPublish) publishPayload.file_path = filePath;
|
||||
if (filePath) publishPayload.file_path = filePath;
|
||||
|
||||
if (isLivestreamPublish) {
|
||||
var d = new Date();
|
||||
|
||||
// Set it to one month in future so it's hidden in apps
|
||||
d.setFullYear(d.getFullYear() - 10);
|
||||
d.setHours(0, 0, 0);
|
||||
d.setMilliseconds(0);
|
||||
|
||||
const releaseTimeInSeconds = d / 1000;
|
||||
|
||||
publishPayload.release_time = releaseTimeInSeconds;
|
||||
|
||||
if (publishPayload.tags) {
|
||||
if (!publishPayload.tags.includes('odysee-livestream')) {
|
||||
publishPayload.tags.push('odysee-livestream');
|
||||
}
|
||||
} else {
|
||||
publishPayload.tags = ['odysee-livestream'];
|
||||
}
|
||||
} else if (publishPayload.tags && publishPayload.tags.includes('odysee-livestream')) {
|
||||
let newReleaseTime = new Date();
|
||||
newReleaseTime.setMilliseconds(0);
|
||||
publishPayload.release_time = newReleaseTime / 1000;
|
||||
|
||||
publishPayload.tags = publishPayload.tags.filter(tag => tag !== 'odysee-livestream');
|
||||
}
|
||||
|
||||
if (preview) {
|
||||
publishPayload.preview = true;
|
||||
|
@ -424,7 +429,7 @@ export const doCheckReflectingFiles = () => (dispatch: Dispatch, getState: GetSt
|
|||
const { checkingReflector } = state.claims;
|
||||
let reflectorCheckInterval;
|
||||
|
||||
const checkFileList = async () => {
|
||||
const checkFileList = async() => {
|
||||
const state = getState();
|
||||
const reflectingById = selectReflectingById(state);
|
||||
const ids = Object.keys(reflectingById);
@ -9,14 +9,9 @@ type SharedData = {
following?: Array<{ uri: string, notificationsDisabled: boolean }>,
|
||||
tags?: Array<string>,
|
||||
blocked?: Array<string>,
|
||||
coin_swap_codes?: Array<string>,
|
||||
settings?: any,
|
||||
app_welcome_version?: number,
|
||||
sharing_3P?: boolean,
|
||||
unpublishedCollections: CollectionGroup,
|
||||
editedCollections: CollectionGroup,
|
||||
builtinCollections: CollectionGroup,
|
||||
savedCollections: Array<string>,
|
||||
},
|
||||
};
|
||||
|
||||
|
@ -27,14 +22,9 @@ function extractUserState(rawObj: SharedData) {
|
|||
following,
|
||||
tags,
|
||||
blocked,
|
||||
coin_swap_codes,
|
||||
settings,
|
||||
app_welcome_version,
|
||||
sharing_3P,
|
||||
unpublishedCollections,
|
||||
editedCollections,
|
||||
builtinCollections,
|
||||
savedCollections,
|
||||
} = rawObj.value;
|
||||
|
||||
return {
|
||||
|
@ -42,14 +32,9 @@ function extractUserState(rawObj: SharedData) {
|
|||
...(following ? { following } : {}),
|
||||
...(tags ? { tags } : {}),
|
||||
...(blocked ? { blocked } : {}),
|
||||
...(coin_swap_codes ? { coin_swap_codes } : {}),
|
||||
...(settings ? { settings } : {}),
|
||||
...(app_welcome_version ? { app_welcome_version } : {}),
|
||||
...(sharing_3P ? { sharing_3P } : {}),
|
||||
...(unpublishedCollections ? { unpublishedCollections } : {}),
|
||||
...(editedCollections ? { editedCollections } : {}),
|
||||
...(builtinCollections ? { builtinCollections } : {}),
|
||||
...(savedCollections ? { savedCollections } : {}),
|
||||
};
|
||||
}
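// The `...(field ? { field } : {})` pattern above only includes keys that are actually present
// in the synced payload, so absent fields never clobber local state. Illustrative example:
const tags = undefined;
const blocked = ['lbry://@spam#1'];
const partial = { ...(tags ? { tags } : {}), ...(blocked ? { blocked } : {}) };
// partial => { blocked: ['lbry://@spam#1'] }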
|
||||
|
||||
|
@ -63,14 +48,9 @@ export function doPopulateSharedUserState(sharedSettings: any) {
|
|||
following,
|
||||
tags,
|
||||
blocked,
|
||||
coin_swap_codes,
|
||||
settings,
|
||||
app_welcome_version,
|
||||
sharing_3P,
|
||||
unpublishedCollections,
|
||||
editedCollections,
|
||||
builtinCollections,
|
||||
savedCollections,
|
||||
} = extractUserState(sharedSettings);
|
||||
dispatch({
|
||||
type: ACTIONS.USER_STATE_POPULATE,
|
||||
|
@ -79,14 +59,9 @@ export function doPopulateSharedUserState(sharedSettings: any) {
|
|||
following,
|
||||
tags,
|
||||
blocked,
|
||||
coinSwapCodes: coin_swap_codes,
|
||||
settings,
|
||||
welcomeVersion: app_welcome_version,
|
||||
allowAnalytics: sharing_3P,
|
||||
unpublishedCollections,
|
||||
editedCollections,
|
||||
builtinCollections,
|
||||
savedCollections,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
|
|
@ -6,16 +6,13 @@ import {
|
|||
selectPendingSupportTransactions,
|
||||
selectTxoPageParams,
|
||||
selectPendingOtherTransactions,
|
||||
selectPendingConsolidateTxid,
|
||||
selectPendingMassClaimTxid,
|
||||
} from 'redux/selectors/wallet';
|
||||
import { creditsToString } from 'util/format-credits';
|
||||
import { selectMyClaimsRaw, selectClaimsById } from 'redux/selectors/claims';
|
||||
import { doFetchChannelListMine, doFetchClaimListMine, doClaimSearch } from 'redux/actions/claims';
|
||||
import { selectMyClaimsRaw } from 'redux/selectors/claims';
|
||||
import { doFetchChannelListMine, doFetchClaimListMine } from 'redux/actions/claims';
|
||||
|
||||
const FIFTEEN_SECONDS = 15000;
|
||||
let walletBalancePromise = null;
|
||||
|
||||
export function doUpdateBalance() {
|
||||
return (dispatch, getState) => {
|
||||
const {
@ -61,8 +58,9 @@ export function doBalanceSubscribe() {
};
|
||||
}
|
||||
|
||||
export function doFetchTransactions(page = 1, pageSize = 999999) {
|
||||
export function doFetchTransactions(page = 1, pageSize = 99999) {
|
||||
return dispatch => {
|
||||
dispatch(doFetchSupports());
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_TRANSACTIONS_STARTED,
|
||||
});
|
||||
|
@ -80,63 +78,24 @@ export function doFetchTransactions(page = 1, pageSize = 999999) {
|
|||
|
||||
export function doFetchTxoPage() {
|
||||
return (dispatch, getState) => {
|
||||
const fetchId = Math.random()
|
||||
.toString(36)
|
||||
.substr(2, 9);
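// short random id used to pair this request's FETCH_TXO_PAGE_STARTED action with its completed/failed result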
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_TXO_PAGE_STARTED,
|
||||
data: fetchId,
|
||||
});
|
||||
|
||||
const state = getState();
|
||||
const queryParams = selectTxoPageParams(state);
|
||||
|
||||
Lbry.txo_list(queryParams)
|
||||
.then(res => {
|
||||
const items = res.items || [];
|
||||
const claimsById = selectClaimsById(state);
|
||||
|
||||
const channelIds = items.reduce((acc, cur) => {
|
||||
if (
|
||||
cur.type === 'support' &&
|
||||
cur.signing_channel &&
|
||||
!claimsById[cur.signing_channel.channel_id]
|
||||
) {
|
||||
acc.push(cur.signing_channel.channel_id);
|
||||
}
|
||||
return acc;
|
||||
}, []);
|
||||
|
||||
if (channelIds.length) {
|
||||
const searchParams = {
|
||||
page_size: 9999,
|
||||
page: 1,
|
||||
no_totals: true,
|
||||
claim_ids: channelIds,
|
||||
};
|
||||
// make sure redux has these channels resolved
|
||||
dispatch(doClaimSearch(searchParams));
|
||||
}
|
||||
|
||||
return res;
|
||||
})
|
||||
.then(res => {
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_TXO_PAGE_COMPLETED,
|
||||
data: {
|
||||
result: res,
|
||||
fetchId: fetchId,
|
||||
},
|
||||
data: res,
|
||||
});
|
||||
})
|
||||
.catch(e => {
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_TXO_PAGE_COMPLETED,
|
||||
data: {
|
||||
error: e.message,
|
||||
fetchId: fetchId,
|
||||
},
|
||||
data: e.message,
|
||||
});
|
||||
});
|
||||
};
@ -178,13 +137,14 @@ export function doFetchUtxoCounts() {
let resultSets = await Promise.all([
|
||||
Lbry.txo_list({ type: 'other', is_not_spent: true, page: 1, page_size: 1 }),
|
||||
Lbry.txo_list({ type: 'support', is_not_spent: true, page: 1, page_size: 1 }),
|
||||
// removing until we figure out sdk load / need it
|
||||
// Lbry.txo_list({ type: 'support', is_not_spent: true }),
|
||||
]);
|
||||
const counts = {};
|
||||
const paymentCount = resultSets[0]['total_items'];
|
||||
const supportCount = resultSets[1]['total_items'];
|
||||
// const supportCount = resultSets[1]['total_items'];
|
||||
counts['other'] = typeof paymentCount === 'number' ? paymentCount : 0;
|
||||
counts['support'] = typeof supportCount === 'number' ? supportCount : 0;
|
||||
counts['support'] = 0;
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.FETCH_UTXO_COUNT_COMPLETED,
|
||||
|
@ -210,29 +170,6 @@ export function doUtxoConsolidate() {
|
|||
|
||||
dispatch({
|
||||
type: ACTIONS.DO_UTXO_CONSOLIDATE_COMPLETED,
|
||||
data: { txid: result.txid },
|
||||
});
|
||||
dispatch(doCheckPendingTxs());
|
||||
};
|
||||
}
|
||||
|
||||
export function doTipClaimMass() {
|
||||
return async dispatch => {
|
||||
dispatch({
|
||||
type: ACTIONS.TIP_CLAIM_MASS_STARTED,
|
||||
});
|
||||
|
||||
const results = await Lbry.txo_spend({ type: 'support', is_not_my_input: true });
|
||||
const result = results[0];
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.PENDING_CONSOLIDATED_TXOS_UPDATED,
|
||||
data: { txids: [result.txid] },
|
||||
});
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.TIP_CLAIM_MASS_COMPLETED,
|
||||
data: { txid: result.txid },
|
||||
});
|
||||
dispatch(doCheckPendingTxs());
|
||||
};
|
||||
|
@ -348,7 +285,7 @@ export function doSetDraftTransactionAddress(address) {
|
|||
};
|
||||
}
|
||||
|
||||
export function doSendTip(params, isSupport, successCallback, errorCallback, shouldNotify = true) {
|
||||
export function doSendTip(params, isSupport, successCallback, errorCallback) {
|
||||
return (dispatch, getState) => {
|
||||
const state = getState();
|
||||
const balance = selectBalance(state);
|
||||
|
@ -367,25 +304,23 @@ export function doSendTip(params, isSupport, successCallback, errorCallback, sho
|
|||
return;
|
||||
}
|
||||
|
||||
const success = response => {
|
||||
if (shouldNotify) {
|
||||
dispatch(
|
||||
doToast({
|
||||
message: shouldSupport
|
||||
? __('You deposited %amount% LBRY Credits as a support!', { amount: params.amount })
|
||||
: __('You sent %amount% LBRY Credits as a tip, Mahalo!', { amount: params.amount }),
|
||||
linkText: __('History'),
|
||||
linkTarget: '/wallet',
|
||||
})
|
||||
);
|
||||
}
|
||||
const success = () => {
|
||||
dispatch(
|
||||
doToast({
|
||||
message: shouldSupport
|
||||
? __('You deposited %amount% LBRY Credits as a support!', { amount: params.amount })
|
||||
: __('You sent %amount% LBRY Credits as a tip, Mahalo!', { amount: params.amount }),
|
||||
linkText: __('History'),
|
||||
linkTarget: '/wallet',
|
||||
})
|
||||
);
|
||||
|
||||
dispatch({
|
||||
type: ACTIONS.SUPPORT_TRANSACTION_COMPLETED,
|
||||
});
|
||||
|
||||
if (successCallback) {
|
||||
successCallback(response);
|
||||
successCallback();
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -558,7 +493,6 @@ export function doWalletReconnect() {
|
|||
});
|
||||
};
|
||||
}
|
||||
|
||||
export function doWalletDecrypt() {
|
||||
return dispatch => {
|
||||
dispatch({
|
||||
|
@ -631,8 +565,6 @@ export const doCheckPendingTxs = () => (dispatch, getState) => {
|
|||
const state = getState();
|
||||
const pendingSupportTxs = selectPendingSupportTransactions(state); // {}
|
||||
const pendingConsolidateTxes = selectPendingOtherTransactions(state);
|
||||
const pendingConsTxid = selectPendingConsolidateTxid(state);
|
||||
const pendingMassClaimTxid = selectPendingMassClaimTxid(state);
|
||||
|
||||
const promises = [];
|
||||
const newPendingTxes = {};
|
||||
|
@ -678,20 +610,6 @@ export const doCheckPendingTxs = () => (dispatch, getState) => {
|
|||
}
|
||||
}
|
||||
if (noLongerPendingConsolidate.length) {
|
||||
if (noLongerPendingConsolidate.includes(pendingConsTxid)) {
|
||||
dispatch(
|
||||
doToast({
|
||||
message: __('Your wallet is finished consolidating'),
|
||||
})
|
||||
);
|
||||
}
|
||||
if (noLongerPendingConsolidate.includes(pendingMassClaimTxid)) {
|
||||
dispatch(
|
||||
doToast({
|
||||
message: __('Your tips have been collected'),
|
||||
})
|
||||
);
|
||||
}
|
||||
dispatch({
|
||||
type: ACTIONS.PENDING_CONSOLIDATED_TXOS_UPDATED,
|
||||
data: { txids: noLongerPendingConsolidate, remove: true },
|
@ -2,10 +2,9 @@
import isEqual from 'util/deep-equal';
|
||||
import { doPreferenceSet } from 'redux/actions/sync';
|
||||
|
||||
const RUN_PREFERENCES_DELAY_MS = 2000;
|
||||
const SHARED_PREFERENCE_VERSION = '0.1';
|
||||
let oldShared = {};
|
||||
let timeout;
|
||||
|
||||
export const buildSharedStateMiddleware = (
|
||||
actions: Array<string>,
|
||||
sharedStateFilters: {},
|
||||
|
@ -23,43 +22,40 @@ export const buildSharedStateMiddleware = (
|
|||
if (!actions.includes(action.type) || typeof action === 'function') {
|
||||
return next(action);
|
||||
}
|
||||
clearTimeout(timeout);
|
||||
|
||||
const actionResult = next(action);
|
||||
// Call `getState` after calling `next` to ensure the state has updated in response to the action
|
||||
function runPreferences() {
|
||||
const nextState: { user: any, settings: any } = getState();
|
||||
const syncEnabled =
|
||||
nextState.settings &&
|
||||
nextState.settings.clientSettings &&
|
||||
nextState.settings.clientSettings.enable_sync;
|
||||
const hasVerifiedEmail =
|
||||
nextState.user && nextState.user.user && nextState.user.user.has_verified_email;
|
||||
const preferenceKey = syncEnabled && hasVerifiedEmail ? 'shared' : 'local';
|
||||
const shared = {};
|
||||
const nextState: { user: any, settings: any } = getState();
|
||||
const syncEnabled =
|
||||
nextState.settings &&
|
||||
nextState.settings.clientSettings &&
|
||||
nextState.settings.clientSettings.enable_sync;
|
||||
const hasVerifiedEmail =
|
||||
nextState.user && nextState.user.user && nextState.user.user.has_verified_email;
|
||||
const preferenceKey = syncEnabled && hasVerifiedEmail ? 'shared' : 'local';
|
||||
const shared = {};
|
||||
|
||||
Object.keys(sharedStateFilters).forEach(key => {
|
||||
const filter = sharedStateFilters[key];
|
||||
const { source, property, transform } = filter;
|
||||
let value = nextState[source][property];
|
||||
if (transform) {
|
||||
value = transform(value);
|
||||
}
|
||||
|
||||
shared[key] = value;
|
||||
});
|
||||
|
||||
if (!isEqual(oldShared, shared)) {
|
||||
// only update if the preference changed from last call in the same session
|
||||
oldShared = shared;
|
||||
dispatch(doPreferenceSet(preferenceKey, shared, SHARED_PREFERENCE_VERSION));
|
||||
Object.keys(sharedStateFilters).forEach(key => {
|
||||
const filter = sharedStateFilters[key];
|
||||
const { source, property, transform } = filter;
|
||||
let value = nextState[source][property];
|
||||
if (transform) {
|
||||
value = transform(value);
|
||||
}
|
||||
|
||||
if (sharedStateCb) {
|
||||
// Pass dispatch to the callback so consumers can dispatch actions in response to the preference being set
|
||||
sharedStateCb({ dispatch, getState });
|
||||
}
|
||||
clearTimeout(timeout);
|
||||
return actionResult;
|
||||
shared[key] = value;
|
||||
});
|
||||
|
||||
if (!isEqual(oldShared, shared)) {
|
||||
// only update if the preference changed from last call in the same session
|
||||
oldShared = shared;
|
||||
dispatch(doPreferenceSet(preferenceKey, shared, SHARED_PREFERENCE_VERSION));
|
||||
}
|
||||
timeout = setTimeout(runPreferences, RUN_PREFERENCES_DELAY_MS);
|
||||
|
||||
if (sharedStateCb) {
|
||||
// Pass dispatch to the callback so consumers can dispatch actions in response to the preference being set
|
||||
sharedStateCb({ dispatch, getState });
|
||||
}
|
||||
|
||||
return actionResult;
|
||||
};
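// Usage sketch (illustrative; the action names and filter shape are assumptions inferred from how
// `sharedStateFilters` is read above, not a definitive configuration):
const exampleFilters = {
  tags: { source: 'tags', property: 'followedTags' },
  following: { source: 'subscriptions', property: 'subscriptions', transform: subs => subs.map(({ uri }) => uri) },
};
const sharedStateMiddleware = buildSharedStateMiddleware(['TOGGLE_TAG_FOLLOW', 'CHANNEL_SUBSCRIBE'], exampleFilters);
// then wire it up with applyMiddleware(thunk, sharedStateMiddleware) when creating the store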
@ -13,20 +13,17 @@ import mergeClaim from 'util/merge-claim';
type State = {
|
||||
createChannelError: ?string,
|
||||
createCollectionError: ?string,
|
||||
channelClaimCounts: { [string]: number },
|
||||
claimsByUri: { [string]: string },
|
||||
byId: { [string]: Claim },
|
||||
pendingById: { [string]: Claim }, // keep pending claims
|
||||
resolvingUris: Array<string>,
|
||||
pendingIds: Array<string>,
|
||||
reflectingById: { [string]: ReflectingUpdate },
|
||||
myClaims: ?Array<string>,
|
||||
myChannelClaims: ?Array<string>,
|
||||
myCollectionClaims: ?Array<string>,
|
||||
abandoningById: { [string]: boolean },
|
||||
fetchingChannelClaims: { [string]: number },
|
||||
fetchingMyChannels: boolean,
|
||||
fetchingMyCollections: boolean,
|
||||
fetchingClaimSearchByQuery: { [string]: boolean },
|
||||
purchaseUriSuccess: boolean,
|
||||
myPurchases: ?Array<string>,
|
||||
|
@ -37,7 +34,6 @@ type State = {
|
|||
claimSearchByQuery: { [string]: Array<string> },
|
||||
claimSearchByQueryLastPageReached: { [string]: Array<boolean> },
|
||||
creatingChannel: boolean,
|
||||
creatingCollection: boolean,
|
||||
paginatedClaimsByChannel: {
|
||||
[string]: {
|
||||
all: Array<string>,
|
||||
|
@ -47,9 +43,7 @@ type State = {
|
|||
},
|
||||
},
|
||||
updateChannelError: ?string,
|
||||
updateCollectionError: ?string,
|
||||
updatingChannel: boolean,
|
||||
updatingCollection: boolean,
|
||||
pendingChannelImport: string | boolean,
|
||||
repostLoading: boolean,
|
||||
repostError: ?string,
|
||||
|
@ -72,7 +66,6 @@ const defaultState = {
|
|||
fetchingChannelClaims: {},
|
||||
resolvingUris: [],
|
||||
myChannelClaims: undefined,
|
||||
myCollectionClaims: [],
|
||||
myClaims: undefined,
|
||||
myPurchases: undefined,
|
||||
myPurchasesPageNumber: undefined,
|
||||
|
@ -81,22 +74,17 @@ const defaultState = {
|
|||
fetchingMyPurchases: false,
|
||||
fetchingMyPurchasesError: undefined,
|
||||
fetchingMyChannels: false,
|
||||
fetchingMyCollections: false,
|
||||
abandoningById: {},
|
||||
pendingById: {},
|
||||
pendingIds: [],
|
||||
reflectingById: {},
|
||||
claimSearchError: false,
|
||||
claimSearchByQuery: {},
|
||||
claimSearchByQueryLastPageReached: {},
|
||||
fetchingClaimSearchByQuery: {},
|
||||
updateChannelError: '',
|
||||
updateCollectionError: '',
|
||||
updatingChannel: false,
|
||||
creatingChannel: false,
|
||||
createChannelError: undefined,
|
||||
updatingCollection: false,
|
||||
creatingCollection: false,
|
||||
createCollectionError: undefined,
|
||||
pendingChannelImport: false,
|
||||
repostLoading: false,
|
||||
repostError: undefined,
|
||||
|
@ -112,22 +100,29 @@ const defaultState = {
|
|||
};
|
||||
|
||||
function handleClaimAction(state: State, action: any): State {
|
||||
const { resolveInfo }: ClaimActionResolveInfo = action.data;
|
||||
const {
|
||||
resolveInfo,
|
||||
}: {
|
||||
[string]: {
|
||||
stream: ?StreamClaim,
|
||||
channel: ?ChannelClaim,
|
||||
claimsInChannel: ?number,
|
||||
},
|
||||
} = action.data;
|
||||
|
||||
const byUri = Object.assign({}, state.claimsByUri);
|
||||
const byId = Object.assign({}, state.byId);
|
||||
const channelClaimCounts = Object.assign({}, state.channelClaimCounts);
|
||||
const pendingById = state.pendingById;
|
||||
const pendingIds = state.pendingIds;
|
||||
let newResolvingUrls = new Set(state.resolvingUris);
|
||||
let myClaimIds = new Set(state.myClaims);
|
||||
|
||||
Object.entries(resolveInfo).forEach(([url: string, resolveResponse: ResolveResponse]) => {
|
||||
// $FlowFixMe
|
||||
const { claimsInChannel, stream, channel: channelFromResolve, collection } = resolveResponse;
|
||||
const channel = channelFromResolve || (stream && stream.signing_channel);
|
||||
const { claimsInChannel, stream, channel } = resolveResponse;
|
||||
|
||||
if (stream) {
|
||||
if (pendingById[stream.claim_id]) {
|
||||
if (pendingIds.includes(stream.claim_id)) {
|
||||
byId[stream.claim_id] = mergeClaim(stream, byId[stream.claim_id]);
|
||||
} else {
|
||||
byId[stream.claim_id] = stream;
|
||||
|
@ -157,37 +152,20 @@ function handleClaimAction(state: State, action: any): State {
|
|||
channelClaimCounts[channel.canonical_url] = claimsInChannel;
|
||||
}
|
||||
|
||||
if (pendingById[channel.claim_id]) {
|
||||
if (pendingIds.includes(channel.claim_id)) {
|
||||
byId[channel.claim_id] = mergeClaim(channel, byId[channel.claim_id]);
|
||||
} else {
|
||||
byId[channel.claim_id] = channel;
|
||||
}
|
||||
|
||||
// Also add the permanent_url here until lighthouse returns canonical_url for search results
|
||||
byUri[channel.permanent_url] = channel.claim_id;
|
||||
byUri[channel.canonical_url] = channel.claim_id;
|
||||
newResolvingUrls.delete(channel.canonical_url);
|
||||
newResolvingUrls.delete(channel.permanent_url);
|
||||
}
|
||||
|
||||
if (collection) {
|
||||
if (pendingById[collection.claim_id]) {
|
||||
byId[collection.claim_id] = mergeClaim(collection, byId[collection.claim_id]);
|
||||
} else {
|
||||
byId[collection.claim_id] = collection;
|
||||
}
|
||||
byUri[url] = collection.claim_id;
|
||||
byUri[collection.canonical_url] = collection.claim_id;
|
||||
byUri[collection.permanent_url] = collection.claim_id;
|
||||
newResolvingUrls.delete(collection.canonical_url);
|
||||
newResolvingUrls.delete(collection.permanent_url);
|
||||
|
||||
if (collection.is_my_output) {
|
||||
myClaimIds.add(collection.claim_id);
|
||||
}
|
||||
}
|
||||
|
||||
newResolvingUrls.delete(url);
|
||||
if (!stream && !channel && !collection && !pendingById[byUri[url]]) {
|
||||
if (!stream && !channel && !pendingIds.includes(byUri[url])) {
|
||||
byUri[url] = null;
|
||||
}
|
||||
});
|
||||
|
@ -230,33 +208,34 @@ reducers[ACTIONS.FETCH_CLAIM_LIST_MINE_STARTED] = (state: State): State =>
|
|||
});
|
||||
|
||||
reducers[ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED] = (state: State, action: any): State => {
|
||||
const { result }: { result: ClaimListResponse } = action.data;
|
||||
const { result, resolve }: { result: ClaimListResponse, resolve: boolean } = action.data;
|
||||
const claims = result.items;
|
||||
const page = result.page;
|
||||
const totalItems = result.total_items;
|
||||
|
||||
const byId = Object.assign({}, state.byId);
|
||||
const byUri = Object.assign({}, state.claimsByUri);
|
||||
const pendingById = Object.assign({}, state.pendingById);
|
||||
const pendingIds = state.pendingIds || [];
|
||||
let myClaimIds = new Set(state.myClaims);
|
||||
let urlsForCurrentPage = [];
|
||||
|
||||
const pendingIdSet = new Set(pendingIds);
|
||||
|
||||
claims.forEach((claim: Claim) => {
|
||||
const { permanent_url: permanentUri, claim_id: claimId, canonical_url: canonicalUri } = claim;
|
||||
const { permanent_url: permanentUri, claim_id: claimId } = claim;
|
||||
if (claim.type && claim.type.match(/claim|update/)) {
|
||||
urlsForCurrentPage.push(permanentUri);
|
||||
if (claim.confirmations < 1) {
|
||||
pendingById[claimId] = claim;
|
||||
if (byId[claimId]) {
|
||||
byId[claimId] = mergeClaim(claim, byId[claimId]);
|
||||
} else {
|
||||
byId[claimId] = claim;
|
||||
}
|
||||
pendingIdSet.add(claimId);
|
||||
} else if (!resolve && pendingIdSet.has(claimId) && claim.confirmations > 0) {
|
||||
pendingIdSet.delete(claimId);
|
||||
}
|
||||
if (pendingIds.includes(claimId)) {
|
||||
byId[claimId] = mergeClaim(claim, byId[claimId]);
|
||||
} else {
|
||||
byId[claimId] = claim;
|
||||
}
|
||||
byUri[permanentUri] = claimId;
|
||||
byUri[canonicalUri] = claimId;
|
||||
myClaimIds.add(claimId);
|
||||
}
|
||||
});
|
||||
|
@ -265,7 +244,7 @@ reducers[ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED] = (state: State, action: any):
|
|||
isFetchingClaimListMine: false,
|
||||
myClaims: Array.from(myClaimIds),
|
||||
byId,
|
||||
pendingById,
|
||||
pendingIds: Array.from(pendingIdSet),
|
||||
claimsByUri: byUri,
|
||||
myClaimsPageResults: urlsForCurrentPage,
|
||||
myClaimsPageNumber: page,
|
||||
|
@ -278,8 +257,9 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_STARTED] = (state: State): State =>
|
|||
|
||||
reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): State => {
|
||||
const { claims }: { claims: Array<ChannelClaim> } = action.data;
|
||||
const myClaims = state.myClaims || [];
|
||||
let myClaimIds = new Set(state.myClaims);
|
||||
const pendingById = Object.assign({}, state.pendingById);
|
||||
const pendingIds = state.pendingIds || [];
|
||||
let myChannelClaims;
|
||||
const byId = Object.assign({}, state.byId);
|
||||
const byUri = Object.assign({}, state.claimsByUri);
|
||||
|
@ -293,12 +273,7 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): St
|
|||
claims.forEach(claim => {
|
||||
const { meta } = claim;
|
||||
const { claims_in_channel: claimsInChannel } = claim.meta;
|
||||
const {
|
||||
canonical_url: canonicalUrl,
|
||||
permanent_url: permanentUrl,
|
||||
claim_id: claimId,
|
||||
confirmations,
|
||||
} = claim;
|
||||
const { canonical_url: canonicalUrl, permanent_url: permanentUrl, claim_id: claimId } = claim;
|
||||
|
||||
byUri[canonicalUrl] = claimId;
|
||||
byUri[permanentUrl] = claimId;
|
||||
|
@ -307,14 +282,7 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): St
|
|||
|
||||
// $FlowFixMe
|
||||
myChannelClaims.add(claimId);
|
||||
if (confirmations < 1) {
|
||||
pendingById[claimId] = claim;
|
||||
if (byId[claimId]) {
|
||||
byId[claimId] = mergeClaim(claim, byId[claimId]);
|
||||
} else {
|
||||
byId[claimId] = claim;
|
||||
}
|
||||
} else {
|
||||
if (!pendingIds.some(c => c === claimId)) {
|
||||
byId[claimId] = claim;
|
||||
}
|
||||
myClaimIds.add(claimId);
|
||||
|
@ -323,7 +291,6 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_COMPLETED] = (state: State, action: any): St
|
|||
|
||||
return Object.assign({}, state, {
|
||||
byId,
|
||||
pendingById,
|
||||
claimsByUri: byUri,
|
||||
channelClaimCounts,
|
||||
fetchingMyChannels: false,
|
||||
|
@ -338,66 +305,6 @@ reducers[ACTIONS.FETCH_CHANNEL_LIST_FAILED] = (state: State, action: any): State
|
|||
});
|
||||
};
|
||||
|
||||
reducers[ACTIONS.FETCH_COLLECTION_LIST_STARTED] = (state: State): State => ({
|
||||
...state,
|
||||
fetchingMyCollections: true,
|
||||
});
|
||||
|
||||
reducers[ACTIONS.FETCH_COLLECTION_LIST_COMPLETED] = (state: State, action: any): State => {
|
||||
const { claims }: { claims: Array<CollectionClaim> } = action.data;
|
||||
const myClaims = state.myClaims || [];
|
||||
let myClaimIds = new Set(myClaims);
|
||||
const pendingById = Object.assign({}, state.pendingById);
|
||||
let myCollectionClaimsSet = new Set([]);
|
||||
const byId = Object.assign({}, state.byId);
|
||||
const byUri = Object.assign({}, state.claimsByUri);
|
||||
|
||||
if (claims.length) {
|
||||
myCollectionClaimsSet = new Set(state.myCollectionClaims);
|
||||
claims.forEach(claim => {
|
||||
const { meta } = claim;
|
||||
const {
|
||||
canonical_url: canonicalUrl,
|
||||
permanent_url: permanentUrl,
|
||||
claim_id: claimId,
|
||||
confirmations,
|
||||
} = claim;
|
||||
|
||||
byUri[canonicalUrl] = claimId;
|
||||
byUri[permanentUrl] = claimId;
|
||||
|
||||
// $FlowFixMe
|
||||
myCollectionClaimsSet.add(claimId);
|
||||
// we don't want to overwrite a pending result with a resolve
|
||||
if (confirmations < 1) {
|
||||
pendingById[claimId] = claim;
|
||||
if (byId[claimId]) {
|
||||
byId[claimId] = mergeClaim(claim, byId[claimId]);
|
||||
} else {
|
||||
byId[claimId] = claim;
|
||||
}
|
||||
} else {
|
||||
byId[claimId] = claim;
|
||||
}
|
||||
myClaimIds.add(claimId);
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
...state,
|
||||
byId,
|
||||
pendingById,
|
||||
claimsByUri: byUri,
|
||||
fetchingMyCollections: false,
|
||||
myCollectionClaims: Array.from(myCollectionClaimsSet),
|
||||
myClaims: myClaimIds ? Array.from(myClaimIds) : null,
|
||||
};
|
||||
};
|
||||
|
||||
reducers[ACTIONS.FETCH_COLLECTION_LIST_FAILED] = (state: State): State => {
|
||||
return { ...state, fetchingMyCollections: false };
|
||||
};
|
||||
|
||||
reducers[ACTIONS.FETCH_CHANNEL_CLAIMS_STARTED] = (state: State, action: any): State => {
|
||||
const { uri, page } = action.data;
|
||||
const fetchingChannelClaims = Object.assign({}, state.fetchingChannelClaims);
|
||||
|
@ -479,8 +386,9 @@ reducers[ACTIONS.ABANDON_CLAIM_STARTED] = (state: State, action: any): State =>
|
|||
reducers[ACTIONS.UPDATE_PENDING_CLAIMS] = (state: State, action: any): State => {
|
||||
const { claims: pendingClaims }: { claims: Array<Claim> } = action.data;
|
||||
const byId = Object.assign({}, state.byId);
|
||||
const pendingById = Object.assign({}, state.pendingById);
|
||||
const byUri = Object.assign({}, state.claimsByUri);
|
||||
const pendingIds = state.pendingIds;
|
||||
const pendingIdSet = new Set(pendingIds);
|
||||
let myClaimIds = new Set(state.myClaims);
|
||||
const myChannelClaims = new Set(state.myChannelClaims);
|
||||
|
||||
|
@ -488,7 +396,7 @@ reducers[ACTIONS.UPDATE_PENDING_CLAIMS] = (state: State, action: any): State =>
|
|||
pendingClaims.forEach((claim: Claim) => {
|
||||
let newClaim;
|
||||
const { permanent_url: uri, claim_id: claimId, type, value_type: valueType } = claim;
|
||||
pendingById[claimId] = claim; // make sure we don't need to merge?
|
||||
pendingIdSet.add(claimId);
|
||||
const oldClaim = byId[claimId];
|
||||
if (oldClaim && oldClaim.canonical_url) {
|
||||
newClaim = mergeClaim(oldClaim, claim);
|
||||
|
@ -508,22 +416,21 @@ reducers[ACTIONS.UPDATE_PENDING_CLAIMS] = (state: State, action: any): State =>
|
|||
return Object.assign({}, state, {
|
||||
myClaims: Array.from(myClaimIds),
|
||||
byId,
|
||||
pendingById,
|
||||
myChannelClaims: Array.from(myChannelClaims),
|
||||
claimsByUri: byUri,
|
||||
pendingIds: Array.from(pendingIdSet),
|
||||
});
|
||||
};
|
||||
|
||||
reducers[ACTIONS.UPDATE_CONFIRMED_CLAIMS] = (state: State, action: any): State => {
|
||||
const {
|
||||
claims: confirmedClaims,
|
||||
pending: pendingClaims,
|
||||
}: { claims: Array<Claim>, pending: { [string]: Claim } } = action.data;
|
||||
const { claims: confirmedClaims }: { claims: Array<Claim> } = action.data;
|
||||
const byId = Object.assign({}, state.byId);
|
||||
const byUri = Object.assign({}, state.claimsByUri);
|
||||
//
|
||||
const pendingIds = state.pendingIds;
|
||||
const pendingIdSet = new Set(pendingIds);
|
||||
|
||||
confirmedClaims.forEach((claim: GenericClaim) => {
|
||||
const { claim_id: claimId, type } = claim;
|
||||
const { permanent_url: permanentUri, claim_id: claimId, type } = claim;
|
||||
let newClaim = claim;
|
||||
const oldClaim = byId[claimId];
|
||||
if (oldClaim && oldClaim.canonical_url) {
|
||||
|
@ -531,10 +438,11 @@ reducers[ACTIONS.UPDATE_CONFIRMED_CLAIMS] = (state: State, action: any): State =
|
|||
}
|
||||
if (type && type.match(/claim|update|channel/)) {
|
||||
byId[claimId] = newClaim;
|
||||
pendingIdSet.delete(claimId);
|
||||
}
|
||||
});
|
||||
return Object.assign({}, state, {
|
||||
pendingById: pendingClaims,
|
||||
pendingIds: Array.from(pendingIdSet),
|
||||
byId,
|
||||
claimsByUri: byUri,
|
||||
});
|
||||
|
@ -546,7 +454,6 @@ reducers[ACTIONS.ABANDON_CLAIM_SUCCEEDED] = (state: State, action: any): State =
|
|||
const newMyClaims = state.myClaims ? state.myClaims.slice() : [];
|
||||
const newMyChannelClaims = state.myChannelClaims ? state.myChannelClaims.slice() : [];
|
||||
const claimsByUri = Object.assign({}, state.claimsByUri);
|
||||
const newMyCollectionClaims = state.myCollectionClaims ? state.myCollectionClaims.slice() : [];
|
||||
|
||||
Object.keys(claimsByUri).forEach(uri => {
|
||||
if (claimsByUri[uri] === claimId) {
|
||||
|
@ -555,14 +462,12 @@ reducers[ACTIONS.ABANDON_CLAIM_SUCCEEDED] = (state: State, action: any): State =
|
|||
});
|
||||
const myClaims = newMyClaims.filter(i => i !== claimId);
|
||||
const myChannelClaims = newMyChannelClaims.filter(i => i !== claimId);
|
||||
const myCollectionClaims = newMyCollectionClaims.filter(i => i !== claimId);
|
||||
|
||||
delete byId[claimId];
|
||||
|
||||
return Object.assign({}, state, {
|
||||
myClaims,
|
||||
myChannelClaims,
|
||||
myCollectionClaims,
|
||||
byId,
|
||||
claimsByUri,
|
||||
});
|
||||
|
@ -614,61 +519,6 @@ reducers[ACTIONS.UPDATE_CHANNEL_FAILED] = (state: State, action: any): State =>
|
|||
});
|
||||
};
|
||||
|
||||
reducers[ACTIONS.CLEAR_COLLECTION_ERRORS] = (state: State): State => ({
|
||||
...state,
|
||||
createCollectionError: null,
|
||||
updateCollectionError: null,
|
||||
});
|
||||
|
||||
reducers[ACTIONS.COLLECTION_PUBLISH_STARTED] = (state: State): State => ({
|
||||
...state,
|
||||
creatingCollection: true,
|
||||
createCollectionError: null,
|
||||
});
|
||||
|
||||
reducers[ACTIONS.COLLECTION_PUBLISH_COMPLETED] = (state: State, action: any): State => {
|
||||
const myCollections = state.myCollectionClaims || [];
|
||||
const myClaims = state.myClaims || [];
|
||||
const { claimId } = action.data;
|
||||
let myClaimIds = new Set(myClaims);
|
||||
let myCollectionClaimsSet = new Set(myCollections);
|
||||
myClaimIds.add(claimId);
|
||||
myCollectionClaimsSet.add(claimId);
|
||||
return Object.assign({}, state, {
|
||||
creatingCollection: false,
|
||||
myClaims: Array.from(myClaimIds),
|
||||
myCollectionClaims: Array.from(myCollectionClaimsSet),
|
||||
});
|
||||
};
|
||||
|
||||
reducers[ACTIONS.COLLECTION_PUBLISH_FAILED] = (state: State, action: any): State => {
|
||||
return Object.assign({}, state, {
|
||||
creatingCollection: false,
|
||||
createCollectionError: action.data.error,
|
||||
});
|
||||
};
|
||||
|
||||
reducers[ACTIONS.COLLECTION_PUBLISH_UPDATE_STARTED] = (state: State, action: any): State => {
|
||||
return Object.assign({}, state, {
|
||||
updateCollectionError: '',
|
||||
updatingCollection: true,
|
||||
});
|
||||
};
|
||||
|
||||
reducers[ACTIONS.COLLECTION_PUBLISH_UPDATE_COMPLETED] = (state: State, action: any): State => {
|
||||
return Object.assign({}, state, {
|
||||
updateCollectionError: '',
|
||||
updatingCollection: false,
|
||||
});
|
||||
};
|
||||
|
||||
reducers[ACTIONS.COLLECTION_PUBLISH_UPDATE_FAILED] = (state: State, action: any): State => {
|
||||
return Object.assign({}, state, {
|
||||
updateCollectionError: action.data.error,
|
||||
updatingCollection: false,
|
||||
});
|
||||
};
|
||||
|
||||
reducers[ACTIONS.IMPORT_CHANNEL_STARTED] = (state: State): State =>
|
||||
Object.assign({}, state, { pendingChannelImports: true });
|
||||
|
||||
|
@ -720,10 +570,7 @@ reducers[ACTIONS.CLAIM_SEARCH_FAILED] = (state: State, action: any): State => {
|
|||
const { query } = action.data;
|
||||
const claimSearchByQuery = Object.assign({}, state.claimSearchByQuery);
|
||||
const fetchingClaimSearchByQuery = Object.assign({}, state.fetchingClaimSearchByQuery);
|
||||
const claimSearchByQueryLastPageReached = Object.assign(
|
||||
{},
|
||||
state.claimSearchByQueryLastPageReached
|
||||
);
|
||||
const claimSearchByQueryLastPageReached = Object.assign({}, state.claimSearchByQueryLastPageReached);
|
||||
|
||||
delete fetchingClaimSearchByQuery[query];
@ -1,239 +0,0 @@
// @flow
|
||||
import { handleActions } from 'util/redux-utils';
|
||||
import * as ACTIONS from 'constants/action_types';
|
||||
import * as COLS from 'constants/collections';
|
||||
|
||||
const getTimestamp = () => {
|
||||
return Math.floor(Date.now() / 1000);
|
||||
};
|
||||
|
||||
const defaultState: CollectionState = {
|
||||
builtin: {
|
||||
watchlater: {
|
||||
items: [],
|
||||
id: COLS.WATCH_LATER_ID,
|
||||
name: 'Watch Later',
|
||||
updatedAt: getTimestamp(),
|
||||
type: COLS.COL_TYPE_PLAYLIST,
|
||||
},
|
||||
favorites: {
|
||||
items: [],
|
||||
id: COLS.FAVORITES_ID,
|
||||
name: 'Favorites',
|
||||
type: COLS.COL_TYPE_PLAYLIST,
|
||||
updatedAt: getTimestamp(),
|
||||
},
|
||||
},
|
||||
resolved: {},
|
||||
unpublished: {}, // sync
|
||||
edited: {},
|
||||
pending: {},
|
||||
saved: [],
|
||||
isResolvingCollectionById: {},
|
||||
error: null,
|
||||
};
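// Rough meaning of the buckets above, inferred from how this reducer uses them:
// builtin     - always-present lists (Watch Later, Favorites)
// unpublished - local-only lists, synced through user state
// edited      - local edits layered over an already-published collection
// pending     - published or updated collections whose claims are not yet confirmed
// resolved    - collections fetched from the network
// saved       - ids of collections saved for the library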
|
||||
|
||||
const collectionsReducer = handleActions(
|
||||
{
|
||||
[ACTIONS.COLLECTION_NEW]: (state, action) => {
|
||||
const { entry: params } = action.data; // { id:, items: Array<string>}
|
||||
// entry
|
||||
const newListTemplate = {
|
||||
id: params.id,
|
||||
name: params.name,
|
||||
items: [],
|
||||
updatedAt: getTimestamp(),
|
||||
type: params.type,
|
||||
};
|
||||
|
||||
const newList = Object.assign({}, newListTemplate, { ...params });
|
||||
const { unpublished: lists } = state;
|
||||
const newLists = Object.assign({}, lists, { [params.id]: newList });
|
||||
|
||||
return {
|
||||
...state,
|
||||
unpublished: newLists,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.COLLECTION_DELETE]: (state, action) => {
|
||||
const { id, collectionKey } = action.data;
|
||||
const { edited: editList, unpublished: unpublishedList, pending: pendingList } = state;
|
||||
const newEditList = Object.assign({}, editList);
|
||||
const newUnpublishedList = Object.assign({}, unpublishedList);
|
||||
|
||||
const newPendingList = Object.assign({}, pendingList);
|
||||
|
||||
if (collectionKey && state[collectionKey] && state[collectionKey][id]) {
|
||||
const newList = Object.assign({}, state[collectionKey]);
|
||||
delete newList[id];
|
||||
return {
|
||||
...state,
|
||||
[collectionKey]: newList,
|
||||
};
|
||||
} else {
|
||||
if (newEditList[id]) {
|
||||
delete newEditList[id];
|
||||
} else if (newUnpublishedList[id]) {
|
||||
delete newUnpublishedList[id];
|
||||
} else if (newPendingList[id]) {
|
||||
delete newPendingList[id];
|
||||
}
|
||||
}
|
||||
return {
|
||||
...state,
|
||||
edited: newEditList,
|
||||
unpublished: newUnpublishedList,
|
||||
pending: newPendingList,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.COLLECTION_PENDING]: (state, action) => {
|
||||
const { localId, claimId } = action.data;
|
||||
const {
|
||||
resolved: resolvedList,
|
||||
edited: editList,
|
||||
unpublished: unpublishedList,
|
||||
pending: pendingList,
|
||||
} = state;
|
||||
|
||||
const newEditList = Object.assign({}, editList);
|
||||
const newResolvedList = Object.assign({}, resolvedList);
|
||||
const newUnpublishedList = Object.assign({}, unpublishedList);
|
||||
const newPendingList = Object.assign({}, pendingList);
|
||||
|
||||
if (localId) {
|
||||
// new publish
|
||||
newPendingList[claimId] = Object.assign({}, newUnpublishedList[localId] || {});
|
||||
delete newUnpublishedList[localId];
|
||||
} else {
|
||||
// edit update
|
||||
newPendingList[claimId] = Object.assign(
|
||||
{},
|
||||
newEditList[claimId] || newResolvedList[claimId]
|
||||
);
|
||||
delete newEditList[claimId];
|
||||
}
|
||||
|
||||
return {
|
||||
...state,
|
||||
edited: newEditList,
|
||||
unpublished: newUnpublishedList,
|
||||
pending: newPendingList,
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.COLLECTION_EDIT]: (state, action) => {
|
||||
const { id, collectionKey, collection } = action.data;
|
||||
|
||||
if (COLS.BUILTIN_LISTS.includes(id)) {
|
||||
const { builtin: lists } = state;
|
||||
return {
|
||||
...state,
|
||||
[collectionKey]: { ...lists, [id]: collection },
|
||||
};
|
||||
}
|
||||
|
||||
if (collectionKey === 'edited') {
|
||||
const { edited: lists } = state;
|
||||
return {
|
||||
...state,
|
||||
edited: { ...lists, [id]: collection },
|
||||
};
|
||||
}
|
||||
const { unpublished: lists } = state;
|
||||
return {
|
||||
...state,
|
||||
unpublished: { ...lists, [id]: collection },
|
||||
};
|
||||
},
|
||||
|
||||
[ACTIONS.COLLECTION_ERROR]: (state, action) => {
|
||||
return Object.assign({}, state, {
|
||||
error: action.data.message,
|
||||
});
|
||||
},
|
||||
|
||||
[ACTIONS.COLLECTION_ITEMS_RESOLVE_STARTED]: (state, action) => {
|
||||
const { ids } = action.data;
|
||||
const { isResolvingCollectionById } = state;
|
||||
const newResolving = Object.assign({}, isResolvingCollectionById);
|
||||
ids.forEach(id => {
|
||||
newResolving[id] = true;
|
||||
});
|
||||
return Object.assign({}, state, {
|
||||
...state,
|
||||
error: '',
|
||||
isResolvingCollectionById: newResolving,
|
||||
});
|
||||
},
|
||||
[ACTIONS.USER_STATE_POPULATE]: (state, action) => {
|
||||
const {
|
||||
builtinCollections,
|
||||
savedCollections,
|
||||
unpublishedCollections,
|
||||
editedCollections,
|
||||
} = action.data;
|
||||
return {
|
||||
...state,
|
||||
edited: editedCollections || state.edited,
|
||||
unpublished: unpublishedCollections || state.unpublished,
|
||||
builtin: builtinCollections || state.builtin,
|
||||
saved: savedCollections || state.saved,
|
||||
};
|
||||
},
|
||||
[ACTIONS.COLLECTION_ITEMS_RESOLVE_COMPLETED]: (state, action) => {
|
||||
const { resolvedCollections, failedCollectionIds } = action.data;
|
||||
const { pending, edited, isResolvingCollectionById, resolved } = state;
|
||||
const newPending = Object.assign({}, pending);
|
||||
const newEdited = Object.assign({}, edited);
|
||||
const newResolved = Object.assign({}, resolved, resolvedCollections);
|
||||
|
||||
const resolvedIds = Object.keys(resolvedCollections);
|
||||
const newResolving = Object.assign({}, isResolvingCollectionById);
|
||||
if (resolvedCollections && Object.keys(resolvedCollections).length) {
|
||||
resolvedIds.forEach(resolvedId => {
|
||||
if (newEdited[resolvedId]) {
|
||||
if (newEdited[resolvedId]['updatedAt'] < resolvedCollections[resolvedId]['updatedAt']) {
|
||||
delete newEdited[resolvedId];
|
||||
}
|
||||
}
|
||||
delete newResolving[resolvedId];
|
||||
if (newPending[resolvedId]) {
|
||||
delete newPending[resolvedId];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (failedCollectionIds && Object.keys(failedCollectionIds).length) {
|
||||
failedCollectionIds.forEach(failedId => {
|
||||
delete newResolving[failedId];
|
||||
});
|
||||
}
|
||||
|
||||
return Object.assign({}, state, {
|
||||
...state,
|
||||
pending: newPending,
|
||||
resolved: newResolved,
|
||||
edited: newEdited,
|
||||
isResolvingCollectionById: newResolving,
|
||||
});
|
||||
},
|
||||
[ACTIONS.COLLECTION_ITEMS_RESOLVE_FAILED]: (state, action) => {
|
||||
const { ids } = action.data;
|
||||
const { isResolvingCollectionById } = state;
|
||||
const newResolving = Object.assign({}, isResolvingCollectionById);
|
||||
ids.forEach(id => {
|
||||
delete newResolving[id];
|
||||
});
|
||||
return Object.assign({}, state, {
|
||||
...state,
|
||||
isResolvingCollectionById: newResolving,
|
||||
error: action.data.message,
|
||||
});
|
||||
},
|
||||
},
|
||||
defaultState
|
||||
);
|
||||
|
||||
export { collectionsReducer };
|
|
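As a reading aid, a minimal sketch of how these handlers are exercised; the action payload shapes mirror what the reducer destructures above, while the ids and names are made up:

// Illustrative only: run hypothetical actions through the reducer and inspect the slices.
let collectionsState = collectionsReducer(undefined, { type: '@@INIT' }); // yields defaultState

collectionsState = collectionsReducer(collectionsState, {
  type: ACTIONS.COLLECTION_NEW,
  data: { entry: { id: 'local-1', name: 'Weekend mix', type: 'playlist', items: [] } },
});
// collectionsState.unpublished['local-1'] now holds the new list, stamped with updatedAt.

collectionsState = collectionsReducer(collectionsState, {
  type: ACTIONS.COLLECTION_DELETE,
  data: { id: 'local-1', collectionKey: 'unpublished' },
});
// 'local-1' is removed from the unpublished slice.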
@@ -9,7 +9,6 @@ type PublishState = {
  editingURI: ?string,
  fileText: ?string,
  filePath: ?string,
  remoteFileUrl: ?string,
  contentIsFree: boolean,
  fileDur: number,
  fileSize: number,

@@ -22,11 +21,8 @@ type PublishState = {
  thumbnail_url: string,
  thumbnailPath: string,
  uploadThumbnailStatus: string,
  thumbnailError: ?boolean,
  description: string,
  language: string,
  releaseTime: ?number,
  releaseTimeEdited: ?number,
  channel: string,
  channelId: ?string,
  name: string,

@@ -47,7 +43,6 @@ const defaultState: PublishState = {
  fileDur: 0,
  fileSize: 0,
  fileVid: false,
  remoteFileUrl: undefined,
  contentIsFree: true,
  fee: {
    amount: 1,

@@ -57,11 +52,8 @@ const defaultState: PublishState = {
  thumbnail_url: '',
  thumbnailPath: '',
  uploadThumbnailStatus: THUMBNAIL_STATUSES.API_DOWN,
  thumbnailError: undefined,
  description: '',
  language: '',
  releaseTime: undefined,
  releaseTimeEdited: undefined,
  nsfw: false,
  channel: CHANNEL_ANONYMOUS,
  channelId: '',

@@ -91,7 +83,6 @@ export const publishReducer = handleActions(
    },
    [ACTIONS.CLEAR_PUBLISH]: (state: PublishState): PublishState => ({
      ...defaultState,
      uri: undefined,
      channel: state.channel,
      bid: state.bid,
      optimize: state.optimize,
@@ -47,15 +47,10 @@ type WalletState = {
  txoFetchParams: {},
  utxoCounts: {},
  txoPage: any,
  fetchId: string,
  fetchingTxos: boolean,
  fetchingTxosError?: string,
  consolidatingUtxos: boolean,
  pendingConsolidateTxid?: string,
  massClaimingTips: boolean,
  pendingMassClaimTxid?: string,
  pendingSupportTransactions: {}, // { claimId: {txid: 123, amount 12.3}, }
  pendingTxos: Array<string>,
  pendingConsolidateTxos: Array<string>,
  abandonClaimSupportError?: string,
};

@@ -96,15 +91,11 @@ const defaultState = {
  fetchingUtxoCounts: false,
  fetchingUtxoError: undefined,
  consolidatingUtxos: false,
  pendingConsolidateTxid: null,
  massClaimingTips: false,
  pendingMassClaimTxid: null,
  txoPage: {},
  fetchId: '',
  fetchingTxos: false,
  fetchingTxosError: undefined,
  pendingSupportTransactions: {},
  pendingTxos: [],
  pendingConsolidateTxos: [],

  abandonClaimSupportError: undefined,
};

@@ -131,26 +122,18 @@ export const walletReducer = handleActions(
      };
    },

    [ACTIONS.FETCH_TXO_PAGE_STARTED]: (state: WalletState, action) => {
    [ACTIONS.FETCH_TXO_PAGE_STARTED]: (state: WalletState) => {
      return {
        ...state,
        fetchId: action.data,
        fetchingTxos: true,
        fetchingTxosError: undefined,
      };
    },

    [ACTIONS.FETCH_TXO_PAGE_COMPLETED]: (state: WalletState, action) => {
      if (state.fetchId !== action.data.fetchId) {
        // Leave 'state' and 'fetchingTxos' alone. The latter would ensure
        // the spinner would continue spinning for the latest transaction.
        return { ...state };
      }

      return {
        ...state,
        txoPage: action.data.result,
        fetchId: '',
        txoPage: action.data,
        fetchingTxos: false,
      };
    },

@@ -159,7 +142,6 @@ export const walletReducer = handleActions(
      return {
        ...state,
        txoPage: {},
        fetchId: '',
        fetchingTxos: false,
        fetchingTxosError: action.data,
      };

@@ -195,11 +177,9 @@ export const walletReducer = handleActions(
    },

    [ACTIONS.DO_UTXO_CONSOLIDATE_COMPLETED]: (state: WalletState, action) => {
      const { txid } = action.data;
      return {
        ...state,
        consolidatingUtxos: false,
        pendingConsolidateTxid: txid,
      };
    },

@@ -210,51 +190,17 @@ export const walletReducer = handleActions(
      };
    },

    [ACTIONS.TIP_CLAIM_MASS_STARTED]: (state: WalletState) => {
      return {
        ...state,
        massClaimingTips: true,
      };
    },

    [ACTIONS.TIP_CLAIM_MASS_COMPLETED]: (state: WalletState, action) => {
      const { txid } = action.data;
      return {
        ...state,
        massClaimingTips: false,
        pendingMassClaimTxid: txid,
      };
    },

    [ACTIONS.TIP_CLAIM_MASS_FAILED]: (state: WalletState, action) => {
      return {
        ...state,
        massClaimingTips: false,
      };
    },

    [ACTIONS.PENDING_CONSOLIDATED_TXOS_UPDATED]: (state: WalletState, action) => {
      const { pendingTxos, pendingMassClaimTxid, pendingConsolidateTxid } = state;
      const pendingTxos = state.pendingConsolidateTxos;

      const { txids, remove } = action.data;

      if (remove) {
        const newTxos = pendingTxos.filter(txo => !txids.includes(txo));
        const newPendingMassClaimTxid = txids.includes(pendingMassClaimTxid)
          ? undefined
          : pendingMassClaimTxid;
        const newPendingConsolidateTxid = txids.includes(pendingConsolidateTxid)
          ? undefined
          : pendingConsolidateTxid;
        return {
          ...state,
          pendingTxos: newTxos,
          pendingMassClaimTxid: newPendingMassClaimTxid,
          pendingConsolidateTxid: newPendingConsolidateTxid,
        };
        return { ...state, pendingConsolidateTxos: newTxos };
      } else {
        const newPendingSet = new Set([...pendingTxos, ...txids]);
        return { ...state, pendingTxos: Array.from(newPendingSet) };
        return { ...state, pendingConsolidateTxos: Array.from(newPendingSet) };
      }
    },
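For context, the fetchId check being dropped in FETCH_TXO_PAGE_COMPLETED above is a stale-response guard; a standalone sketch of that pattern follows (handler names and request ids are illustrative, not part of the diff):

// Tag each txo-page request with an id; ignore completions whose id no longer matches.
let txoState = { fetchId: '', fetchingTxos: false, txoPage: {} };

const startTxoFetch = (state, fetchId) => ({ ...state, fetchId, fetchingTxos: true });

const completeTxoFetch = (state, { fetchId, result }) => {
  if (state.fetchId !== fetchId) {
    // A newer request superseded this one; keep the spinner for the latest fetch.
    return state;
  }
  return { ...state, fetchId: '', fetchingTxos: false, txoPage: result };
};

txoState = startTxoFetch(txoState, 'req-2');
txoState = completeTxoFetch(txoState, { fetchId: 'req-1', result: {} }); // stale: ignored
txoState = completeTxoFetch(txoState, { fetchId: 'req-2', result: { items: [] } }); // applied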
@@ -1,30 +1,17 @@
// @flow
import { normalizeURI, parseURI } from 'lbryURI';
import { normalizeURI, buildURI, parseURI } from 'lbryURI';
import { selectSupportsByOutpoint } from 'redux/selectors/wallet';
import { createSelector } from 'reselect';
import { isClaimNsfw, filterClaims } from 'util/claim';
import * as CLAIM from 'constants/claim';
import { PAGE_SIZE } from 'constants/claim';

const selectState = state => state.claims || {};

export const selectById = createSelector(
export const selectClaimsById = createSelector(
  selectState,
  state => state.byId || {}
);

export const selectPendingClaimsById = createSelector(
  selectState,
  state => state.pendingById || {}
);

export const selectClaimsById = createSelector(
  selectById,
  selectPendingClaimsById,
  (byId, pendingById) => {
    return Object.assign(byId, pendingById); // do I need merged to keep metadata?
  }
);

export const selectClaimIdsByUri = createSelector(
  selectState,
  state => state.claimsByUri || {}

@@ -85,53 +72,28 @@ export const selectAllClaimsByChannel = createSelector(

export const selectPendingIds = createSelector(
  selectState,
  state => Object.keys(state.pendingById) || []
);

export const selectPendingClaims = createSelector(
  selectPendingClaimsById,
  pendingById => Object.values(pendingById)
  state => state.pendingIds || []
);

export const makeSelectClaimIsPending = (uri: string) =>
  createSelector(
    selectClaimIdsByUri,
    selectPendingClaimsById,
    (idsByUri, pendingById) => {
    selectPendingIds,
    (idsByUri, pendingIds) => {
      const claimId = idsByUri[normalizeURI(uri)];

      if (claimId) {
        return Boolean(pendingById[claimId]);
        return pendingIds.some(i => i === claimId);
      }
      return false;
    }
  );

export const makeSelectClaimIdIsPending = (claimId: string) =>
  createSelector(
    selectPendingClaimsById,
    pendingById => {
      return Boolean(pendingById[claimId]);
    }
  );

export const makeSelectClaimIdForUri = (uri: string) =>
  createSelector(
    selectClaimIdsByUri,
    claimIds => claimIds[uri]
  );

export const selectReflectingById = createSelector(
  selectState,
  state => state.reflectingById
);

export const makeSelectClaimForClaimId = (claimId: string) =>
  createSelector(
    selectClaimsById,
    byId => byId[claimId]
  );

export const makeSelectClaimForUri = (uri: string, returnRepost: boolean = true) =>
  createSelector(
    selectClaimIdsByUri,

@@ -159,13 +121,11 @@ export const makeSelectClaimForUri = (uri: string, returnRepost: boolean = true)

      const repostedClaim = claim && claim.reposted_claim;
      if (repostedClaim && returnRepost) {
        const channelUrl =
          claim.signing_channel &&
          (claim.signing_channel.canonical_url || claim.signing_channel.permanent_url);
        const channelUrl = claim.signing_channel && claim.signing_channel.canonical_url;

        return {
          ...repostedClaim,
          repost_url: normalizeURI(uri),
          repost_url: uri,
          repost_channel_url: channelUrl,
          repost_bid_amount: claim && claim.meta && claim.meta.effective_amount,
        };

@@ -295,8 +255,8 @@ export const makeSelectMyPurchasesForPage = (query: ?string, page: number = 1) =

      const fileInfos = myPurchases.map(uri => claimsByUri[uri]);
      const matchingFileInfos = filterClaims(fileInfos, query);
      const start = (Number(page) - 1) * Number(CLAIM.PAGE_SIZE);
      const end = Number(page) * Number(CLAIM.PAGE_SIZE);
      const start = (Number(page) - 1) * Number(PAGE_SIZE);
      const end = Number(page) * Number(PAGE_SIZE);
      return matchingFileInfos && matchingFileInfos.length
        ? matchingFileInfos
            .slice(start, end)

@@ -338,7 +298,6 @@ export const makeSelectClaimsInChannelForPage = (uri: string, page?: number) =>
    }
  );

// THIS IS LEFT OVER FROM ONE TAB CHANNEL_CONTENT
export const makeSelectTotalClaimsInChannelSearch = (uri: string) =>
  createSelector(
    selectClaimsById,

@@ -349,7 +308,6 @@ export const makeSelectTotalClaimsInChannelSearch = (uri: string) =>
    }
  );

// THIS IS LEFT OVER FROM ONE_TAB CHANNEL CONTENT
export const makeSelectTotalPagesInChannelSearch = (uri: string) =>
  createSelector(
    selectClaimsById,

@@ -360,6 +318,21 @@ export const makeSelectTotalPagesInChannelSearch = (uri: string) =>
    }
  );

export const makeSelectClaimsInChannelForCurrentPageState = (uri: string) =>
  createSelector(
    selectClaimsById,
    selectAllClaimsByChannel,
    selectCurrentChannelPage,
    (byId, allClaims, page) => {
      const byChannel = allClaims[uri] || {};
      const claimIds = byChannel[page || 1];

      if (!claimIds) return claimIds;

      return claimIds.map(claimId => byId[claimId]);
    }
  );

export const makeSelectMetadataForUri = (uri: string) =>
  createSelector(
    makeSelectClaimForUri(uri),

@@ -500,9 +473,7 @@ export const selectMyClaims = createSelector(
export const selectMyClaimsWithoutChannels = createSelector(
  selectMyClaims,
  myClaims =>
    myClaims
      .filter(claim => claim && !claim.name.match(/^@/))
      .sort((a, b) => a.timestamp - b.timestamp)
    myClaims.filter(claim => !claim.name.match(/^@/)).sort((a, b) => a.timestamp - b.timestamp)
);

export const selectMyClaimUrisWithoutChannels = createSelector(

@@ -546,11 +517,6 @@ export const selectFetchingMyChannels = createSelector(
  state => state.fetchingMyChannels
);

export const selectFetchingMyCollections = createSelector(
  selectState,
  state => state.fetchingMyCollections
);

export const selectMyChannelClaims = createSelector(
  selectState,
  selectClaimsById,

@@ -577,11 +543,6 @@ export const selectMyChannelUrls = createSelector(
  claims => (claims ? claims.map(claim => claim.canonical_url || claim.permanent_url) : undefined)
);

export const selectMyCollectionIds = createSelector(
  selectState,
  state => state.myCollectionClaims
);

export const selectResolvingUris = createSelector(
  selectState,
  state => state.resolvingUris || []

@@ -610,18 +571,31 @@ export const selectChannelClaimCounts = createSelector(

export const makeSelectPendingClaimForUri = (uri: string) =>
  createSelector(
    selectPendingClaimsById,
    pendingById => {
    selectPendingIds,
    selectClaimsById,
    (pending, claims) => {
      let validUri;
      let uriIsChannel;
      let uriStreamName;
      let uriChannelName;
      try {
        ({ streamName: uriStreamName, channelName: uriChannelName } = parseURI(uri));
        ({
          isChannel: uriIsChannel,
          streamName: uriStreamName,
          channelName: uriChannelName,
        } = parseURI(uri));
        validUri = true;
      } catch (e) {
        return null;
      }
      const pendingClaims = (Object.values(pendingById): any);
      const matchingClaim = pendingClaims.find((claim: GenericClaim) => {
        return claim.normalized_name === uriChannelName || claim.normalized_name === uriStreamName;
      const pendingClaims = pending.map(id => claims[id]);
      const matchingClaim = pendingClaims.find(claim => {
        const { streamName, channelName, isChannel } = parseURI(claim.permanent_url);
        if (isChannel) {
          return channelName === uriChannelName;
        } else {
          return streamName === uriStreamName;
        }
      });
      return matchingClaim || null;
    }

@@ -630,13 +604,13 @@ export const makeSelectPendingClaimForUri = (uri: string) =>
export const makeSelectTotalItemsForChannel = (uri: string) =>
  createSelector(
    selectChannelClaimCounts,
    byUri => byUri && byUri[normalizeURI(uri)]
    byUri => byUri && byUri[uri]
  );

export const makeSelectTotalPagesForChannel = (uri: string, pageSize: number = 10) =>
  createSelector(
    selectChannelClaimCounts,
    byUri => byUri && byUri[uri] && Math.ceil(byUri[normalizeURI(uri)] / pageSize)
    byUri => byUri && byUri[uri] && Math.ceil(byUri[uri] / pageSize)
  );

export const makeSelectNsfwCountFromUris = (uris: Array<string>) =>

@@ -652,6 +626,27 @@ export const makeSelectNsfwCountFromUris = (uris: Array<string>) =>
    }, 0)
  );

export const makeSelectNsfwCountForChannel = (uri: string) =>
  createSelector(
    selectClaimsById,
    selectAllClaimsByChannel,
    selectCurrentChannelPage,
    (byId, allClaims, page) => {
      const byChannel = allClaims[uri] || {};
      const claimIds = byChannel[page || 1];

      if (!claimIds) return 0;

      return claimIds.reduce((acc, claimId) => {
        const claim = byId[claimId];
        if (isClaimNsfw(claim)) {
          return acc + 1;
        }
        return acc;
      }, 0);
    }
  );

export const makeSelectOmittedCountForChannel = (uri: string) =>
  createSelector(
    makeSelectTotalItemsForChannel(uri),

@@ -731,6 +726,14 @@ export const makeSelectTagsForUri = (uri: string) =>
    }
  );

export const makeSelectChannelTagsForUri = (uri: string) =>
  createSelector(
    makeSelectMetadataForUri(uri),
    (metadata: ?GenericMetadata) => {
      return (metadata && metadata.tags) || [];
    }
  );

export const selectFetchingClaimSearchByQuery = createSelector(
  selectState,
  state => state.fetchingClaimSearchByQuery || {}

@@ -815,8 +818,8 @@ export const makeSelectMyStreamUrlsForPage = (page: number = 1) =>
  createSelector(
    selectMyClaimUrisWithoutChannels,
    urls => {
      const start = (Number(page) - 1) * Number(CLAIM.PAGE_SIZE);
      const end = Number(page) * Number(CLAIM.PAGE_SIZE);
      const start = (Number(page) - 1) * Number(PAGE_SIZE);
      const end = Number(page) * Number(PAGE_SIZE);

      return urls && urls.length ? urls.slice(start, end) : [];
    }

@@ -841,82 +844,3 @@ export const makeSelectTagInClaimOrChannelForUri = (uri: string, tag: string) =>
      return claimTags.includes(tag) || channelTags.includes(tag);
    }
  );

export const makeSelectClaimHasSource = (uri: string) =>
  createSelector(
    makeSelectClaimForUri(uri),
    claim => {
      if (!claim) {
        return false;
      }

      return Boolean(claim.value.source);
    }
  );

export const makeSelectClaimIsStreamPlaceholder = (uri: string) =>
  createSelector(
    makeSelectClaimForUri(uri),
    claim => {
      if (!claim) {
        return false;
      }

      return Boolean(claim.value_type === 'stream' && !claim.value.source);
    }
  );

export const makeSelectTotalStakedAmountForChannelUri = (uri: string) =>
  createSelector(
    makeSelectClaimForUri(uri),
    claim => {
      if (!claim || !claim.amount || !claim.meta || !claim.meta.support_amount) {
        return 0;
      }

      return parseFloat(claim.amount) + parseFloat(claim.meta.support_amount) || 0;
    }
  );

export const makeSelectStakedLevelForChannelUri = (uri: string) =>
  createSelector(
    makeSelectTotalStakedAmountForChannelUri(uri),
    amount => {
      let level = 1;
      switch (true) {
        case amount >= CLAIM.LEVEL_2_STAKED_AMOUNT && amount < CLAIM.LEVEL_3_STAKED_AMOUNT:
          level = 2;
          break;
        case amount >= CLAIM.LEVEL_3_STAKED_AMOUNT && amount < CLAIM.LEVEL_4_STAKED_AMOUNT:
          level = 3;
          break;
        case amount >= CLAIM.LEVEL_4_STAKED_AMOUNT && amount < CLAIM.LEVEL_5_STAKED_AMOUNT:
          level = 4;
          break;
        case amount >= CLAIM.LEVEL_5_STAKED_AMOUNT:
          level = 5;
          break;
      }
      return level;
    }
  );

export const selectUpdatingCollection = createSelector(
  selectState,
  state => state.updatingCollection
);

export const selectUpdateCollectionError = createSelector(
  selectState,
  state => state.updateCollectionError
);

export const selectCreatingCollection = createSelector(
  selectState,
  state => state.creatingCollection
);

export const selectCreateCollectionError = createSelector(
  selectState,
  state => state.createCollectionError
);
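All of the factories above follow the reselect pattern of building a memoized selector per argument; a short usage sketch (the URI and root state here are hypothetical):

// Illustrative only: create the selector once, then call it with the store's state.
const uri = 'lbry://@some-channel#1/some-video#2';
const selectClaim = makeSelectClaimForUri(uri);
const selectIsPending = makeSelectClaimIsPending(uri);

const claim = selectClaim(state); // the resolved claim, or the reposted claim it points to
const isPending = selectIsPending(state); // true while the claim is still awaiting confirmation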
@@ -1,311 +0,0 @@
// @flow
import fromEntries from '@ungap/from-entries';
import { createSelector } from 'reselect';
import {
  selectMyCollectionIds,
  makeSelectClaimForUri,
  selectClaimsByUri,
} from 'redux/selectors/claims';
import { parseURI } from 'lbryURI';

const selectState = (state: { collections: CollectionState }) => state.collections;

export const selectSavedCollectionIds = createSelector(
  selectState,
  collectionState => collectionState.saved
);

export const selectBuiltinCollections = createSelector(
  selectState,
  state => state.builtin
);
export const selectResolvedCollections = createSelector(
  selectState,
  state => state.resolved
);

export const selectMyUnpublishedCollections = createSelector(
  selectState,
  state => state.unpublished
);

export const selectMyEditedCollections = createSelector(
  selectState,
  state => state.edited
);

export const selectPendingCollections = createSelector(
  selectState,
  state => state.pending
);

export const makeSelectEditedCollectionForId = (id: string) =>
  createSelector(
    selectMyEditedCollections,
    eLists => eLists[id]
  );

export const makeSelectPendingCollectionForId = (id: string) =>
  createSelector(
    selectPendingCollections,
    pending => pending[id]
  );

export const makeSelectPublishedCollectionForId = (id: string) =>
  createSelector(
    selectResolvedCollections,
    rLists => rLists[id]
  );

export const makeSelectUnpublishedCollectionForId = (id: string) =>
  createSelector(
    selectMyUnpublishedCollections,
    rLists => rLists[id]
  );

export const makeSelectCollectionIsMine = (id: string) =>
  createSelector(
    selectMyCollectionIds,
    selectMyUnpublishedCollections,
    selectBuiltinCollections,
    (publicIds, privateIds, builtinIds) => {
      return Boolean(publicIds.includes(id) || privateIds[id] || builtinIds[id]);
    }
  );

export const selectMyPublishedCollections = createSelector(
  selectResolvedCollections,
  selectPendingCollections,
  selectMyEditedCollections,
  selectMyCollectionIds,
  (resolved, pending, edited, myIds) => {
    // all resolved in myIds, plus those in pending and edited
    const myPublishedCollections = fromEntries(
      Object.entries(pending).concat(
        Object.entries(resolved).filter(
          ([key, val]) =>
            myIds.includes(key) &&
            // $FlowFixMe
            !pending[key]
        )
      )
    );
    // now add in edited:
    Object.entries(edited).forEach(([id, item]) => {
      myPublishedCollections[id] = item;
    });
    return myPublishedCollections;
  }
);

export const selectMyPublishedMixedCollections = createSelector(
  selectMyPublishedCollections,
  published => {
    const myCollections = fromEntries(
      // $FlowFixMe
      Object.entries(published).filter(([key, collection]) => {
        // $FlowFixMe
        return collection.type === 'collection';
      })
    );
    return myCollections;
  }
);

export const selectMyPublishedPlaylistCollections = createSelector(
  selectMyPublishedCollections,
  published => {
    const myCollections = fromEntries(
      // $FlowFixMe
      Object.entries(published).filter(([key, collection]) => {
        // $FlowFixMe
        return collection.type === 'playlist';
      })
    );
    return myCollections;
  }
);

export const makeSelectMyPublishedCollectionForId = (id: string) =>
  createSelector(
    selectMyPublishedCollections,
    myPublishedCollections => myPublishedCollections[id]
  );

// export const selectSavedCollections = createSelector(
//   selectResolvedCollections,
//   selectSavedCollectionIds,
//   (resolved, myIds) => {
//     const mySavedCollections = fromEntries(
//       Object.entries(resolved).filter(([key, val]) => myIds.includes(key))
//     );
//     return mySavedCollections;
//   }
// );

export const makeSelectIsResolvingCollectionForId = (id: string) =>
  createSelector(
    selectState,
    state => {
      return state.isResolvingCollectionById[id];
    }
  );

export const makeSelectCollectionForId = (id: string) =>
  createSelector(
    selectBuiltinCollections,
    selectResolvedCollections,
    selectMyUnpublishedCollections,
    selectMyEditedCollections,
    selectPendingCollections,
    (bLists, rLists, uLists, eLists, pLists) => {
      const collection = bLists[id] || uLists[id] || eLists[id] || pLists[id] || rLists[id];
      return collection;
    }
  );

export const makeSelectClaimUrlInCollection = (url: string) =>
  createSelector(
    selectBuiltinCollections,
    selectMyPublishedCollections,
    selectMyUnpublishedCollections,
    selectMyEditedCollections,
    selectPendingCollections,
    (bLists, myRLists, uLists, eLists, pLists) => {
      const collections = [bLists, uLists, eLists, myRLists, pLists];
      const itemsInCollections = [];
      collections.map(list => {
        Object.entries(list).forEach(([key, value]) => {
          // $FlowFixMe
          value.items.map(item => {
            itemsInCollections.push(item);
          });
        });
      });
      return itemsInCollections.includes(url);
    }
  );

export const makeSelectCollectionForIdHasClaimUrl = (id: string, url: string) =>
  createSelector(
    makeSelectCollectionForId(id),
    collection => collection && collection.items.includes(url)
  );

export const makeSelectUrlsForCollectionId = (id: string) =>
  createSelector(
    makeSelectCollectionForId(id),
    collection => collection && collection.items
  );

export const makeSelectClaimIdsForCollectionId = (id: string) =>
  createSelector(
    makeSelectCollectionForId(id),
    collection => {
      const items = (collection && collection.items) || [];
      const ids = items.map(item => {
        const { claimId } = parseURI(item);
        return claimId;
      });
      return ids;
    }
  );

export const makeSelectIndexForUrlInCollection = (url: string, id: string) =>
  createSelector(
    state => state.content.shuffleList,
    makeSelectUrlsForCollectionId(id),
    makeSelectClaimForUri(url),
    (shuffleState, urls, claim) => {
      const shuffleUrls = shuffleState && shuffleState.collectionId === id && shuffleState.newUrls;
      const listUrls = shuffleUrls || urls;

      const index = listUrls && listUrls.findIndex(u => u === url);
      if (index > -1) {
        return index;
      } else if (claim) {
        const index = listUrls && listUrls.findIndex(u => u === claim.permanent_url);
        if (index > -1) return index;
        return claim;
      }
      return null;
    }
  );

export const makeSelectPreviousUrlForCollectionAndUrl = (id: string, url: string) =>
  createSelector(
    state => state.content.shuffleList,
    state => state.content.loopList,
    makeSelectIndexForUrlInCollection(url, id),
    makeSelectUrlsForCollectionId(id),
    (shuffleState, loopState, index, urls) => {
      const loopList = loopState && loopState.collectionId === id && loopState.loop;
      const shuffleUrls = shuffleState && shuffleState.collectionId === id && shuffleState.newUrls;

      if (index > -1) {
        const listUrls = shuffleUrls || urls;
        let nextUrl;
        if (index === 0 && loopList) {
          nextUrl = listUrls[listUrls.length - 1];
        } else {
          nextUrl = listUrls[index - 1];
        }
        return nextUrl || null;
      } else {
        return null;
      }
    }
  );

export const makeSelectNextUrlForCollectionAndUrl = (id: string, url: string) =>
  createSelector(
    state => state.content.shuffleList,
    state => state.content.loopList,
    makeSelectIndexForUrlInCollection(url, id),
    makeSelectUrlsForCollectionId(id),
    (shuffleState, loopState, index, urls) => {
      const loopList = loopState && loopState.collectionId === id && loopState.loop;
      const shuffleUrls = shuffleState && shuffleState.collectionId === id && shuffleState.newUrls;

      if (index > -1) {
        const listUrls = shuffleUrls || urls;
        // We'll get the next playable url
        let remainingUrls = listUrls.slice(index + 1);
        if (!remainingUrls.length && loopList) {
          remainingUrls = listUrls.slice(0);
        }
        const nextUrl = remainingUrls && remainingUrls[0];
        return nextUrl || null;
      } else {
        return null;
      }
    }
  );

export const makeSelectNameForCollectionId = (id: string) =>
  createSelector(
    makeSelectCollectionForId(id),
    collection => {
      return (collection && collection.name) || '';
    }
  );

export const makeSelectCountForCollectionId = (id: string) =>
  createSelector(
    makeSelectCollectionForId(id),
    collection => {
      if (collection) {
        if (collection.itemCount !== undefined) {
          return collection.itemCount;
        }
        let itemCount = 0;
        collection.items.map(item => {
          if (item) {
            itemCount += 1;
          }
        });
        return itemCount;
      }
      return null;
    }
  );
@@ -43,8 +43,7 @@ export const selectPublishFormValues = createSelector(
  state => state.settings,
  selectIsStillEditing,
  (publishState, settingsState, isStillEditing) => {
    const { languages, ...formValues } = publishState;
    const language = languages && languages.length && languages[0];
    const { pendingPublish, language, ...formValues } = publishState;
    const { clientSettings } = settingsState;
    const { language: languageSet } = clientSettings;
@@ -2,7 +2,6 @@ import { createSelector } from 'reselect';
import * as TRANSACTIONS from 'constants/transaction_types';
import { PAGE_SIZE, LATEST_PAGE_SIZE } from 'constants/transaction_list';
import { selectClaimIdsByUri } from 'redux/selectors/claims';
import parseData from 'util/parse-data';
export const selectState = state => state.wallet || {};

export const selectWalletState = selectState;

@@ -29,7 +28,7 @@ export const selectPendingSupportTransactions = createSelector(

export const selectPendingOtherTransactions = createSelector(
  selectState,
  state => state.pendingTxos
  state => state.pendingConsolidateTxos
);

export const selectAbandonClaimSupportError = createSelector(

@@ -268,27 +267,6 @@ export const selectIsFetchingTransactions = createSelector(
  state => state.fetchingTransactions
);

/**
 * CSV of 'selectTransactionItems'.
 */
export const selectTransactionsFile = createSelector(
  selectTransactionItems,
  transactions => {
    if (!transactions || transactions.length === 0) {
      // No data.
      return undefined;
    }

    const parsed = parseData(transactions, 'csv');
    if (!parsed) {
      // Invalid data, or failed to parse.
      return null;
    }

    return parsed;
  }
);

export const selectIsSendingSupport = createSelector(
  selectState,
  state => state.sendingSupport

@@ -418,21 +396,6 @@ export const selectIsConsolidatingUtxos = createSelector(
  state => state.consolidatingUtxos
);

export const selectIsMassClaimingTips = createSelector(
  selectState,
  state => state.massClaimingTips
);

export const selectPendingConsolidateTxid = createSelector(
  selectState,
  state => state.pendingConsolidateTxid
);

export const selectPendingMassClaimTxid = createSelector(
  selectState,
  state => state.pendingMassClaimTxid
);

export const selectUtxoCounts = createSelector(
  selectState,
  state => state.utxoCounts
@@ -1,61 +0,0 @@
// JSON parser
const parseJson = (data, filters = []) => {
  const list = data.map(item => {
    const temp = {};
    // Apply filters
    Object.entries(item).forEach(([key, value]) => {
      if (!filters.includes(key)) temp[key] = value;
    });
    return temp;
  });
  // Beautify JSON
  return JSON.stringify(list, null, '\t');
};

// CSV Parser
// No need for an external module:
// https://gist.github.com/btzr-io/55c3450ea3d709fc57540e762899fb85
const parseCsv = (data, filters = []) => {
  // Get items for header
  const getHeaders = item => {
    const list = [];
    // Apply filters
    Object.entries(item).forEach(([key]) => {
      if (!filters.includes(key)) list.push(key);
    });
    // return headers
    return list.join(',');
  };

  // Get rows content
  const getData = list =>
    list
      .map(item => {
        const row = [];
        // Apply filters
        Object.entries(item).forEach(([key, value]) => {
          if (!filters.includes(key)) row.push(value);
        });
        // return rows
        return row.join(',');
      })
      .join('\n');

  // Return CSV string
  return `${getHeaders(data[0])} \n ${getData(data)}`;
};

const parseData = (data, format, filters = []) => {
  // Check for validation
  const valid = data && data[0] && format;
  // Pick a format
  const formats = {
    csv: list => parseCsv(list, filters),
    json: list => parseJson(list, filters),
  };

  // Return parsed data: JSON || CSV
  return valid && formats[format] ? formats[format](data) : undefined;
};

export default parseData;
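Since the removed util is shown in full above, a short usage sketch of its output (the sample rows are made up):

// Rows in, CSV or pretty-printed JSON out, with the 'txid' column filtered away.
const rows = [
  { date: '2021-04-01', type: 'spend', amount: 1.5, txid: 'abc' },
  { date: '2021-04-02', type: 'receive', amount: 2, txid: 'def' },
];

const csv = parseData(rows, 'csv', ['txid']);
// "date,type,amount \n 2021-04-01,spend,1.5\n2021-04-02,receive,2"

const json = parseData(rows, 'json', ['txid']); // tab-indented JSON string without 'txid'
const nothing = parseData([], 'csv'); // undefined: no data to parse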
@@ -1411,11 +1411,6 @@
  dependencies:
    "@types/yargs-parser" "*"

"@ungap/from-entries@^0.2.1":
  version "0.2.1"
  resolved "https://registry.yarnpkg.com/@ungap/from-entries/-/from-entries-0.2.1.tgz#7e86196b8b2e99d73106a8f25c2a068326346354"
  integrity sha512-CAqefTFAfnUPwYqsWHXpOxHaq1Zo5UQ3m9Zm2p09LggGe57rqHoBn3c++xcoomzXKynAUuiBMDUCQvKMnXjUpA==

abab@^1.0.4:
  version "1.0.4"
  resolved "https://registry.yarnpkg.com/abab/-/abab-1.0.4.tgz#5faad9c2c07f60dd76770f71cf025b62a63cfd4e"