From 50ae6e2869682acd136faf0da8a0b05edb9f3fd7 Mon Sep 17 00:00:00 2001
From: Franco Montenegro
Date: Fri, 15 Apr 2022 00:05:59 -0300
Subject: [PATCH] Add ability to search through publishes. (#7535)

* Add ability to search through publishes.
* Small fix in allClaimListMine type.
* Small fix for search claims in uploads page.
* Add search term in uri when filtering uploads.
* ui/ux touchup
* no appstrings for you
* resolve conflicts

Co-authored-by: jessopb <36554050+jessopb@users.noreply.github.com>
---
 static/app-strings.json            |   1 +
 ui/constants/action_types.js       |   2 +
 ui/page/fileListPublished/index.js |  17 ++--
 ui/page/fileListPublished/view.jsx | 130 +++++++++++++++++++++++------
 ui/redux/actions/claims.js         |  23 +++++
 ui/redux/reducers/claims.js        |  15 ++++
 ui/redux/selectors/claims.js       |   4 +
 ui/scss/component/_wunderbar.scss  |   1 +
 ui/util/debounce.js                |   6 +-
 9 files changed, 162 insertions(+), 37 deletions(-)

diff --git a/static/app-strings.json b/static/app-strings.json
index 85bd64c28..649979674 100644
--- a/static/app-strings.json
+++ b/static/app-strings.json
@@ -2306,6 +2306,7 @@
   "Privacy": "Privacy",
   "LBRY takes privacy and choice seriously. Is it ok if we monitor performance and help creators track their views?": "LBRY takes privacy and choice seriously. Is it ok if we monitor performance and help creators track their views?",
   "Yes, share with LBRY": "Yes, share with LBRY",
+  "Search Uploads": "Search Uploads",
   "This refundable boost will improve the discoverability of this %claimTypeText% while active. ": "This refundable boost will improve the discoverability of this %claimTypeText% while active. ",
   "--end--": "--end--"
 }
diff --git a/ui/constants/action_types.js b/ui/constants/action_types.js
index 8cc077d48..e12958db6 100644
--- a/ui/constants/action_types.js
+++ b/ui/constants/action_types.js
@@ -128,6 +128,8 @@ export const FETCH_CHANNEL_CLAIMS_COMPLETED = 'FETCH_CHANNEL_CLAIMS_COMPLETED';
 export const FETCH_CHANNEL_CLAIM_COUNT_STARTED = 'FETCH_CHANNEL_CLAIM_COUNT_STARTED';
 export const FETCH_CLAIM_LIST_MINE_STARTED = 'FETCH_CLAIM_LIST_MINE_STARTED';
 export const FETCH_CLAIM_LIST_MINE_COMPLETED = 'FETCH_CLAIM_LIST_MINE_COMPLETED';
+export const FETCH_ALL_CLAIM_LIST_MINE_STARTED = 'FETCH_ALL_CLAIM_LIST_MINE_STARTED';
+export const FETCH_ALL_CLAIM_LIST_MINE_COMPLETED = 'FETCH_ALL_CLAIM_LIST_MINE_COMPLETED';
 export const ABANDON_CLAIM_STARTED = 'ABANDON_CLAIM_STARTED';
 export const ABANDON_CLAIM_SUCCEEDED = 'ABANDON_CLAIM_SUCCEEDED';
 export const FETCH_CHANNEL_LIST_STARTED = 'FETCH_CHANNEL_LIST_STARTED';
diff --git a/ui/page/fileListPublished/index.js b/ui/page/fileListPublished/index.js
index 75e6b4806..aa66f0274 100644
--- a/ui/page/fileListPublished/index.js
+++ b/ui/page/fileListPublished/index.js
@@ -1,11 +1,10 @@
 import { connect } from 'react-redux';
 import {
-  selectIsFetchingClaimListMine,
-  selectMyClaimsPage,
-  selectMyClaimsPageItemCount,
+  selectIsFetchingAllMyClaims,
   selectFetchingMyClaimsPageError,
+  selectAllMyClaims,
 } from 'redux/selectors/claims';
-import { doFetchClaimListMine, doCheckPendingClaims } from 'redux/actions/claims';
+import { doCheckPendingClaims, doFetchAllClaimListMine } from 'redux/actions/claims';
 import { doClearPublish } from 'redux/actions/publish';
 import FileListPublished from './view';
 import { withRouter } from 'react-router';
@@ -16,22 +15,22 @@ const select = (state, props) => {
   const urlParams = new URLSearchParams(search);
   const page = Number(urlParams.get(PAGE_PARAM)) || '1';
   const pageSize = urlParams.get(PAGE_SIZE_PARAM) || String(MY_CLAIMS_PAGE_SIZE);
+  const initialSearchTerm = urlParams.get('searchText') || '';
 
   return {
     page,
     pageSize,
-    fetching: selectIsFetchingClaimListMine(state),
-    urls: selectMyClaimsPage(state),
-    urlTotal: selectMyClaimsPageItemCount(state),
+    fetching: selectIsFetchingAllMyClaims(state),
     error: selectFetchingMyClaimsPageError(state),
+    myClaims: selectAllMyClaims(state),
+    initialSearchTerm,
   };
 };
 
 const perform = (dispatch) => ({
   checkPendingPublishes: () => dispatch(doCheckPendingClaims()),
-  fetchClaimListMine: (page, pageSize, resolve, filterBy) =>
-    dispatch(doFetchClaimListMine(page, pageSize, resolve, filterBy)),
   clearPublish: () => dispatch(doClearPublish()),
+  fetchAllMyClaims: () => dispatch(doFetchAllClaimListMine()),
 });
 
 export default withRouter(connect(select, perform)(FileListPublished));
diff --git a/ui/page/fileListPublished/view.jsx b/ui/page/fileListPublished/view.jsx
index d87be48e0..02aa73dbc 100644
--- a/ui/page/fileListPublished/view.jsx
+++ b/ui/page/fileListPublished/view.jsx
@@ -1,7 +1,7 @@
 // @flow
 import * as PAGES from 'constants/pages';
 import * as ICONS from 'constants/icons';
-import React, { useEffect } from 'react';
+import React, { useEffect, useMemo } from 'react';
 import Button from 'component/button';
 import ClaimList from 'component/claimList';
 import ClaimPreview from 'component/claimPreview';
@@ -10,28 +10,47 @@
 import Paginate from 'component/common/paginate';
 import { PAGE_PARAM, PAGE_SIZE_PARAM } from 'constants/claim';
 import Spinner from 'component/spinner';
 import Yrbl from 'component/yrbl';
+import { FormField, Form } from 'component/common/form';
+import Icon from 'component/common/icon';
+import debounce from 'util/debounce';
 import classnames from 'classnames';
 
 const FILTER_ALL = 'stream,repost';
 const FILTER_UPLOADS = 'stream';
 const FILTER_REPOSTS = 'repost';
+const PAGINATE_PARAM = 'page';
 
 type Props = {
   checkPendingPublishes: () => void,
   clearPublish: () => void,
-  fetchClaimListMine: (number, number, boolean, Array<string>) => void,
   fetching: boolean,
-  urls: Array<string>,
-  urlTotal: number,
   history: { replace: (string) => void, push: (string) => void },
   page: number,
   pageSize: number,
+  myClaims: any,
+  fetchAllMyClaims: () => void,
+  location: { search: string },
+  initialSearchTerm: string,
 };
 
 function FileListPublished(props: Props) {
-  const { checkPendingPublishes, clearPublish, fetchClaimListMine, fetching, urls, urlTotal, page, pageSize } = props;
+  const {
+    checkPendingPublishes,
+    clearPublish,
+    fetching,
+    page,
+    pageSize,
+    myClaims,
+    fetchAllMyClaims,
+    location,
+    history,
+    initialSearchTerm,
+  } = props;
   const [filterBy, setFilterBy] = React.useState(FILTER_ALL);
+  const [searchText, setSearchText] = React.useState(initialSearchTerm);
+  const [filteredClaims, setFilteredClaims] = React.useState([]);
+  const { search } = location;
 
   const params = {};
   params[PAGE_PARAM] = Number(page);
@@ -39,16 +58,71 @@ function FileListPublished(props: Props) {
 
   const paramsString = JSON.stringify(params);
 
+  const doFilterClaims = () => {
+    if (fetching) {
+      return;
+    }
+    const filtered = myClaims.filter((claim) => {
+      const value = claim.value || {};
+      const src = value.source || {};
+      const title = (value.title || '').toLowerCase();
+      const description = (value.description || '').toLowerCase();
+      const tags = (value.tags || []).join('').toLowerCase();
+      const srcName = (src.name || '').toLowerCase();
+      const lowerCaseSearchText = searchText.toLowerCase();
+      const textMatches =
+        !searchText ||
+        title.indexOf(lowerCaseSearchText) !== -1 ||
+        description.indexOf(lowerCaseSearchText) !== -1 ||
+        tags.indexOf(lowerCaseSearchText) !== -1 ||
+        srcName.indexOf(lowerCaseSearchText) !== -1;
+      return textMatches && filterBy.includes(claim.value_type);
+    });
+    setFilteredClaims(filtered);
+  };
+
+  const debounceFilter = debounce(doFilterClaims, 200);
+
   useEffect(() => {
     checkPendingPublishes();
   }, [checkPendingPublishes]);
 
   useEffect(() => {
-    if (paramsString && fetchClaimListMine) {
-      const params = JSON.parse(paramsString);
-      fetchClaimListMine(params.page, params.page_size, true, filterBy.split(','));
-    }
-  }, [paramsString, filterBy, fetchClaimListMine]);
+    const params = new URLSearchParams(search);
+    params.set('searchText', searchText);
+    history.replace('?' + params.toString());
+    debounceFilter();
+  }, [myClaims, searchText]);
+
+  useEffect(() => {
+    doFilterClaims();
+  }, [myClaims, filterBy]);
+
+  const urlTotal = filteredClaims.length;
+
+  const urls = useMemo(() => {
+    const params = JSON.parse(paramsString);
+    const zeroIndexPage = Math.max(0, params.page - 1);
+    const paginated = filteredClaims.slice(
+      zeroIndexPage * params.page_size,
+      zeroIndexPage * params.page_size + params.page_size
+    );
+    return paginated.map((claim) => claim.permanent_url);
+  }, [filteredClaims, paramsString]);
+
+  // Go back to the first page when the filtered claims change.
+  // This way, we avoid hiding results just because the
+  // user may be on a different page (page that was calculated
+  // using a different state, ie, different filtered claims)
+  useEffect(() => {
+    const params = new URLSearchParams(search);
+    params.set(PAGINATE_PARAM, '1');
+    history.replace('?' + params.toString());
+  }, [filteredClaims]);
+
+  useEffect(() => {
+    fetchAllMyClaims();
+  }, [fetchAllMyClaims]);
 
   return (
     <Page>
@@ -79,7 +153,10 @@ function FileListPublished(props: Props) {