fix(search): Update getAutoComplete #42
3 changed files with 52 additions and 24 deletions

package.json:

```diff
@@ -2,7 +2,7 @@
   "name": "lighthouse",
   "description": "Lighthouse is a lightning-fast advanced search engine API for publications on the lbrycrd with autocomplete capabilities.",
   "version": "0.0.1",
-  "author": "filipnyquist <filip@lbry.io>,
+  "author": "filipnyquist <filip@lbry.io>",
   "keywords": [
     "lbry",
     "search",
```

```diff
@@ -18,6 +18,7 @@ const eclient = new elasticsearch.Client({
   },
 });
 
+
 function getResults (input) {
   if (input.size === undefined) input.size = 10;
   if (input.from === undefined) input.from = 0;
@@ -46,30 +47,57 @@ function getResults (input) {
   });
 }
 
-function getAutoComplete (input) {
-  if (input.size === undefined) input.size = 10;
-  if (input.from === undefined) input.from = 0;
+function getIndex() {
+  // ideally, data is inserted into elastic search with an index that helps us query it faster/better results
+  // A simple start is to default queries to be within the n months, and to make a new index each month.
+  return;
+}
+
+function getRoutingKey() {
+  // This is the most important field for performance. Being able to route the queries ahead of time can make typedowns insanely good.
+  return;
+}
+
+function getAutoCompleteQuery(query) {
+  return {
+    multi_match: {
+      query: query.s.trim(),
+      type: 'phrase_prefix',
+      slop: 5,
+      max_expansions: 50,
+      fields: [
+        'name',
+        'value.stream.metadata.author',
+        'value.stream.metadata.title',
+        'value.stream.metadata.description'
+      ]
+    }
+  };
+}
+
+function getFilter(query) {
+  // this is the best place for putting things like filtering on the type of content
+  // Perhaps we can add search param that will filter on how people have categorized / tagged their content
+  return;
+}
+
+function getAutoComplete(query) {
   return eclient.search({
-    index : 'claims',
-    _source: ['name', 'value.stream.metadata.title', 'value.stream.metadata.author'],
-    body : {
-      'query': {
-        'bool': {
-          'must': {
-            'query_string': {
-              'query' : '*' + input.s.trim() + '*',
-              'fields': [
-                'name',
-                'value.stream.metadata.title',
-                'value.stream.metadata.author',
-              ],
-            },
-          },
-        },
-      },
-      size: input.size,
-      from: input.from,
+    index: getIndex(query) || 'claims',
+    routing: getRoutingKey(query),
+    ignore_unavailable: true, // ignore error when date index does not exist
+    body: {
+      size: query.size || 5,
+      from: query.from || 0,
+      query: {
+        bool: {
+          must: getAutoCompleteQuery(query),
+          filter: getFilter(query)
+        }
+      }
     },
+    size: query.size,
+    from: query.from,
   });
 }
 
```
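
The `getIndex()` stub only describes the monthly-index idea in its comments. A minimal sketch of what it could return, assuming one index per month under a hypothetical `claims-YYYY-MM` naming scheme (the name, the three-month window, and the helper shape are all assumptions, not part of this PR):

```js
// Hypothetical sketch: build a comma-separated list of the last few monthly
// indices, e.g. 'claims-2017-06,claims-2017-05,claims-2017-04'.
// The 'claims-YYYY-MM' naming scheme is illustrative only.
function getIndex (query, months = 3) {
  const indices = [];
  const d = new Date();
  for (let i = 0; i < months; i++) {
    const month = ('0' + (d.getMonth() + 1)).slice(-2); // zero-pad to two digits
    indices.push('claims-' + d.getFullYear() + '-' + month);
    d.setMonth(d.getMonth() - 1); // step back one month, rolling over years
  }
  return indices.join(','); // elasticsearch accepts comma-separated index names
}
```

This pairs with the `ignore_unavailable: true` flag added to the search call, which keeps a month with no index yet from failing the whole query.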
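
On `getRoutingKey()`: a routing value tells elasticsearch which shard to query, turning a scatter-gather across all shards into a single-shard lookup, which is why the comment calls it the most important field for typedown performance. A sketch, assuming the first character of the search term is the key and that documents were indexed with the same routing value (required for correct results); both are assumptions this PR leaves open:

```js
// Hypothetical sketch: route on the first character of the search term.
// Only valid if documents were *indexed* with the same routing scheme.
function getRoutingKey (query) {
  const s = (query.s || '').trim().toLowerCase();
  return s ? s.charAt(0) : undefined; // undefined => no routing, query all shards
}
```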
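
Likewise, `getFilter()` only hints at filtering. Inside the `bool` query it plugs into, a `filter` clause is cacheable and does not affect scoring, which makes it the natural home for type or tag restrictions. A sketch, assuming a hypothetical `contentType` request parameter and an assumed metadata field path:

```js
// Hypothetical sketch: restrict results by content type when requested.
// Both the 'contentType' parameter and the field path are assumptions.
function getFilter (query) {
  if (!query.contentType) return undefined; // undefined is dropped when the body is serialized
  return {
    term: { 'value.stream.metadata.content_type': query.contentType }
  };
}
```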
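
For reference, the new `phrase_prefix` multi_match bounds its work with `max_expansions`, whereas the old leading-wildcard `query_string` (`'*' + input.s.trim() + '*'`) had to scan every term in the index. Calling the reworked function looks like this (the sample term and result handling are illustrative):

```js
// Example call; eclient.search() resolves to the raw elasticsearch response.
getAutoComplete({ s: 'lighthou', size: 5, from: 0 })
  .then(result => {
    const names = result.hits.hits.map(hit => hit._source.name);
    console.log(names); // e.g. ['lighthouse', ...]
  })
  .catch(err => console.error(err));
```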

```diff
@@ -124,7 +124,7 @@ export async function sync () {
     // Done adding, update our claimTrie cache to latest and wait a bit...
     await saveJSON(path.join(appRoot.path, 'claimTrieCache.json'), latestClaimTrie);
     status.info = 'upToDate';
-    await sleep(300000);
+    await sleep(600000);
     sync();
   } catch (err) {
     winston.log(err);
```
Was poking at trying to get rid of the timeouts for the importer. Disregard this.