2021-04-19 21:25:34 +02:00
|
|
|
package server
|
|
|
|
|
|
|
|
import (
|
|
|
|
"context"
|
|
|
|
"encoding/hex"
|
2021-06-01 04:19:10 +02:00
|
|
|
"encoding/json"
|
|
|
|
"fmt"
|
2021-05-16 05:13:14 +02:00
|
|
|
"github.com/btcsuite/btcutil/base58"
|
2021-05-31 20:53:08 +02:00
|
|
|
"github.com/golang/protobuf/ptypes/wrappers"
|
|
|
|
pb "github.com/lbryio/hub/protobuf/go"
|
2021-06-04 07:56:50 +02:00
|
|
|
"github.com/lbryio/hub/schema"
|
|
|
|
"github.com/lbryio/hub/util"
|
2021-04-19 21:25:34 +02:00
|
|
|
"github.com/olivere/elastic/v7"
|
2021-05-31 03:34:57 +02:00
|
|
|
"golang.org/x/text/cases"
|
2021-06-01 04:19:10 +02:00
|
|
|
"golang.org/x/text/language"
|
2021-05-31 20:53:08 +02:00
|
|
|
"log"
|
|
|
|
"reflect"
|
2021-06-01 04:19:10 +02:00
|
|
|
"strings"
|
2021-04-19 21:25:34 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
// record is the subset of an Elasticsearch claim document that search
// hits are decoded into.
type record struct {
	Txid    string `json:"tx_id"`    // transaction id (hex string)
	Nout    uint32 `json:"tx_nout"`  // output index within the transaction
	Height  uint32 `json:"height"`   // block height of the claim
	ClaimId string `json:"claim_id"` // full claim id
}
|
|
|
|
|
2021-05-18 12:02:55 +02:00
|
|
|
// orderField is a single sort criterion: the ES field name to sort on
// and the direction.
type orderField struct {
	Field  string
	is_asc bool // true = ascending; NOTE(review): underscore name is non-idiomatic Go, kept for existing callers
}
|
2021-06-04 07:56:50 +02:00
|
|
|
// Kinds of result a URL resolution can produce.
const (
	errorResolution   = iota // resolution failed; value carries an error message
	channelResolution        // value carries a channel claim id
	streamResolution         // value carries a stream claim id
)
|
|
|
|
// urlResolution is the outcome of resolving a raw LBRY URL: a type tag
// (errorResolution, channelResolution, or streamResolution) plus the
// associated value — an error message or a claim id, depending on type.
type urlResolution struct {
	resolutionType int
	value          string
}
|
|
|
|
|
2021-05-16 05:13:14 +02:00
|
|
|
// StrArrToInterface converts a string slice to []interface{} so it can
// be passed to variadic elastic query constructors.
func StrArrToInterface(arr []string) []interface{} {
	converted := make([]interface{}, len(arr))
	for i, s := range arr {
		converted[i] = s
	}
	return converted
}
|
|
|
|
|
2021-05-18 12:02:55 +02:00
|
|
|
func AddTermsField(arr []string, name string, q *elastic.BoolQuery) *elastic.BoolQuery {
|
2021-05-16 05:13:14 +02:00
|
|
|
if len(arr) > 0 {
|
|
|
|
searchVals := StrArrToInterface(arr)
|
|
|
|
return q.Must(elastic.NewTermsQuery(name, searchVals...))
|
|
|
|
}
|
|
|
|
return q
|
|
|
|
}
|
|
|
|
|
2021-06-01 04:19:10 +02:00
|
|
|
func AddIndividualTermFields(arr []string, name string, q *elastic.BoolQuery, invert bool) *elastic.BoolQuery {
|
|
|
|
if len(arr) > 0 {
|
|
|
|
for _, x := range arr {
|
|
|
|
if invert {
|
|
|
|
q = q.MustNot(elastic.NewTermQuery(name, x))
|
|
|
|
} else {
|
|
|
|
q = q.Must(elastic.NewTermQuery(name, x))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return q
|
|
|
|
}
|
|
|
|
return q
|
|
|
|
}
|
|
|
|
|
2021-05-18 12:02:55 +02:00
|
|
|
func AddRangeField(rq *pb.RangeField, name string, q *elastic.BoolQuery) *elastic.BoolQuery {
|
2021-05-16 05:13:14 +02:00
|
|
|
if rq == nil {
|
|
|
|
return q
|
|
|
|
}
|
|
|
|
|
|
|
|
if len(rq.Value) > 1 {
|
2021-05-18 12:02:55 +02:00
|
|
|
if rq.Op != pb.RangeField_EQ {
|
2021-05-16 05:13:14 +02:00
|
|
|
return q
|
|
|
|
}
|
2021-05-18 12:02:55 +02:00
|
|
|
return AddTermsField(rq.Value, name, q)
|
2021-05-16 05:13:14 +02:00
|
|
|
}
|
2021-05-18 12:02:55 +02:00
|
|
|
if rq.Op == pb.RangeField_EQ {
|
2021-06-03 06:31:58 +02:00
|
|
|
return q.Must(elastic.NewTermQuery(name, rq.Value[0]))
|
2021-05-18 12:02:55 +02:00
|
|
|
} else if rq.Op == pb.RangeField_LT {
|
2021-06-01 04:19:10 +02:00
|
|
|
return q.Must(elastic.NewRangeQuery(name).Lt(rq.Value[0]))
|
2021-05-18 12:02:55 +02:00
|
|
|
} else if rq.Op == pb.RangeField_LTE {
|
2021-06-01 04:19:10 +02:00
|
|
|
return q.Must(elastic.NewRangeQuery(name).Lte(rq.Value[0]))
|
2021-05-18 12:02:55 +02:00
|
|
|
} else if rq.Op == pb.RangeField_GT {
|
2021-06-01 04:19:10 +02:00
|
|
|
return q.Must(elastic.NewRangeQuery(name).Gt(rq.Value[0]))
|
2021-05-18 12:02:55 +02:00
|
|
|
} else { // pb.RangeField_GTE
|
2021-06-01 04:19:10 +02:00
|
|
|
return q.Must(elastic.NewRangeQuery(name).Gte(rq.Value[0]))
|
2021-05-16 05:13:14 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-05-18 12:02:55 +02:00
|
|
|
func AddInvertibleField(field *pb.InvertibleField, name string, q *elastic.BoolQuery) *elastic.BoolQuery {
|
|
|
|
if field == nil {
|
|
|
|
return q
|
|
|
|
}
|
|
|
|
searchVals := StrArrToInterface(field.Value)
|
|
|
|
if field.Invert {
|
2021-06-01 04:19:10 +02:00
|
|
|
q = q.MustNot(elastic.NewTermsQuery(name, searchVals...))
|
|
|
|
if name == "channel_id.keyword" {
|
|
|
|
q = q.MustNot(elastic.NewTermsQuery("_id", searchVals...))
|
|
|
|
}
|
|
|
|
return q
|
2021-05-18 12:02:55 +02:00
|
|
|
} else {
|
|
|
|
return q.Must(elastic.NewTermsQuery(name, searchVals...))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-06-01 04:19:10 +02:00
|
|
|
func (s *Server) normalizeTag(tag string) string {
|
|
|
|
c := cases.Lower(language.English)
|
|
|
|
res := s.MultiSpaceRe.ReplaceAll(
|
|
|
|
s.WeirdCharsRe.ReplaceAll(
|
|
|
|
[]byte(strings.TrimSpace(strings.Replace(c.String(tag), "'", "", -1))),
|
|
|
|
[]byte(" ")),
|
|
|
|
[]byte(" "))
|
|
|
|
|
|
|
|
return string(res)
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func (s *Server) cleanTags(tags []string) []string {
|
|
|
|
cleanedTags := make([]string, len(tags))
|
|
|
|
for i, tag := range tags {
|
|
|
|
cleanedTags[i] = s.normalizeTag(tag)
|
|
|
|
}
|
|
|
|
return cleanedTags
|
|
|
|
}
|
|
|
|
|
2021-06-04 07:56:50 +02:00
|
|
|
// fullIdFromShortId expands a short claim-id prefix into the full claim
// id for the named channel.
//
// TODO: unimplemented stub — currently always returns ("", nil), so
// short-id resolution silently yields an empty channel id.
func (s *Server) fullIdFromShortId(ctx context.Context, channelName string, claimId string) (string, error) {
	return "", nil
}
|
|
|
|
|
|
|
|
|
|
|
|
// resolveStream resolves the stream portion of a parsed URL to a claim
// id, optionally scoped to the given channel.
//
// TODO: unimplemented stub — currently always returns ("", nil); see
// the commented Python reference implementation further down the file.
func (s *Server) resolveStream(ctx context.Context, url *schema.URL, channelId string) (string, error) {
	return "", nil
}
|
|
|
|
|
|
|
|
func (s *Server) resolveChannelId(ctx context.Context, url *schema.URL) (string, error) {
|
|
|
|
if !url.HasChannel() {
|
|
|
|
return "", nil
|
|
|
|
}
|
|
|
|
if url.Channel.IsFullID() {
|
|
|
|
return url.Channel.ClaimId, nil
|
|
|
|
}
|
|
|
|
if url.Channel.IsShortID() {
|
|
|
|
channelId, err := s.fullIdFromShortId(ctx, url.Channel.Name, url.Channel.ClaimId)
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
return channelId, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
in := &pb.SearchRequest{}
|
|
|
|
in.Normalized = []string{util.Normalize(url.Channel.Name)}
|
|
|
|
if url.Channel.ClaimId == "" && url.Channel.AmountOrder < 0 {
|
|
|
|
in.IsControlling = &wrappers.BoolValue{Value: true}
|
|
|
|
} else {
|
|
|
|
if url.Channel.AmountOrder > 0 {
|
|
|
|
in.AmountOrder = &wrappers.Int32Value{Value: int32(url.Channel.AmountOrder)}
|
|
|
|
}
|
|
|
|
if url.Channel.ClaimId != "" {
|
|
|
|
in.ClaimId = &pb.InvertibleField{
|
|
|
|
Invert: false,
|
|
|
|
Value: []string{url.Channel.ClaimId},
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
var size = 1
|
|
|
|
var from = 0
|
|
|
|
q := elastic.NewBoolQuery()
|
|
|
|
q = AddTermsField(in.Normalized, "normalized", q)
|
|
|
|
|
|
|
|
if in.IsControlling != nil {
|
|
|
|
q = q.Must(elastic.NewTermQuery("is_controlling", in.IsControlling.Value))
|
|
|
|
}
|
|
|
|
|
|
|
|
if in.AmountOrder != nil {
|
|
|
|
in.Limit.Value = 1
|
|
|
|
in.OrderBy = []string{"effective_amount"}
|
|
|
|
in.Offset = &wrappers.Int32Value{Value: in.AmountOrder.Value - 1}
|
|
|
|
}
|
|
|
|
|
|
|
|
if in.Limit != nil {
|
|
|
|
size = int(in.Limit.Value)
|
|
|
|
}
|
|
|
|
|
|
|
|
if in.Offset != nil {
|
|
|
|
from = int(in.Offset.Value)
|
|
|
|
}
|
|
|
|
|
|
|
|
if in.ClaimId != nil {
|
|
|
|
searchVals := StrArrToInterface(in.ClaimId.Value)
|
|
|
|
if len(in.ClaimId.Value) == 1 && len(in.ClaimId.Value[0]) < 20 {
|
|
|
|
if in.ClaimId.Invert {
|
|
|
|
q = q.MustNot(elastic.NewPrefixQuery("claim_id.keyword", in.ClaimId.Value[0]))
|
|
|
|
} else {
|
|
|
|
q = q.Must(elastic.NewPrefixQuery("claim_id.keyword", in.ClaimId.Value[0]))
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
if in.ClaimId.Invert {
|
|
|
|
q = q.MustNot(elastic.NewTermsQuery("claim_id.keyword", searchVals...))
|
|
|
|
} else {
|
|
|
|
q = q.Must(elastic.NewTermsQuery("claim_id.keyword", searchVals...))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
searchResult, err := s.EsClient.Search().
|
|
|
|
Query(q). // specify the query
|
|
|
|
From(from).Size(size).
|
|
|
|
Do(ctx)
|
|
|
|
|
2021-04-19 21:25:34 +02:00
|
|
|
if err != nil {
|
2021-06-04 07:56:50 +02:00
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
|
|
|
|
var r record
|
|
|
|
var channelId string
|
|
|
|
for _, item := range searchResult.Each(reflect.TypeOf(r)) {
|
|
|
|
if t, ok := item.(record); ok {
|
|
|
|
channelId = t.ClaimId
|
|
|
|
}
|
|
|
|
}
|
|
|
|
//matches, err := s.Search(ctx, in)
|
|
|
|
//if err != nil {
|
|
|
|
// return "", err
|
|
|
|
//}
|
|
|
|
|
|
|
|
|
|
|
|
return channelId, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// resolveUrl parses rawUrl and resolves it to a stream claim id, a
// channel claim id, or an error message, mirroring the Python resolve
// flow in the commented reference implementation below.
//
// Returns nil when the URL cannot be parsed at all.
func (s *Server) resolveUrl(ctx context.Context, rawUrl string) *urlResolution {
	url := schema.ParseURL(rawUrl)
	if url == nil {
		return nil
	}

	channelId, err := s.resolveChannelId(ctx, url)
	if err != nil {
		return &urlResolution{
			resolutionType: errorResolution,
			// NOTE(review): url is a *schema.URL; this relies on it
			// printing usefully via %s — confirm it implements Stringer.
			value: fmt.Sprintf("Could not find channel in \"%s\".", url),
		}
	}

	// resolveStream is currently a stub; its error is deliberately
	// ignored (best-effort resolution of the stream part).
	stream, _ := s.resolveStream(ctx, url, channelId)

	if url.HasStream() {
		return &urlResolution{
			resolutionType: streamResolution,
			value:          stream,
		}
	} else {
		return &urlResolution{
			resolutionType: channelResolution,
			value:          channelId,
		}
	}
}
|
|
|
|
/*
|
|
|
|
async def resolve_url(self, raw_url):
|
|
|
|
if raw_url not in self.resolution_cache:
|
|
|
|
self.resolution_cache[raw_url] = await self._resolve_url(raw_url)
|
|
|
|
return self.resolution_cache[raw_url]
|
|
|
|
|
|
|
|
async def _resolve_url(self, raw_url):
|
|
|
|
try:
|
|
|
|
url = URL.parse(raw_url)
|
|
|
|
except ValueError as e:
|
|
|
|
return e
|
|
|
|
|
|
|
|
stream = LookupError(f'Could not find claim at "{raw_url}".')
|
|
|
|
|
|
|
|
channel_id = await self.resolve_channel_id(url)
|
|
|
|
if isinstance(channel_id, LookupError):
|
|
|
|
return channel_id
|
|
|
|
stream = (await self.resolve_stream(url, channel_id if isinstance(channel_id, str) else None)) or stream
|
|
|
|
if url.has_stream:
|
|
|
|
return StreamResolution(stream)
|
|
|
|
else:
|
|
|
|
return ChannelResolution(channel_id)
|
|
|
|
|
|
|
|
async def resolve_channel_id(self, url: URL):
|
|
|
|
if not url.has_channel:
|
|
|
|
return
|
|
|
|
if url.channel.is_fullid:
|
|
|
|
return url.channel.claim_id
|
|
|
|
if url.channel.is_shortid:
|
|
|
|
channel_id = await self.full_id_from_short_id(url.channel.name, url.channel.claim_id)
|
|
|
|
if not channel_id:
|
|
|
|
return LookupError(f'Could not find channel in "{url}".')
|
|
|
|
return channel_id
|
|
|
|
|
|
|
|
query = url.channel.to_dict()
|
|
|
|
if set(query) == {'name'}:
|
|
|
|
query['is_controlling'] = True
|
|
|
|
else:
|
|
|
|
query['order_by'] = ['^creation_height']
|
|
|
|
matches, _, _ = await self.search(**query, limit=1)
|
|
|
|
if matches:
|
|
|
|
channel_id = matches[0]['claim_id']
|
|
|
|
else:
|
|
|
|
return LookupError(f'Could not find channel in "{url}".')
|
|
|
|
return channel_id
|
|
|
|
|
|
|
|
async def resolve_stream(self, url: URL, channel_id: str = None):
|
|
|
|
if not url.has_stream:
|
|
|
|
return None
|
|
|
|
if url.has_channel and channel_id is None:
|
|
|
|
return None
|
|
|
|
query = url.stream.to_dict()
|
|
|
|
if url.stream.claim_id is not None:
|
|
|
|
if url.stream.is_fullid:
|
|
|
|
claim_id = url.stream.claim_id
|
|
|
|
else:
|
|
|
|
claim_id = await self.full_id_from_short_id(query['name'], query['claim_id'], channel_id)
|
|
|
|
return claim_id
|
|
|
|
|
|
|
|
if channel_id is not None:
|
|
|
|
if set(query) == {'name'}:
|
|
|
|
# temporarily emulate is_controlling for claims in channel
|
|
|
|
query['order_by'] = ['effective_amount', '^height']
|
|
|
|
else:
|
|
|
|
query['order_by'] = ['^channel_join']
|
|
|
|
query['channel_id'] = channel_id
|
|
|
|
query['signature_valid'] = True
|
|
|
|
elif set(query) == {'name'}:
|
|
|
|
query['is_controlling'] = True
|
|
|
|
matches, _, _ = await self.search(**query, limit=1)
|
|
|
|
if matches:
|
|
|
|
return matches[0]['claim_id']
|
|
|
|
|
|
|
|
*/
|
|
|
|
|
|
|
|
func (s *Server) Search(ctx context.Context, in *pb.SearchRequest) (*pb.Outputs, error) {
|
|
|
|
var client *elastic.Client = nil
|
|
|
|
if s.EsClient == nil {
|
2021-06-04 18:38:17 +02:00
|
|
|
esUrl := s.Args.EsHost + ":" + s.Args.EsPort
|
|
|
|
tmpClient, err := elastic.NewClient(elastic.SetURL(esUrl), elastic.SetSniff(false))
|
2021-06-04 07:56:50 +02:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
client = tmpClient
|
|
|
|
s.EsClient = client
|
|
|
|
} else {
|
|
|
|
client = s.EsClient
|
|
|
|
}
|
|
|
|
|
2021-06-04 18:38:17 +02:00
|
|
|
//res := s.resolveUrl(ctx, "@abc#111")
|
|
|
|
//log.Println(res)
|
2021-06-04 07:56:50 +02:00
|
|
|
|
2021-05-13 22:06:19 +02:00
|
|
|
claimTypes := map[string]int {
|
|
|
|
"stream": 1,
|
|
|
|
"channel": 2,
|
|
|
|
"repost": 3,
|
|
|
|
"collection": 4,
|
|
|
|
}
|
2021-05-18 12:02:55 +02:00
|
|
|
|
|
|
|
streamTypes := map[string]int {
|
|
|
|
"video": 1,
|
|
|
|
"audio": 2,
|
|
|
|
"image": 3,
|
|
|
|
"document": 4,
|
|
|
|
"binary": 5,
|
|
|
|
"model": 6,
|
|
|
|
}
|
|
|
|
|
|
|
|
replacements := map[string]string {
|
|
|
|
"name": "normalized",
|
|
|
|
"txid": "tx_id",
|
|
|
|
"claim_hash": "_id",
|
|
|
|
}
|
|
|
|
|
|
|
|
textFields := map[string]bool {
|
|
|
|
"author": true,
|
|
|
|
"canonical_url": true,
|
|
|
|
"channel_id": true,
|
|
|
|
"claim_name": true,
|
|
|
|
"description": true,
|
|
|
|
"claim_id": true,
|
|
|
|
"media_type": true,
|
|
|
|
"normalized": true,
|
|
|
|
"public_key_bytes": true,
|
|
|
|
"public_key_hash": true,
|
|
|
|
"short_url": true,
|
|
|
|
"signature": true,
|
|
|
|
"signature_digest": true,
|
|
|
|
"stream_type": true,
|
|
|
|
"title": true,
|
|
|
|
"tx_id": true,
|
|
|
|
"fee_currency": true,
|
|
|
|
"reposted_claim_id": true,
|
|
|
|
"tags": true,
|
|
|
|
}
|
|
|
|
|
|
|
|
var from = 0
|
|
|
|
var size = 10
|
|
|
|
var orderBy []orderField
|
|
|
|
|
2021-04-19 21:25:34 +02:00
|
|
|
// Ping the Elasticsearch server to get e.g. the version number
|
2021-04-21 22:19:05 +02:00
|
|
|
//_, code, err := client.Ping("http://127.0.0.1:9200").Do(ctx)
|
|
|
|
//if err != nil {
|
|
|
|
// return nil, err
|
|
|
|
//}
|
|
|
|
//if code != 200 {
|
|
|
|
// return nil, errors.New("ping failed")
|
|
|
|
//}
|
2021-04-19 21:25:34 +02:00
|
|
|
|
2021-04-21 22:19:05 +02:00
|
|
|
// TODO: support all of this https://github.com/lbryio/lbry-sdk/blob/master/lbry/wallet/server/db/elasticsearch/search.py#L385
|
2021-05-13 22:06:19 +02:00
|
|
|
|
|
|
|
q := elastic.NewBoolQuery()
|
|
|
|
|
2021-05-31 20:53:08 +02:00
|
|
|
if in.IsControlling != nil {
|
|
|
|
q = q.Must(elastic.NewTermQuery("is_controlling", in.IsControlling.Value))
|
|
|
|
}
|
|
|
|
|
|
|
|
if in.AmountOrder != nil {
|
|
|
|
in.Limit.Value = 1
|
2021-05-18 12:02:55 +02:00
|
|
|
in.OrderBy = []string{"effective_amount"}
|
2021-05-31 20:53:08 +02:00
|
|
|
in.Offset = &wrappers.Int32Value{Value: in.AmountOrder.Value - 1}
|
2021-05-13 22:06:19 +02:00
|
|
|
}
|
|
|
|
|
2021-05-31 20:53:08 +02:00
|
|
|
if in.Limit != nil {
|
|
|
|
size = int(in.Limit.Value)
|
2021-05-18 12:02:55 +02:00
|
|
|
}
|
|
|
|
|
2021-05-31 20:53:08 +02:00
|
|
|
if in.Offset != nil {
|
|
|
|
from = int(in.Offset.Value)
|
2021-05-18 12:02:55 +02:00
|
|
|
}
|
|
|
|
|
2021-05-25 05:28:43 +02:00
|
|
|
if len(in.Name) > 0 {
|
2021-05-31 03:34:57 +02:00
|
|
|
normalized := make([]string, len(in.Name))
|
|
|
|
for i := 0; i < len(in.Name); i++ {
|
2021-06-04 07:56:50 +02:00
|
|
|
normalized[i] = util.Normalize(in.Name[i])
|
2021-05-31 03:34:57 +02:00
|
|
|
}
|
|
|
|
in.Normalized = normalized
|
2021-05-25 05:28:43 +02:00
|
|
|
}
|
|
|
|
|
2021-05-18 12:02:55 +02:00
|
|
|
if len(in.OrderBy) > 0 {
|
|
|
|
for _, x := range in.OrderBy {
|
|
|
|
var toAppend string
|
|
|
|
var is_asc = false
|
|
|
|
if x[0] == '^' {
|
|
|
|
is_asc = true
|
|
|
|
x = x[1:]
|
|
|
|
}
|
|
|
|
if _, ok := replacements[x]; ok {
|
|
|
|
toAppend = replacements[x]
|
2021-05-31 20:53:08 +02:00
|
|
|
} else {
|
|
|
|
toAppend = x
|
2021-05-18 12:02:55 +02:00
|
|
|
}
|
2021-05-31 20:53:08 +02:00
|
|
|
|
|
|
|
if _, ok := textFields[toAppend]; ok {
|
|
|
|
toAppend = toAppend + ".keyword"
|
2021-05-18 12:02:55 +02:00
|
|
|
}
|
|
|
|
orderBy = append(orderBy, orderField{toAppend, is_asc})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-05-13 22:06:19 +02:00
|
|
|
if len(in.ClaimType) > 0 {
|
|
|
|
searchVals := make([]interface{}, len(in.ClaimType))
|
|
|
|
for i := 0; i < len(in.ClaimType); i++ {
|
|
|
|
searchVals[i] = claimTypes[in.ClaimType[i]]
|
|
|
|
}
|
|
|
|
q = q.Must(elastic.NewTermsQuery("claim_type", searchVals...))
|
|
|
|
}
|
|
|
|
|
2021-05-18 12:02:55 +02:00
|
|
|
// FIXME is this a text field or not?
|
|
|
|
if len(in.StreamType) > 0 {
|
|
|
|
searchVals := make([]interface{}, len(in.StreamType))
|
|
|
|
for i := 0; i < len(in.StreamType); i++ {
|
|
|
|
searchVals[i] = streamTypes[in.StreamType[i]]
|
|
|
|
}
|
2021-06-01 04:19:10 +02:00
|
|
|
q = q.Must(elastic.NewTermsQuery("stream_type", searchVals...))
|
2021-05-18 12:02:55 +02:00
|
|
|
}
|
|
|
|
|
2021-05-13 22:06:19 +02:00
|
|
|
if len(in.XId) > 0 {
|
|
|
|
searchVals := make([]interface{}, len(in.XId))
|
|
|
|
for i := 0; i < len(in.XId); i++ {
|
2021-06-04 07:56:50 +02:00
|
|
|
util.ReverseBytes(in.XId[i])
|
2021-05-13 22:06:19 +02:00
|
|
|
searchVals[i] = hex.Dump(in.XId[i])
|
|
|
|
}
|
2021-05-16 05:13:14 +02:00
|
|
|
if len(in.XId) == 1 && len(in.XId[0]) < 20 {
|
|
|
|
q = q.Must(elastic.NewPrefixQuery("_id", string(in.XId[0])))
|
|
|
|
} else {
|
|
|
|
q = q.Must(elastic.NewTermsQuery("_id", searchVals...))
|
|
|
|
}
|
2021-05-13 22:06:19 +02:00
|
|
|
}
|
|
|
|
|
2021-05-16 05:13:14 +02:00
|
|
|
|
2021-05-18 12:02:55 +02:00
|
|
|
if in.ClaimId != nil {
|
|
|
|
searchVals := StrArrToInterface(in.ClaimId.Value)
|
|
|
|
if len(in.ClaimId.Value) == 1 && len(in.ClaimId.Value[0]) < 20 {
|
|
|
|
if in.ClaimId.Invert {
|
|
|
|
q = q.MustNot(elastic.NewPrefixQuery("claim_id.keyword", in.ClaimId.Value[0]))
|
|
|
|
} else {
|
|
|
|
q = q.Must(elastic.NewPrefixQuery("claim_id.keyword", in.ClaimId.Value[0]))
|
|
|
|
}
|
2021-05-16 05:13:14 +02:00
|
|
|
} else {
|
2021-05-18 12:02:55 +02:00
|
|
|
if in.ClaimId.Invert {
|
|
|
|
q = q.MustNot(elastic.NewTermsQuery("claim_id.keyword", searchVals...))
|
|
|
|
} else {
|
|
|
|
q = q.Must(elastic.NewTermsQuery("claim_id.keyword", searchVals...))
|
|
|
|
}
|
2021-05-16 05:13:14 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if in.PublicKeyId != "" {
|
2021-05-31 20:53:08 +02:00
|
|
|
value := hex.EncodeToString(base58.Decode(in.PublicKeyId)[1:21])
|
2021-05-16 05:13:14 +02:00
|
|
|
q = q.Must(elastic.NewTermQuery("public_key_hash.keyword", value))
|
|
|
|
}
|
|
|
|
|
2021-05-31 20:53:08 +02:00
|
|
|
if in.HasChannelSignature != nil && in.HasChannelSignature.Value {
|
|
|
|
q = q.Must(elastic.NewExistsQuery("signature_digest"))
|
|
|
|
if in.SignatureValid != nil {
|
|
|
|
q = q.Must(elastic.NewTermQuery("signature_valid", in.SignatureValid.Value))
|
2021-05-18 12:02:55 +02:00
|
|
|
}
|
2021-05-31 20:53:08 +02:00
|
|
|
} else if in.SignatureValid != nil {
|
2021-05-18 12:02:55 +02:00
|
|
|
q = q.MinimumNumberShouldMatch(1)
|
|
|
|
q = q.Should(elastic.NewBoolQuery().MustNot(elastic.NewExistsQuery("signature_digest")))
|
2021-05-31 20:53:08 +02:00
|
|
|
q = q.Should(elastic.NewTermQuery("signature_valid", in.SignatureValid.Value))
|
2021-05-18 12:02:55 +02:00
|
|
|
}
|
|
|
|
|
2021-05-31 20:53:08 +02:00
|
|
|
if in.HasSource != nil {
|
2021-05-18 12:02:55 +02:00
|
|
|
q = q.MinimumNumberShouldMatch(1)
|
2021-05-31 20:53:08 +02:00
|
|
|
isStreamOrRepost := elastic.NewTermsQuery("claim_type", claimTypes["stream"], claimTypes["repost"])
|
|
|
|
q = q.Should(elastic.NewBoolQuery().Must(isStreamOrRepost, elastic.NewMatchQuery("has_source", in.HasSource.Value)))
|
|
|
|
q = q.Should(elastic.NewBoolQuery().MustNot(isStreamOrRepost))
|
2021-05-18 12:02:55 +02:00
|
|
|
q = q.Should(elastic.NewBoolQuery().Must(elastic.NewTermQuery("reposted_claim_type", claimTypes["channel"])))
|
|
|
|
}
|
|
|
|
|
|
|
|
var collapse *elastic.CollapseBuilder
|
2021-05-31 20:53:08 +02:00
|
|
|
if in.LimitClaimsPerChannel != nil {
|
2021-06-01 04:19:10 +02:00
|
|
|
println(in.LimitClaimsPerChannel.Value)
|
2021-05-31 20:53:08 +02:00
|
|
|
innerHit := elastic.NewInnerHit().Size(int(in.LimitClaimsPerChannel.Value)).Name("channel_id.keyword")
|
2021-05-18 12:02:55 +02:00
|
|
|
collapse = elastic.NewCollapseBuilder("channel_id.keyword").InnerHit(innerHit)
|
|
|
|
}
|
|
|
|
|
2021-05-31 20:53:08 +02:00
|
|
|
if in.TxNout != nil {
|
|
|
|
q = q.Must(elastic.NewTermQuery("tx_nout", in.TxNout.Value))
|
|
|
|
}
|
2021-05-18 12:02:55 +02:00
|
|
|
|
|
|
|
q = AddTermsField(in.PublicKeyHash, "public_key_hash.keyword", q)
|
|
|
|
q = AddTermsField(in.Author, "author.keyword", q)
|
|
|
|
q = AddTermsField(in.Title, "title.keyword", q)
|
|
|
|
q = AddTermsField(in.CanonicalUrl, "canonical_url.keyword", q)
|
|
|
|
q = AddTermsField(in.ClaimName, "claim_name.keyword", q)
|
|
|
|
q = AddTermsField(in.Description, "description.keyword", q)
|
|
|
|
q = AddTermsField(in.MediaType, "media_type.keyword", q)
|
|
|
|
q = AddTermsField(in.Normalized, "normalized.keyword", q)
|
|
|
|
q = AddTermsField(in.PublicKeyBytes, "public_key_bytes.keyword", q)
|
|
|
|
q = AddTermsField(in.ShortUrl, "short_url.keyword", q)
|
|
|
|
q = AddTermsField(in.Signature, "signature.keyword", q)
|
|
|
|
q = AddTermsField(in.SignatureDigest, "signature_digest.keyword", q)
|
|
|
|
q = AddTermsField(in.TxId, "tx_id.keyword", q)
|
|
|
|
q = AddTermsField(in.FeeCurrency, "fee_currency.keyword", q)
|
|
|
|
q = AddTermsField(in.RepostedClaimId, "reposted_claim_id.keyword", q)
|
|
|
|
|
2021-06-01 04:19:10 +02:00
|
|
|
|
|
|
|
q = AddTermsField(s.cleanTags(in.AnyTags), "tags.keyword", q)
|
|
|
|
q = AddIndividualTermFields(s.cleanTags(in.AllTags), "tags.keyword", q, false)
|
|
|
|
q = AddIndividualTermFields(s.cleanTags(in.NotTags), "tags.keyword", q, true)
|
|
|
|
q = AddTermsField(in.AnyLanguages, "languages", q)
|
|
|
|
q = AddIndividualTermFields(in.AllLanguages, "languages", q, false)
|
|
|
|
|
2021-05-18 12:02:55 +02:00
|
|
|
q = AddInvertibleField(in.ChannelId, "channel_id.keyword", q)
|
2021-05-31 20:53:08 +02:00
|
|
|
q = AddInvertibleField(in.ChannelIds, "channel_id.keyword", q)
|
2021-06-01 04:19:10 +02:00
|
|
|
/*
|
|
|
|
|
|
|
|
*/
|
2021-05-18 12:02:55 +02:00
|
|
|
|
|
|
|
q = AddRangeField(in.TxPosition, "tx_position", q)
|
|
|
|
q = AddRangeField(in.Amount, "amount", q)
|
|
|
|
q = AddRangeField(in.Timestamp, "timestamp", q)
|
|
|
|
q = AddRangeField(in.CreationTimestamp, "creation_timestamp", q)
|
|
|
|
q = AddRangeField(in.Height, "height", q)
|
|
|
|
q = AddRangeField(in.CreationHeight, "creation_height", q)
|
|
|
|
q = AddRangeField(in.ActivationHeight, "activation_height", q)
|
|
|
|
q = AddRangeField(in.ExpirationHeight, "expiration_height", q)
|
|
|
|
q = AddRangeField(in.ReleaseTime, "release_time", q)
|
|
|
|
q = AddRangeField(in.Reposted, "reposted", q)
|
|
|
|
q = AddRangeField(in.FeeAmount, "fee_amount", q)
|
|
|
|
q = AddRangeField(in.Duration, "duration", q)
|
|
|
|
q = AddRangeField(in.CensorType, "censor_type", q)
|
|
|
|
q = AddRangeField(in.ChannelJoin, "channel_join", q)
|
|
|
|
q = AddRangeField(in.EffectiveAmount, "effective_amount", q)
|
|
|
|
q = AddRangeField(in.SupportAmount, "support_amount", q)
|
|
|
|
q = AddRangeField(in.TrendingGroup, "trending_group", q)
|
|
|
|
q = AddRangeField(in.TrendingMixed, "trending_mixed", q)
|
|
|
|
q = AddRangeField(in.TrendingLocal, "trending_local", q)
|
|
|
|
q = AddRangeField(in.TrendingGlobal, "trending_global", q)
|
2021-05-16 05:13:14 +02:00
|
|
|
|
2021-05-31 20:53:08 +02:00
|
|
|
if in.Text != "" {
|
|
|
|
textQuery := elastic.NewSimpleQueryStringQuery(in.Text).
|
2021-05-13 22:06:19 +02:00
|
|
|
FieldWithBoost("claim_name", 4).
|
|
|
|
FieldWithBoost("channel_name", 8).
|
|
|
|
FieldWithBoost("title", 1).
|
|
|
|
FieldWithBoost("description", 0.5).
|
|
|
|
FieldWithBoost("author", 1).
|
|
|
|
FieldWithBoost("tags", 0.5)
|
|
|
|
|
|
|
|
q = q.Must(textQuery)
|
|
|
|
}
|
|
|
|
|
2021-05-31 03:34:57 +02:00
|
|
|
|
2021-06-01 04:19:10 +02:00
|
|
|
//TODO make this only happen in dev environment
|
2021-05-31 20:53:08 +02:00
|
|
|
indices, err := client.IndexNames()
|
|
|
|
if err != nil {
|
|
|
|
log.Fatalln(err)
|
|
|
|
}
|
|
|
|
searchIndices := make([]string, len(indices)-1)
|
|
|
|
j := 0
|
|
|
|
for i := 0; i < len(indices); i++ {
|
|
|
|
if indices[i] == "claims" {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
searchIndices[j] = indices[i]
|
|
|
|
j = j + 1
|
|
|
|
}
|
|
|
|
|
2021-05-31 03:34:57 +02:00
|
|
|
fsc := elastic.NewFetchSourceContext(true).Exclude("description", "title")
|
2021-05-18 12:02:55 +02:00
|
|
|
search := client.Search().
|
2021-05-31 20:53:08 +02:00
|
|
|
Index(searchIndices...).
|
2021-05-31 03:34:57 +02:00
|
|
|
FetchSourceContext(fsc).
|
2021-04-19 21:25:34 +02:00
|
|
|
Query(q). // specify the query
|
2021-05-18 12:02:55 +02:00
|
|
|
From(from).Size(size)
|
2021-05-31 20:53:08 +02:00
|
|
|
if in.LimitClaimsPerChannel != nil {
|
2021-05-18 12:02:55 +02:00
|
|
|
search = search.Collapse(collapse)
|
|
|
|
}
|
|
|
|
for _, x := range orderBy {
|
2021-06-01 04:19:10 +02:00
|
|
|
log.Println(x.Field, x.is_asc)
|
2021-05-18 12:02:55 +02:00
|
|
|
search = search.Sort(x.Field, x.is_asc)
|
|
|
|
}
|
|
|
|
|
|
|
|
searchResult, err := search.Do(ctx) // execute
|
2021-04-19 21:25:34 +02:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2021-05-31 20:53:08 +02:00
|
|
|
log.Printf("%s: found %d results in %dms\n", in.Text, len(searchResult.Hits.Hits), searchResult.TookInMillis)
|
2021-04-19 21:25:34 +02:00
|
|
|
|
|
|
|
txos := make([]*pb.Output, len(searchResult.Hits.Hits))
|
|
|
|
|
|
|
|
var r record
|
|
|
|
for i, item := range searchResult.Each(reflect.TypeOf(r)) {
|
|
|
|
if t, ok := item.(record); ok {
|
|
|
|
txos[i] = &pb.Output{
|
2021-06-04 07:56:50 +02:00
|
|
|
TxHash: util.ToHash(t.Txid),
|
2021-04-19 21:25:34 +02:00
|
|
|
Nout: t.Nout,
|
2021-05-31 03:34:57 +02:00
|
|
|
Height: t.Height,
|
2021-04-19 21:25:34 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// or if you want more control
|
2021-06-01 04:19:10 +02:00
|
|
|
for _, hit := range searchResult.Hits.Hits {
|
|
|
|
// hit.Index contains the name of the index
|
|
|
|
|
|
|
|
var t map[string]interface{} // or could be a Record
|
|
|
|
err := json.Unmarshal(hit.Source, &t)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
b, err := json.MarshalIndent(t, "", " ")
|
|
|
|
if err != nil {
|
|
|
|
fmt.Println("error:", err)
|
|
|
|
}
|
|
|
|
fmt.Println(string(b))
|
|
|
|
//for k := range t {
|
|
|
|
// fmt.Println(k)
|
|
|
|
//}
|
|
|
|
//return nil, nil
|
|
|
|
}
|
2021-04-19 21:25:34 +02:00
|
|
|
|
2021-06-03 06:31:58 +02:00
|
|
|
return &pb.Outputs{
|
2021-06-01 04:19:10 +02:00
|
|
|
Txos: txos,
|
|
|
|
Total: uint32(searchResult.TotalHits()),
|
|
|
|
Offset: uint32(int64(from) + searchResult.TotalHits()),
|
2021-04-19 21:25:34 +02:00
|
|
|
}, nil
|
|
|
|
}
|