commit
5771a16c65
5
Dockerfile
Normal file
5
Dockerfile
Normal file
|
@ -0,0 +1,5 @@
|
|||
FROM debian:10-slim
|
||||
|
||||
EXPOSE 50051
|
||||
COPY ./hub /hub
|
||||
ENTRYPOINT ["/hub", "serve"]
|
6
action.yml
Normal file
6
action.yml
Normal file
|
@ -0,0 +1,6 @@
|
|||
# action.yml
|
||||
name: 'Hub'
|
||||
description: 'Run go hub'
|
||||
runs:
|
||||
using: 'docker'
|
||||
image: 'dev.dockerfile'
|
4
build.sh
Executable file
4
build.sh
Executable file
|
@ -0,0 +1,4 @@
|
|||
#!/bin/bash
|
||||
|
||||
go build .
|
||||
sudo docker build . -t lbry/hub:latest
|
6
dev.dockerfile
Normal file
6
dev.dockerfile
Normal file
|
@ -0,0 +1,6 @@
|
|||
FROM debian:10-slim
|
||||
|
||||
EXPOSE 50051
|
||||
RUN apt-get update && apt-get install curl -y
|
||||
RUN curl -L -o /hub https://github.com/lbryio/hub/releases/download/v0.2021.06.14-beta/hub && chmod +x /hub
|
||||
ENTRYPOINT ["/hub", "serve", "--dev"]
|
2
dev.sh
2
dev.sh
|
@ -3,4 +3,4 @@
|
|||
hash reflex 2>/dev/null || go get github.com/cespare/reflex
|
||||
hash reflex 2>/dev/null || { echo >&2 'Make sure '"$(go env GOPATH)"'/bin is in your $PATH'; exit 1; }
|
||||
|
||||
reflex --decoration=none --start-service=true -- sh -c "go run . serve"
|
||||
reflex --decoration=none --start-service=true -- sh -c "go run . serve --dev"
|
||||
|
|
34
docker-compose-hub-server.yml
Normal file
34
docker-compose-hub-server.yml
Normal file
|
@ -0,0 +1,34 @@
|
|||
version: "3"
|
||||
|
||||
volumes:
|
||||
es01:
|
||||
|
||||
services:
|
||||
hub_server:
|
||||
depends_on:
|
||||
- es01
|
||||
image: lbry/hub:latest
|
||||
restart: always
|
||||
environment:
|
||||
#- TCP_PORT=50051 # should probably have these supported by the go server too
|
||||
#- TCP_HOST=0.0.0.0
|
||||
- ELASTIC_HOST=http://127.0.0.1
|
||||
- ELASTIC_PORT=9200
|
||||
network_mode: host
|
||||
es01:
|
||||
image: docker.elastic.co/elasticsearch/elasticsearch:7.12.1
|
||||
container_name: es01
|
||||
environment:
|
||||
- node.name=es01
|
||||
- discovery.type=single-node
|
||||
- indices.query.bool.max_clause_count=4096
|
||||
- bootstrap.memory_lock=true
|
||||
- "ES_JAVA_OPTS=-Xms512m -Xmx512m" # no more than 32, remember to disable swap
|
||||
#- "ES_JAVA_OPTS=-Xms8g -Xmx8g" # no more than 32, remember to disable swap
|
||||
ulimits:
|
||||
memlock:
|
||||
soft: -1
|
||||
hard: -1
|
||||
volumes:
|
||||
- es01:/usr/share/elasticsearch/data
|
||||
network_mode: host
|
13
go.mod
13
go.mod
|
@ -3,10 +3,15 @@ module github.com/lbryio/hub
|
|||
go 1.16
|
||||
|
||||
require (
|
||||
github.com/golang/protobuf v1.5.1 // indirect
|
||||
github.com/akamensky/argparse v1.2.2
|
||||
github.com/btcsuite/btcutil v1.0.2
|
||||
github.com/golang/protobuf v1.5.2
|
||||
github.com/olivere/elastic/v7 v7.0.24
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110 // indirect
|
||||
golang.org/x/sys v0.0.0-20210317225723-c4fcb01b228e // indirect
|
||||
google.golang.org/grpc v1.36.0
|
||||
golang.org/x/net v0.0.0-20210525063256-abc453219eb5 // indirect
|
||||
golang.org/x/sys v0.0.0-20210525143221-35b2ab0089ea // indirect
|
||||
golang.org/x/text v0.3.6
|
||||
google.golang.org/genproto v0.0.0-20210524171403-669157292da3 // indirect
|
||||
google.golang.org/grpc v1.38.0
|
||||
google.golang.org/protobuf v1.26.0
|
||||
gopkg.in/karalabe/cookiejar.v1 v1.0.0-20141109175019-e1490cae028c
|
||||
)
|
||||
|
|
77
go.sum
77
go.sum
|
@ -1,18 +1,34 @@
|
|||
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII=
|
||||
github.com/akamensky/argparse v1.2.2 h1:P17T0ZjlUNJuWTPPJ2A5dM1wxarHgHqfYH+AZTo2xQA=
|
||||
github.com/akamensky/argparse v1.2.2/go.mod h1:S5kwC7IuDcEr5VeXtGPRVZ5o/FdhcMlQz4IZQuw64xA=
|
||||
github.com/aws/aws-sdk-go v1.38.3/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro=
|
||||
github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ=
|
||||
github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f/go.mod h1:TdznJufoqS23FtqVCzL0ZqgP5MqXbb4fg/WgDys70nA=
|
||||
github.com/btcsuite/btcutil v0.0.0-20190425235716-9e5f4b9a998d/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg=
|
||||
github.com/btcsuite/btcutil v1.0.2 h1:9iZ1Terx9fMIOtq1VrwdqfsATL9MC2l8ZrUY6YZ2uts=
|
||||
github.com/btcsuite/btcutil v1.0.2/go.mod h1:j9HUFwoQRsZL3V4n+qG+CUnEGHOarIxfC3Le2Yhbcts=
|
||||
github.com/btcsuite/go-socks v0.0.0-20170105172521-4720035b7bfd/go.mod h1:HHNXQzUsZCxOoE+CPiyCTO6x34Zs86zZUiwtpXoGdtg=
|
||||
github.com/btcsuite/goleveldb v0.0.0-20160330041536-7834afc9e8cd/go.mod h1:F+uVaaLLH7j4eDXPRvw78tMflu7Ie2bzYOH4Y8rRKBY=
|
||||
github.com/btcsuite/snappy-go v0.0.0-20151229074030-0bdef8d06723/go.mod h1:8woku9dyThutzjeg+3xrA5iCpBRH8XEEg3lh6TiUghc=
|
||||
github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792/go.mod h1:ghJtEyQwv5/p4Mg4C0fgbePVuGr935/5ddU9Z3TmDRY=
|
||||
github.com/btcsuite/winsvc v1.0.0/go.mod h1:jsenWakMcC0zFBFurPLEAyrnc/teJEM1O46fmI40EZs=
|
||||
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
|
||||
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
||||
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
|
||||
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
|
||||
github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
||||
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
||||
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
|
||||
github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
|
||||
github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
|
||||
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
|
||||
github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw=
|
||||
github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
|
||||
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
||||
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
|
||||
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
|
||||
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||
|
@ -28,8 +44,8 @@ github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QD
|
|||
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
|
||||
github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
|
||||
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
|
||||
github.com/golang/protobuf v1.5.1 h1:jAbXjIeW2ZSW2AwFxlGTDoc2CjI2XujLkV3ArsZFCvc=
|
||||
github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM=
|
||||
github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=
|
||||
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
|
||||
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
|
||||
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
|
@ -39,14 +55,21 @@ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
|
|||
github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=
|
||||
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
|
||||
github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
|
||||
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
|
||||
github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
|
||||
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
|
||||
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
|
||||
github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ=
|
||||
github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4=
|
||||
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
|
||||
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
|
||||
github.com/olivere/elastic/v7 v7.0.24 h1:9ZcCQP3Pvgese7TaypYiVAL49sCEphyIwkVxtRf8jb8=
|
||||
github.com/olivere/elastic/v7 v7.0.24/go.mod h1:OuWmD2DiuYhddWegBKPWQuelVKBLrW0fa/VUYgxuGTY=
|
||||
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
|
||||
github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
|
@ -59,56 +82,83 @@ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+
|
|||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
|
||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=
|
||||
golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20200115085410-6d4e4cb37c7d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
|
||||
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||
golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
|
||||
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
|
||||
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||
golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110 h1:qWPm9rbaAMKs8Bq/9LRpbMqxWRVUAQwMI9fVrssnTfw=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc=
|
||||
golang.org/x/net v0.0.0-20210525063256-abc453219eb5 h1:wjuX4b5yYQnEQHzd+CBcrcC6OVR2J1CN6mUy0oSxIPo=
|
||||
golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210317225723-c4fcb01b228e h1:XNp2Flc/1eWQGk5BLzqTAN7fQIwIbfyVTuVxXxZh73M=
|
||||
golang.org/x/sys v0.0.0-20210317225723-c4fcb01b228e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210525143221-35b2ab0089ea h1:+WiDlPBBaO+h9vPNZi8uJ3k4BkKQB7Iow3aqwHVA5hI=
|
||||
golang.org/x/sys v0.0.0-20210525143221-35b2ab0089ea/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
|
||||
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
|
||||
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
|
||||
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
|
||||
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013 h1:+kGHl1aib/qcwaRi1CbqBZ1rk19r85MNUf8HaBghugY=
|
||||
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
|
||||
google.golang.org/genproto v0.0.0-20210524171403-669157292da3 h1:xFyh6GBb+NO1L0xqb978I3sBPQpk6FrKO0jJGRvdj/0=
|
||||
google.golang.org/genproto v0.0.0-20210524171403-669157292da3/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A=
|
||||
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
||||
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
|
||||
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
|
||||
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
|
||||
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
|
||||
google.golang.org/grpc v1.36.0 h1:o1bcQ6imQMIOpdrO3SWf2z5RV72WbDwdXuK0MDlc8As=
|
||||
google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
|
||||
google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
|
||||
google.golang.org/grpc v1.38.0 h1:/9BgsAsa5nWe26HqOlvlgJnqBuktYOLCgjCPqsa56W0=
|
||||
google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
|
||||
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
|
||||
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
|
||||
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
|
||||
|
@ -122,6 +172,11 @@ google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp0
|
|||
google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk=
|
||||
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
|
||||
gopkg.in/karalabe/cookiejar.v1 v1.0.0-20141109175019-e1490cae028c h1:4GYkPhjcYLPrPAnoxHVQlH/xcXtWN8pEgqBnHrPAs8c=
|
||||
gopkg.in/karalabe/cookiejar.v1 v1.0.0-20141109175019-e1490cae028c/go.mod h1:xd7qpr5uPMNy4hsRJ5JEBXA8tJjTFmUI1soCjlCIgAE=
|
||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
|
||||
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
|
|
156
main.go
156
main.go
|
@ -3,40 +3,167 @@ package main
|
|||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"github.com/lbryio/hub/util"
|
||||
"log"
|
||||
"net"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/akamensky/argparse"
|
||||
pb "github.com/lbryio/hub/protobuf/go"
|
||||
"github.com/lbryio/hub/server"
|
||||
|
||||
"google.golang.org/grpc"
|
||||
"google.golang.org/grpc/reflection"
|
||||
)
|
||||
|
||||
const (
|
||||
port = ":50051"
|
||||
defaultHost = "0.0.0.0"
|
||||
defaultPort = "50051"
|
||||
defaultEsHost = "http://localhost"
|
||||
defaultEsPort = "9200"
|
||||
)
|
||||
|
||||
|
||||
func GetEnvironment(data []string, getkeyval func(item string) (key, val string)) map[string]string {
|
||||
items := make(map[string]string)
|
||||
for _, item := range data {
|
||||
key, val := getkeyval(item)
|
||||
items[key] = val
|
||||
}
|
||||
return items
|
||||
}
|
||||
|
||||
func GetEnvironmentStandard() map[string]string {
|
||||
return GetEnvironment(os.Environ(), func(item string) (key, val string) {
|
||||
splits := strings.Split(item, "=")
|
||||
key = splits[0]
|
||||
val = splits[1]
|
||||
return
|
||||
})
|
||||
}
|
||||
|
||||
func parseArgs(searchRequest *pb.SearchRequest) *server.Args {
|
||||
|
||||
environment := GetEnvironmentStandard()
|
||||
parser := argparse.NewParser("hub", "hub server and client")
|
||||
|
||||
serveCmd := parser.NewCommand("serve", "start the hub server")
|
||||
|
||||
host := parser.String("", "rpchost", &argparse.Options{Required: false, Help: "host", Default: defaultHost})
|
||||
port := parser.String("", "rpcport", &argparse.Options{Required: false, Help: "port", Default: defaultPort})
|
||||
esHost := parser.String("", "eshost", &argparse.Options{Required: false, Help: "host", Default: defaultEsHost})
|
||||
esPort := parser.String("", "esport", &argparse.Options{Required: false, Help: "port", Default: defaultEsPort})
|
||||
dev := parser.Flag("", "dev", &argparse.Options{Required: false, Help: "port", Default: false})
|
||||
|
||||
text := parser.String("", "text", &argparse.Options{Required: false, Help: "text query"})
|
||||
name := parser.String("", "name", &argparse.Options{Required: false, Help: "name"})
|
||||
claimType := parser.String("", "claim_type", &argparse.Options{Required: false, Help: "claim_type"})
|
||||
id := parser.String("", "id", &argparse.Options{Required: false, Help: "id"})
|
||||
author := parser.String("", "author", &argparse.Options{Required: false, Help: "author"})
|
||||
title := parser.String("", "title", &argparse.Options{Required: false, Help: "title"})
|
||||
description := parser.String("", "description", &argparse.Options{Required: false, Help: "description"})
|
||||
channelId := parser.String("", "channel_id", &argparse.Options{Required: false, Help: "channel id"})
|
||||
channelIds := parser.StringList("", "channel_ids", &argparse.Options{Required: false, Help: "channel ids"})
|
||||
|
||||
// Now parse the arguments
|
||||
err := parser.Parse(os.Args)
|
||||
if err != nil {
|
||||
log.Fatalln(parser.Usage(err))
|
||||
}
|
||||
|
||||
|
||||
args := &server.Args{
|
||||
Serve: false,
|
||||
Host: *host,
|
||||
Port: ":" + *port,
|
||||
EsHost: *esHost,
|
||||
EsPort: *esPort,
|
||||
Dev: *dev,
|
||||
}
|
||||
|
||||
if esHost, ok := environment["ELASTIC_HOST"]; ok {
|
||||
args.EsHost = esHost
|
||||
}
|
||||
|
||||
if !strings.HasPrefix(args.EsHost, "http") {
|
||||
args.EsHost = "http://" + args.EsHost
|
||||
}
|
||||
|
||||
if esPort, ok := environment["ELASTIC_PORT"]; ok {
|
||||
args.EsPort = esPort
|
||||
}
|
||||
|
||||
/*
|
||||
Verify no invalid argument combinations
|
||||
*/
|
||||
if len(*channelIds) > 0 && *channelId != "" {
|
||||
log.Fatal("Cannot specify both channel_id and channel_ids")
|
||||
}
|
||||
|
||||
if serveCmd.Happened() {
|
||||
args.Serve = true
|
||||
}
|
||||
|
||||
if *text != "" {
|
||||
searchRequest.Text = *text
|
||||
}
|
||||
if *name!= "" {
|
||||
searchRequest.Name = []string{*name}
|
||||
}
|
||||
if *claimType != "" {
|
||||
searchRequest.ClaimType = []string{*claimType}
|
||||
}
|
||||
if *id != "" {
|
||||
searchRequest.XId = [][]byte{[]byte(*id)}
|
||||
}
|
||||
if *author != "" {
|
||||
searchRequest.Author = []string{*author}
|
||||
}
|
||||
if *title != "" {
|
||||
searchRequest.Title = []string{*title}
|
||||
}
|
||||
if *description != "" {
|
||||
searchRequest.Description = []string{*description}
|
||||
}
|
||||
if *channelId != "" {
|
||||
searchRequest.ChannelId = &pb.InvertibleField{Invert: false, Value: []string{*channelId}}
|
||||
}
|
||||
if len(*channelIds) > 0 {
|
||||
searchRequest.ChannelId = &pb.InvertibleField{Invert: false, Value: *channelIds}
|
||||
}
|
||||
|
||||
return args
|
||||
}
|
||||
|
||||
func main() {
|
||||
if len(os.Args) == 2 && os.Args[1] == "serve" {
|
||||
l, err := net.Listen("tcp", port)
|
||||
searchRequest := &pb.SearchRequest{}
|
||||
|
||||
args := parseArgs(searchRequest)
|
||||
|
||||
if args.Serve {
|
||||
|
||||
l, err := net.Listen("tcp", args.Port)
|
||||
if err != nil {
|
||||
log.Fatalf("failed to listen: %v", err)
|
||||
}
|
||||
|
||||
s := grpc.NewServer()
|
||||
pb.RegisterHubServer(s, &server.Server{})
|
||||
s := server.MakeHubServer(args)
|
||||
pb.RegisterHubServer(s.GrpcServer, s)
|
||||
reflection.Register(s.GrpcServer)
|
||||
|
||||
log.Printf("listening on %s\n", l.Addr().String())
|
||||
if err := s.Serve(l); err != nil {
|
||||
log.Println(s.Args)
|
||||
if err := s.GrpcServer.Serve(l); err != nil {
|
||||
log.Fatalf("failed to serve: %v", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
conn, err := grpc.Dial("localhost"+port, grpc.WithInsecure(), grpc.WithBlock())
|
||||
conn, err := grpc.Dial("localhost"+args.Port,
|
||||
grpc.WithInsecure(),
|
||||
grpc.WithBlock(),
|
||||
)
|
||||
if err != nil {
|
||||
log.Fatalf("did not connect: %v", err)
|
||||
}
|
||||
|
@ -44,18 +171,11 @@ func main() {
|
|||
|
||||
c := pb.NewHubClient(conn)
|
||||
|
||||
var query string
|
||||
if len(os.Args) > 1 {
|
||||
query = strings.Join(os.Args[1:], " ")
|
||||
} else {
|
||||
log.Printf("error: no search query provided\n")
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), time.Second)
|
||||
defer cancel()
|
||||
|
||||
r, err := c.Search(ctx, &pb.SearchRequest{Query: query})
|
||||
|
||||
r, err := c.Search(ctx, searchRequest)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
@ -63,6 +183,6 @@ func main() {
|
|||
log.Printf("found %d results\n", r.GetTotal())
|
||||
|
||||
for _, t := range r.Txos {
|
||||
fmt.Printf("%s:%d\n", server.FromHash(t.TxHash), t.Nout)
|
||||
fmt.Printf("%s:%d\n", util.FromHash(t.TxHash), t.Nout)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -33,6 +33,12 @@ protoc --proto_path="$DIR/definitions" \
|
|||
--go-grpc_out="$DIR/go" --go-grpc_opt=paths=source_relative \
|
||||
$DIR/definitions/*.proto
|
||||
# --python_out="$DIR/python" \
|
||||
# --grpc_python_out="$DIR/python"
|
||||
# --js_out="import_style=commonjs,binary:$DIR/js" \
|
||||
python -m grpc_tools.protoc --proto_path="$DIR/definitions" \
|
||||
--python_out="$DIR/python" \
|
||||
--grpc_python_out="$DIR/python" \
|
||||
$DIR/definitions/*.proto
|
||||
|
||||
|
||||
ls "$DIR"/go/*.pb.go | xargs -n1 -IX bash -c "sed -e 's/,omitempty//' X > X.tmp && mv X{.tmp,}"
|
||||
|
|
|
@ -1,70 +1,96 @@
|
|||
syntax = "proto3";
|
||||
|
||||
option go_package = "github.com/lbryio/hub/protobuf/go/pb";
|
||||
import "google/protobuf/wrappers.proto";
|
||||
import "result.proto";
|
||||
|
||||
package pb;
|
||||
|
||||
service Hub {
|
||||
rpc Search (SearchRequest) returns (SearchReply) {}
|
||||
rpc Search (SearchRequest) returns (Outputs) {}
|
||||
}
|
||||
|
||||
message InvertibleField {
|
||||
bool invert = 1;
|
||||
repeated string value = 2;
|
||||
}
|
||||
|
||||
message RangeField {
|
||||
enum Op {
|
||||
EQ = 0;
|
||||
LTE = 1;
|
||||
GTE = 2;
|
||||
LT = 3;
|
||||
GT = 4;
|
||||
}
|
||||
Op op = 1;
|
||||
repeated string value = 2;
|
||||
}
|
||||
|
||||
message SearchRequest {
|
||||
string query = 1;
|
||||
}
|
||||
|
||||
message SearchReply {
|
||||
repeated Output txos = 1;
|
||||
repeated Output extra_txos = 2;
|
||||
uint32 total = 3;
|
||||
uint32 offset = 4;
|
||||
repeated Blocked blocked = 5;
|
||||
uint32 blocked_total = 6;
|
||||
}
|
||||
|
||||
message Output {
|
||||
bytes tx_hash = 1;
|
||||
uint32 nout = 2;
|
||||
uint32 height = 3;
|
||||
oneof meta {
|
||||
ClaimMeta claim = 7;
|
||||
Error error = 15;
|
||||
}
|
||||
}
|
||||
|
||||
message ClaimMeta {
|
||||
Output channel = 1;
|
||||
Output repost = 2;
|
||||
string short_url = 3;
|
||||
string canonical_url = 4;
|
||||
bool is_controlling = 5;
|
||||
uint32 take_over_height = 6;
|
||||
uint32 creation_height = 7;
|
||||
uint32 activation_height = 8;
|
||||
uint32 expiration_height = 9;
|
||||
uint32 claims_in_channel = 10;
|
||||
uint32 reposted = 11;
|
||||
|
||||
uint64 effective_amount = 20;
|
||||
uint64 support_amount = 21;
|
||||
uint32 trending_group = 22;
|
||||
float trending_mixed = 23;
|
||||
float trending_local = 24;
|
||||
float trending_global = 25;
|
||||
}
|
||||
|
||||
message Error {
|
||||
enum Code {
|
||||
UNKNOWN_CODE = 0;
|
||||
NOT_FOUND = 1;
|
||||
INVALID = 2;
|
||||
BLOCKED = 3;
|
||||
}
|
||||
Code code = 1;
|
||||
string text = 2;
|
||||
Blocked blocked = 3;
|
||||
}
|
||||
|
||||
message Blocked {
|
||||
uint32 count = 1;
|
||||
Output channel = 2;
|
||||
}
|
||||
string text = 1;
|
||||
repeated string name = 2;
|
||||
.google.protobuf.Int32Value amount_order = 3;
|
||||
.google.protobuf.Int32Value limit = 4;
|
||||
repeated string order_by = 5;
|
||||
.google.protobuf.Int32Value offset = 6;
|
||||
.google.protobuf.BoolValue is_controlling = 7;
|
||||
string last_take_over_height = 19;
|
||||
InvertibleField claim_id = 20;
|
||||
repeated string claim_name = 22;
|
||||
repeated string normalized = 23;
|
||||
RangeField tx_position = 24;
|
||||
RangeField amount = 25;
|
||||
RangeField timestamp = 26;
|
||||
RangeField creation_timestamp = 27;
|
||||
RangeField height = 28;
|
||||
RangeField creation_height = 29;
|
||||
RangeField activation_height = 30;
|
||||
RangeField expiration_height = 31;
|
||||
RangeField release_time = 32;
|
||||
repeated string short_url = 33;
|
||||
repeated string canonical_url = 34;
|
||||
repeated string title = 35;
|
||||
repeated string author = 36;
|
||||
repeated string description = 37;
|
||||
repeated string claim_type = 38;
|
||||
RangeField reposted = 39;
|
||||
repeated string stream_type = 40;
|
||||
repeated string media_type = 41;
|
||||
RangeField fee_amount = 42;
|
||||
repeated string fee_currency = 43;
|
||||
RangeField duration = 44;
|
||||
string reposted_claim_hash = 45;
|
||||
RangeField censor_type = 46;
|
||||
string claims_in_channel = 47;
|
||||
RangeField channel_join = 48;
|
||||
.google.protobuf.BoolValue signature_valid = 49;
|
||||
RangeField effective_amount = 51;
|
||||
RangeField support_amount = 52;
|
||||
RangeField trending_group = 53;
|
||||
RangeField trending_mixed = 54;
|
||||
RangeField trending_local = 55;
|
||||
RangeField trending_global = 56;
|
||||
InvertibleField channel_id = 57;
|
||||
InvertibleField channel_ids = 58;
|
||||
repeated string tx_id = 59;
|
||||
.google.protobuf.Int32Value tx_nout = 60;
|
||||
repeated string signature = 61;
|
||||
repeated string signature_digest = 62;
|
||||
repeated string public_key_bytes = 63;
|
||||
repeated string public_key_hash = 64;
|
||||
string public_key_id = 65;
|
||||
repeated bytes _id = 66;
|
||||
repeated string any_tags = 67;
|
||||
repeated string all_tags = 68;
|
||||
repeated string not_tags = 69;
|
||||
repeated string reposted_claim_id = 70;
|
||||
.google.protobuf.BoolValue has_channel_signature = 71;
|
||||
.google.protobuf.BoolValue has_source = 72;
|
||||
.google.protobuf.Int32Value limit_claims_per_channel = 73;
|
||||
repeated string any_languages = 74;
|
||||
repeated string all_languages = 75;
|
||||
.google.protobuf.BoolValue remove_duplicates = 76;
|
||||
.google.protobuf.BoolValue no_totals = 77;
|
||||
repeated string search_indices = 78;
|
||||
}
|
62
protobuf/definitions/result.proto
Normal file
62
protobuf/definitions/result.proto
Normal file
|
@ -0,0 +1,62 @@
|
|||
syntax = "proto3";
|
||||
|
||||
option go_package = "github.com/lbryio/hub/protobuf/go/pb";
|
||||
|
||||
package pb;
|
||||
|
||||
message Outputs {
|
||||
repeated Output txos = 1;
|
||||
repeated Output extra_txos = 2;
|
||||
uint32 total = 3;
|
||||
uint32 offset = 4;
|
||||
repeated Blocked blocked = 5;
|
||||
uint32 blocked_total = 6;
|
||||
}
|
||||
|
||||
message Output {
|
||||
bytes tx_hash = 1;
|
||||
uint32 nout = 2;
|
||||
uint32 height = 3;
|
||||
oneof meta {
|
||||
ClaimMeta claim = 7;
|
||||
Error error = 15;
|
||||
}
|
||||
}
|
||||
|
||||
message ClaimMeta {
|
||||
Output channel = 1;
|
||||
Output repost = 2;
|
||||
string short_url = 3;
|
||||
string canonical_url = 4;
|
||||
bool is_controlling = 5;
|
||||
uint32 take_over_height = 6;
|
||||
uint32 creation_height = 7;
|
||||
uint32 activation_height = 8;
|
||||
uint32 expiration_height = 9;
|
||||
uint32 claims_in_channel = 10;
|
||||
uint32 reposted = 11;
|
||||
|
||||
uint64 effective_amount = 20;
|
||||
uint64 support_amount = 21;
|
||||
uint32 trending_group = 22;
|
||||
float trending_mixed = 23;
|
||||
float trending_local = 24;
|
||||
float trending_global = 25;
|
||||
}
|
||||
|
||||
message Error {
|
||||
enum Code {
|
||||
UNKNOWN_CODE = 0;
|
||||
NOT_FOUND = 1;
|
||||
INVALID = 2;
|
||||
BLOCKED = 3;
|
||||
}
|
||||
Code code = 1;
|
||||
string text = 2;
|
||||
Blocked blocked = 3;
|
||||
}
|
||||
|
||||
message Blocked {
|
||||
uint32 count = 1;
|
||||
Output channel = 2;
|
||||
}
|
File diff suppressed because it is too large
Load diff
|
@ -18,7 +18,7 @@ const _ = grpc.SupportPackageIsVersion7
|
|||
//
|
||||
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
|
||||
type HubClient interface {
|
||||
Search(ctx context.Context, in *SearchRequest, opts ...grpc.CallOption) (*SearchReply, error)
|
||||
Search(ctx context.Context, in *SearchRequest, opts ...grpc.CallOption) (*Outputs, error)
|
||||
}
|
||||
|
||||
type hubClient struct {
|
||||
|
@ -29,8 +29,8 @@ func NewHubClient(cc grpc.ClientConnInterface) HubClient {
|
|||
return &hubClient{cc}
|
||||
}
|
||||
|
||||
func (c *hubClient) Search(ctx context.Context, in *SearchRequest, opts ...grpc.CallOption) (*SearchReply, error) {
|
||||
out := new(SearchReply)
|
||||
func (c *hubClient) Search(ctx context.Context, in *SearchRequest, opts ...grpc.CallOption) (*Outputs, error) {
|
||||
out := new(Outputs)
|
||||
err := c.cc.Invoke(ctx, "/pb.Hub/Search", in, out, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
@ -42,7 +42,7 @@ func (c *hubClient) Search(ctx context.Context, in *SearchRequest, opts ...grpc.
|
|||
// All implementations must embed UnimplementedHubServer
|
||||
// for forward compatibility
|
||||
type HubServer interface {
|
||||
Search(context.Context, *SearchRequest) (*SearchReply, error)
|
||||
Search(context.Context, *SearchRequest) (*Outputs, error)
|
||||
mustEmbedUnimplementedHubServer()
|
||||
}
|
||||
|
||||
|
@ -50,7 +50,7 @@ type HubServer interface {
|
|||
type UnimplementedHubServer struct {
|
||||
}
|
||||
|
||||
func (UnimplementedHubServer) Search(context.Context, *SearchRequest) (*SearchReply, error) {
|
||||
func (UnimplementedHubServer) Search(context.Context, *SearchRequest) (*Outputs, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method Search not implemented")
|
||||
}
|
||||
func (UnimplementedHubServer) mustEmbedUnimplementedHubServer() {}
|
||||
|
|
779
protobuf/go/result.pb.go
Normal file
779
protobuf/go/result.pb.go
Normal file
|
@ -0,0 +1,779 @@
|
|||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||
// versions:
|
||||
// protoc-gen-go v1.26.0
|
||||
// protoc v3.17.1
|
||||
// source: result.proto
|
||||
|
||||
package pb
|
||||
|
||||
import (
|
||||
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
|
||||
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
|
||||
reflect "reflect"
|
||||
sync "sync"
|
||||
)
|
||||
|
||||
const (
|
||||
// Verify that this generated code is sufficiently up-to-date.
|
||||
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
|
||||
// Verify that runtime/protoimpl is sufficiently up-to-date.
|
||||
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
|
||||
)
|
||||
|
||||
type Error_Code int32
|
||||
|
||||
const (
|
||||
Error_UNKNOWN_CODE Error_Code = 0
|
||||
Error_NOT_FOUND Error_Code = 1
|
||||
Error_INVALID Error_Code = 2
|
||||
Error_BLOCKED Error_Code = 3
|
||||
)
|
||||
|
||||
// Enum value maps for Error_Code.
|
||||
var (
|
||||
Error_Code_name = map[int32]string{
|
||||
0: "UNKNOWN_CODE",
|
||||
1: "NOT_FOUND",
|
||||
2: "INVALID",
|
||||
3: "BLOCKED",
|
||||
}
|
||||
Error_Code_value = map[string]int32{
|
||||
"UNKNOWN_CODE": 0,
|
||||
"NOT_FOUND": 1,
|
||||
"INVALID": 2,
|
||||
"BLOCKED": 3,
|
||||
}
|
||||
)
|
||||
|
||||
func (x Error_Code) Enum() *Error_Code {
|
||||
p := new(Error_Code)
|
||||
*p = x
|
||||
return p
|
||||
}
|
||||
|
||||
func (x Error_Code) String() string {
|
||||
return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
|
||||
}
|
||||
|
||||
func (Error_Code) Descriptor() protoreflect.EnumDescriptor {
|
||||
return file_result_proto_enumTypes[0].Descriptor()
|
||||
}
|
||||
|
||||
func (Error_Code) Type() protoreflect.EnumType {
|
||||
return &file_result_proto_enumTypes[0]
|
||||
}
|
||||
|
||||
func (x Error_Code) Number() protoreflect.EnumNumber {
|
||||
return protoreflect.EnumNumber(x)
|
||||
}
|
||||
|
||||
// Deprecated: Use Error_Code.Descriptor instead.
|
||||
func (Error_Code) EnumDescriptor() ([]byte, []int) {
|
||||
return file_result_proto_rawDescGZIP(), []int{3, 0}
|
||||
}
|
||||
|
||||
type Outputs struct {
|
||||
state protoimpl.MessageState
|
||||
sizeCache protoimpl.SizeCache
|
||||
unknownFields protoimpl.UnknownFields
|
||||
|
||||
Txos []*Output `protobuf:"bytes,1,rep,name=txos,proto3" json:"txos"`
|
||||
ExtraTxos []*Output `protobuf:"bytes,2,rep,name=extra_txos,json=extraTxos,proto3" json:"extra_txos"`
|
||||
Total uint32 `protobuf:"varint,3,opt,name=total,proto3" json:"total"`
|
||||
Offset uint32 `protobuf:"varint,4,opt,name=offset,proto3" json:"offset"`
|
||||
Blocked []*Blocked `protobuf:"bytes,5,rep,name=blocked,proto3" json:"blocked"`
|
||||
BlockedTotal uint32 `protobuf:"varint,6,opt,name=blocked_total,json=blockedTotal,proto3" json:"blocked_total"`
|
||||
}
|
||||
|
||||
func (x *Outputs) Reset() {
|
||||
*x = Outputs{}
|
||||
if protoimpl.UnsafeEnabled {
|
||||
mi := &file_result_proto_msgTypes[0]
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
}
|
||||
|
||||
func (x *Outputs) String() string {
|
||||
return protoimpl.X.MessageStringOf(x)
|
||||
}
|
||||
|
||||
func (*Outputs) ProtoMessage() {}
|
||||
|
||||
func (x *Outputs) ProtoReflect() protoreflect.Message {
|
||||
mi := &file_result_proto_msgTypes[0]
|
||||
if protoimpl.UnsafeEnabled && x != nil {
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
if ms.LoadMessageInfo() == nil {
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
return ms
|
||||
}
|
||||
return mi.MessageOf(x)
|
||||
}
|
||||
|
||||
// Deprecated: Use Outputs.ProtoReflect.Descriptor instead.
|
||||
func (*Outputs) Descriptor() ([]byte, []int) {
|
||||
return file_result_proto_rawDescGZIP(), []int{0}
|
||||
}
|
||||
|
||||
func (x *Outputs) GetTxos() []*Output {
|
||||
if x != nil {
|
||||
return x.Txos
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (x *Outputs) GetExtraTxos() []*Output {
|
||||
if x != nil {
|
||||
return x.ExtraTxos
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (x *Outputs) GetTotal() uint32 {
|
||||
if x != nil {
|
||||
return x.Total
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (x *Outputs) GetOffset() uint32 {
|
||||
if x != nil {
|
||||
return x.Offset
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (x *Outputs) GetBlocked() []*Blocked {
|
||||
if x != nil {
|
||||
return x.Blocked
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (x *Outputs) GetBlockedTotal() uint32 {
|
||||
if x != nil {
|
||||
return x.BlockedTotal
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
type Output struct {
|
||||
state protoimpl.MessageState
|
||||
sizeCache protoimpl.SizeCache
|
||||
unknownFields protoimpl.UnknownFields
|
||||
|
||||
TxHash []byte `protobuf:"bytes,1,opt,name=tx_hash,json=txHash,proto3" json:"tx_hash"`
|
||||
Nout uint32 `protobuf:"varint,2,opt,name=nout,proto3" json:"nout"`
|
||||
Height uint32 `protobuf:"varint,3,opt,name=height,proto3" json:"height"`
|
||||
// Types that are assignable to Meta:
|
||||
// *Output_Claim
|
||||
// *Output_Error
|
||||
Meta isOutput_Meta `protobuf_oneof:"meta"`
|
||||
}
|
||||
|
||||
func (x *Output) Reset() {
|
||||
*x = Output{}
|
||||
if protoimpl.UnsafeEnabled {
|
||||
mi := &file_result_proto_msgTypes[1]
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
}
|
||||
|
||||
func (x *Output) String() string {
|
||||
return protoimpl.X.MessageStringOf(x)
|
||||
}
|
||||
|
||||
func (*Output) ProtoMessage() {}
|
||||
|
||||
func (x *Output) ProtoReflect() protoreflect.Message {
|
||||
mi := &file_result_proto_msgTypes[1]
|
||||
if protoimpl.UnsafeEnabled && x != nil {
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
if ms.LoadMessageInfo() == nil {
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
return ms
|
||||
}
|
||||
return mi.MessageOf(x)
|
||||
}
|
||||
|
||||
// Deprecated: Use Output.ProtoReflect.Descriptor instead.
|
||||
func (*Output) Descriptor() ([]byte, []int) {
|
||||
return file_result_proto_rawDescGZIP(), []int{1}
|
||||
}
|
||||
|
||||
func (x *Output) GetTxHash() []byte {
|
||||
if x != nil {
|
||||
return x.TxHash
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (x *Output) GetNout() uint32 {
|
||||
if x != nil {
|
||||
return x.Nout
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (x *Output) GetHeight() uint32 {
|
||||
if x != nil {
|
||||
return x.Height
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *Output) GetMeta() isOutput_Meta {
|
||||
if m != nil {
|
||||
return m.Meta
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (x *Output) GetClaim() *ClaimMeta {
|
||||
if x, ok := x.GetMeta().(*Output_Claim); ok {
|
||||
return x.Claim
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (x *Output) GetError() *Error {
|
||||
if x, ok := x.GetMeta().(*Output_Error); ok {
|
||||
return x.Error
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type isOutput_Meta interface {
|
||||
isOutput_Meta()
|
||||
}
|
||||
|
||||
type Output_Claim struct {
|
||||
Claim *ClaimMeta `protobuf:"bytes,7,opt,name=claim,proto3,oneof"`
|
||||
}
|
||||
|
||||
type Output_Error struct {
|
||||
Error *Error `protobuf:"bytes,15,opt,name=error,proto3,oneof"`
|
||||
}
|
||||
|
||||
func (*Output_Claim) isOutput_Meta() {}
|
||||
|
||||
func (*Output_Error) isOutput_Meta() {}
|
||||
|
||||
type ClaimMeta struct {
|
||||
state protoimpl.MessageState
|
||||
sizeCache protoimpl.SizeCache
|
||||
unknownFields protoimpl.UnknownFields
|
||||
|
||||
Channel *Output `protobuf:"bytes,1,opt,name=channel,proto3" json:"channel"`
|
||||
Repost *Output `protobuf:"bytes,2,opt,name=repost,proto3" json:"repost"`
|
||||
ShortUrl string `protobuf:"bytes,3,opt,name=short_url,json=shortUrl,proto3" json:"short_url"`
|
||||
CanonicalUrl string `protobuf:"bytes,4,opt,name=canonical_url,json=canonicalUrl,proto3" json:"canonical_url"`
|
||||
IsControlling bool `protobuf:"varint,5,opt,name=is_controlling,json=isControlling,proto3" json:"is_controlling"`
|
||||
TakeOverHeight uint32 `protobuf:"varint,6,opt,name=take_over_height,json=takeOverHeight,proto3" json:"take_over_height"`
|
||||
CreationHeight uint32 `protobuf:"varint,7,opt,name=creation_height,json=creationHeight,proto3" json:"creation_height"`
|
||||
ActivationHeight uint32 `protobuf:"varint,8,opt,name=activation_height,json=activationHeight,proto3" json:"activation_height"`
|
||||
ExpirationHeight uint32 `protobuf:"varint,9,opt,name=expiration_height,json=expirationHeight,proto3" json:"expiration_height"`
|
||||
ClaimsInChannel uint32 `protobuf:"varint,10,opt,name=claims_in_channel,json=claimsInChannel,proto3" json:"claims_in_channel"`
|
||||
Reposted uint32 `protobuf:"varint,11,opt,name=reposted,proto3" json:"reposted"`
|
||||
EffectiveAmount uint64 `protobuf:"varint,20,opt,name=effective_amount,json=effectiveAmount,proto3" json:"effective_amount"`
|
||||
SupportAmount uint64 `protobuf:"varint,21,opt,name=support_amount,json=supportAmount,proto3" json:"support_amount"`
|
||||
TrendingGroup uint32 `protobuf:"varint,22,opt,name=trending_group,json=trendingGroup,proto3" json:"trending_group"`
|
||||
TrendingMixed float32 `protobuf:"fixed32,23,opt,name=trending_mixed,json=trendingMixed,proto3" json:"trending_mixed"`
|
||||
TrendingLocal float32 `protobuf:"fixed32,24,opt,name=trending_local,json=trendingLocal,proto3" json:"trending_local"`
|
||||
TrendingGlobal float32 `protobuf:"fixed32,25,opt,name=trending_global,json=trendingGlobal,proto3" json:"trending_global"`
|
||||
}
|
||||
|
||||
func (x *ClaimMeta) Reset() {
|
||||
*x = ClaimMeta{}
|
||||
if protoimpl.UnsafeEnabled {
|
||||
mi := &file_result_proto_msgTypes[2]
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
}
|
||||
|
||||
func (x *ClaimMeta) String() string {
|
||||
return protoimpl.X.MessageStringOf(x)
|
||||
}
|
||||
|
||||
func (*ClaimMeta) ProtoMessage() {}
|
||||
|
||||
func (x *ClaimMeta) ProtoReflect() protoreflect.Message {
|
||||
mi := &file_result_proto_msgTypes[2]
|
||||
if protoimpl.UnsafeEnabled && x != nil {
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
if ms.LoadMessageInfo() == nil {
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
return ms
|
||||
}
|
||||
return mi.MessageOf(x)
|
||||
}
|
||||
|
||||
// Deprecated: Use ClaimMeta.ProtoReflect.Descriptor instead.
|
||||
func (*ClaimMeta) Descriptor() ([]byte, []int) {
|
||||
return file_result_proto_rawDescGZIP(), []int{2}
|
||||
}
|
||||
|
||||
func (x *ClaimMeta) GetChannel() *Output {
|
||||
if x != nil {
|
||||
return x.Channel
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (x *ClaimMeta) GetRepost() *Output {
|
||||
if x != nil {
|
||||
return x.Repost
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (x *ClaimMeta) GetShortUrl() string {
|
||||
if x != nil {
|
||||
return x.ShortUrl
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (x *ClaimMeta) GetCanonicalUrl() string {
|
||||
if x != nil {
|
||||
return x.CanonicalUrl
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (x *ClaimMeta) GetIsControlling() bool {
|
||||
if x != nil {
|
||||
return x.IsControlling
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (x *ClaimMeta) GetTakeOverHeight() uint32 {
|
||||
if x != nil {
|
||||
return x.TakeOverHeight
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (x *ClaimMeta) GetCreationHeight() uint32 {
|
||||
if x != nil {
|
||||
return x.CreationHeight
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (x *ClaimMeta) GetActivationHeight() uint32 {
|
||||
if x != nil {
|
||||
return x.ActivationHeight
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (x *ClaimMeta) GetExpirationHeight() uint32 {
|
||||
if x != nil {
|
||||
return x.ExpirationHeight
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (x *ClaimMeta) GetClaimsInChannel() uint32 {
|
||||
if x != nil {
|
||||
return x.ClaimsInChannel
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (x *ClaimMeta) GetReposted() uint32 {
|
||||
if x != nil {
|
||||
return x.Reposted
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (x *ClaimMeta) GetEffectiveAmount() uint64 {
|
||||
if x != nil {
|
||||
return x.EffectiveAmount
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (x *ClaimMeta) GetSupportAmount() uint64 {
|
||||
if x != nil {
|
||||
return x.SupportAmount
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (x *ClaimMeta) GetTrendingGroup() uint32 {
|
||||
if x != nil {
|
||||
return x.TrendingGroup
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (x *ClaimMeta) GetTrendingMixed() float32 {
|
||||
if x != nil {
|
||||
return x.TrendingMixed
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (x *ClaimMeta) GetTrendingLocal() float32 {
|
||||
if x != nil {
|
||||
return x.TrendingLocal
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (x *ClaimMeta) GetTrendingGlobal() float32 {
|
||||
if x != nil {
|
||||
return x.TrendingGlobal
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
type Error struct {
|
||||
state protoimpl.MessageState
|
||||
sizeCache protoimpl.SizeCache
|
||||
unknownFields protoimpl.UnknownFields
|
||||
|
||||
Code Error_Code `protobuf:"varint,1,opt,name=code,proto3,enum=pb.Error_Code" json:"code"`
|
||||
Text string `protobuf:"bytes,2,opt,name=text,proto3" json:"text"`
|
||||
Blocked *Blocked `protobuf:"bytes,3,opt,name=blocked,proto3" json:"blocked"`
|
||||
}
|
||||
|
||||
func (x *Error) Reset() {
|
||||
*x = Error{}
|
||||
if protoimpl.UnsafeEnabled {
|
||||
mi := &file_result_proto_msgTypes[3]
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
}
|
||||
|
||||
func (x *Error) String() string {
|
||||
return protoimpl.X.MessageStringOf(x)
|
||||
}
|
||||
|
||||
func (*Error) ProtoMessage() {}
|
||||
|
||||
func (x *Error) ProtoReflect() protoreflect.Message {
|
||||
mi := &file_result_proto_msgTypes[3]
|
||||
if protoimpl.UnsafeEnabled && x != nil {
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
if ms.LoadMessageInfo() == nil {
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
return ms
|
||||
}
|
||||
return mi.MessageOf(x)
|
||||
}
|
||||
|
||||
// Deprecated: Use Error.ProtoReflect.Descriptor instead.
|
||||
func (*Error) Descriptor() ([]byte, []int) {
|
||||
return file_result_proto_rawDescGZIP(), []int{3}
|
||||
}
|
||||
|
||||
func (x *Error) GetCode() Error_Code {
|
||||
if x != nil {
|
||||
return x.Code
|
||||
}
|
||||
return Error_UNKNOWN_CODE
|
||||
}
|
||||
|
||||
func (x *Error) GetText() string {
|
||||
if x != nil {
|
||||
return x.Text
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (x *Error) GetBlocked() *Blocked {
|
||||
if x != nil {
|
||||
return x.Blocked
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type Blocked struct {
|
||||
state protoimpl.MessageState
|
||||
sizeCache protoimpl.SizeCache
|
||||
unknownFields protoimpl.UnknownFields
|
||||
|
||||
Count uint32 `protobuf:"varint,1,opt,name=count,proto3" json:"count"`
|
||||
Channel *Output `protobuf:"bytes,2,opt,name=channel,proto3" json:"channel"`
|
||||
}
|
||||
|
||||
func (x *Blocked) Reset() {
|
||||
*x = Blocked{}
|
||||
if protoimpl.UnsafeEnabled {
|
||||
mi := &file_result_proto_msgTypes[4]
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
}
|
||||
|
||||
func (x *Blocked) String() string {
|
||||
return protoimpl.X.MessageStringOf(x)
|
||||
}
|
||||
|
||||
func (*Blocked) ProtoMessage() {}
|
||||
|
||||
func (x *Blocked) ProtoReflect() protoreflect.Message {
|
||||
mi := &file_result_proto_msgTypes[4]
|
||||
if protoimpl.UnsafeEnabled && x != nil {
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
if ms.LoadMessageInfo() == nil {
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
return ms
|
||||
}
|
||||
return mi.MessageOf(x)
|
||||
}
|
||||
|
||||
// Deprecated: Use Blocked.ProtoReflect.Descriptor instead.
|
||||
func (*Blocked) Descriptor() ([]byte, []int) {
|
||||
return file_result_proto_rawDescGZIP(), []int{4}
|
||||
}
|
||||
|
||||
func (x *Blocked) GetCount() uint32 {
|
||||
if x != nil {
|
||||
return x.Count
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (x *Blocked) GetChannel() *Output {
|
||||
if x != nil {
|
||||
return x.Channel
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
var File_result_proto protoreflect.FileDescriptor
|
||||
|
||||
var file_result_proto_rawDesc = []byte{
|
||||
0x0a, 0x0c, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x02,
|
||||
0x70, 0x62, 0x22, 0xce, 0x01, 0x0a, 0x07, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x12, 0x1e,
|
||||
0x0a, 0x04, 0x74, 0x78, 0x6f, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x70,
|
||||
0x62, 0x2e, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x52, 0x04, 0x74, 0x78, 0x6f, 0x73, 0x12, 0x29,
|
||||
0x0a, 0x0a, 0x65, 0x78, 0x74, 0x72, 0x61, 0x5f, 0x74, 0x78, 0x6f, 0x73, 0x18, 0x02, 0x20, 0x03,
|
||||
0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x70, 0x62, 0x2e, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x52, 0x09,
|
||||
0x65, 0x78, 0x74, 0x72, 0x61, 0x54, 0x78, 0x6f, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f, 0x74,
|
||||
0x61, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x12,
|
||||
0x16, 0x0a, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0d, 0x52,
|
||||
0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x12, 0x25, 0x0a, 0x07, 0x62, 0x6c, 0x6f, 0x63, 0x6b,
|
||||
0x65, 0x64, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x70, 0x62, 0x2e, 0x42, 0x6c,
|
||||
0x6f, 0x63, 0x6b, 0x65, 0x64, 0x52, 0x07, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x65, 0x64, 0x12, 0x23,
|
||||
0x0a, 0x0d, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x65, 0x64, 0x5f, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x18,
|
||||
0x06, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0c, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x65, 0x64, 0x54, 0x6f,
|
||||
0x74, 0x61, 0x6c, 0x22, 0x9f, 0x01, 0x0a, 0x06, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x17,
|
||||
0x0a, 0x07, 0x74, 0x78, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52,
|
||||
0x06, 0x74, 0x78, 0x48, 0x61, 0x73, 0x68, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x6f, 0x75, 0x74, 0x18,
|
||||
0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x04, 0x6e, 0x6f, 0x75, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x68,
|
||||
0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x06, 0x68, 0x65, 0x69,
|
||||
0x67, 0x68, 0x74, 0x12, 0x25, 0x0a, 0x05, 0x63, 0x6c, 0x61, 0x69, 0x6d, 0x18, 0x07, 0x20, 0x01,
|
||||
0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x70, 0x62, 0x2e, 0x43, 0x6c, 0x61, 0x69, 0x6d, 0x4d, 0x65, 0x74,
|
||||
0x61, 0x48, 0x00, 0x52, 0x05, 0x63, 0x6c, 0x61, 0x69, 0x6d, 0x12, 0x21, 0x0a, 0x05, 0x65, 0x72,
|
||||
0x72, 0x6f, 0x72, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x09, 0x2e, 0x70, 0x62, 0x2e, 0x45,
|
||||
0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x06, 0x0a,
|
||||
0x04, 0x6d, 0x65, 0x74, 0x61, 0x22, 0xa3, 0x05, 0x0a, 0x09, 0x43, 0x6c, 0x61, 0x69, 0x6d, 0x4d,
|
||||
0x65, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x07, 0x63, 0x68, 0x61, 0x6e, 0x6e, 0x65, 0x6c, 0x18, 0x01,
|
||||
0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x70, 0x62, 0x2e, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74,
|
||||
0x52, 0x07, 0x63, 0x68, 0x61, 0x6e, 0x6e, 0x65, 0x6c, 0x12, 0x22, 0x0a, 0x06, 0x72, 0x65, 0x70,
|
||||
0x6f, 0x73, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x70, 0x62, 0x2e, 0x4f,
|
||||
0x75, 0x74, 0x70, 0x75, 0x74, 0x52, 0x06, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x74, 0x12, 0x1b, 0x0a,
|
||||
0x09, 0x73, 0x68, 0x6f, 0x72, 0x74, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09,
|
||||
0x52, 0x08, 0x73, 0x68, 0x6f, 0x72, 0x74, 0x55, 0x72, 0x6c, 0x12, 0x23, 0x0a, 0x0d, 0x63, 0x61,
|
||||
0x6e, 0x6f, 0x6e, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28,
|
||||
0x09, 0x52, 0x0c, 0x63, 0x61, 0x6e, 0x6f, 0x6e, 0x69, 0x63, 0x61, 0x6c, 0x55, 0x72, 0x6c, 0x12,
|
||||
0x25, 0x0a, 0x0e, 0x69, 0x73, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x69, 0x6e,
|
||||
0x67, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0d, 0x69, 0x73, 0x43, 0x6f, 0x6e, 0x74, 0x72,
|
||||
0x6f, 0x6c, 0x6c, 0x69, 0x6e, 0x67, 0x12, 0x28, 0x0a, 0x10, 0x74, 0x61, 0x6b, 0x65, 0x5f, 0x6f,
|
||||
0x76, 0x65, 0x72, 0x5f, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0d,
|
||||
0x52, 0x0e, 0x74, 0x61, 0x6b, 0x65, 0x4f, 0x76, 0x65, 0x72, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74,
|
||||
0x12, 0x27, 0x0a, 0x0f, 0x63, 0x72, 0x65, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x68, 0x65, 0x69,
|
||||
0x67, 0x68, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0e, 0x63, 0x72, 0x65, 0x61, 0x74,
|
||||
0x69, 0x6f, 0x6e, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x12, 0x2b, 0x0a, 0x11, 0x61, 0x63, 0x74,
|
||||
0x69, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x08,
|
||||
0x20, 0x01, 0x28, 0x0d, 0x52, 0x10, 0x61, 0x63, 0x74, 0x69, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e,
|
||||
0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x12, 0x2b, 0x0a, 0x11, 0x65, 0x78, 0x70, 0x69, 0x72, 0x61,
|
||||
0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x09, 0x20, 0x01, 0x28,
|
||||
0x0d, 0x52, 0x10, 0x65, 0x78, 0x70, 0x69, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x65, 0x69,
|
||||
0x67, 0x68, 0x74, 0x12, 0x2a, 0x0a, 0x11, 0x63, 0x6c, 0x61, 0x69, 0x6d, 0x73, 0x5f, 0x69, 0x6e,
|
||||
0x5f, 0x63, 0x68, 0x61, 0x6e, 0x6e, 0x65, 0x6c, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0f,
|
||||
0x63, 0x6c, 0x61, 0x69, 0x6d, 0x73, 0x49, 0x6e, 0x43, 0x68, 0x61, 0x6e, 0x6e, 0x65, 0x6c, 0x12,
|
||||
0x1a, 0x0a, 0x08, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x74, 0x65, 0x64, 0x18, 0x0b, 0x20, 0x01, 0x28,
|
||||
0x0d, 0x52, 0x08, 0x72, 0x65, 0x70, 0x6f, 0x73, 0x74, 0x65, 0x64, 0x12, 0x29, 0x0a, 0x10, 0x65,
|
||||
0x66, 0x66, 0x65, 0x63, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x18,
|
||||
0x14, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0f, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x69, 0x76, 0x65,
|
||||
0x41, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x25, 0x0a, 0x0e, 0x73, 0x75, 0x70, 0x70, 0x6f, 0x72,
|
||||
0x74, 0x5f, 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x15, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0d,
|
||||
0x73, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x41, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x25, 0x0a,
|
||||
0x0e, 0x74, 0x72, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x18,
|
||||
0x16, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0d, 0x74, 0x72, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x47,
|
||||
0x72, 0x6f, 0x75, 0x70, 0x12, 0x25, 0x0a, 0x0e, 0x74, 0x72, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67,
|
||||
0x5f, 0x6d, 0x69, 0x78, 0x65, 0x64, 0x18, 0x17, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0d, 0x74, 0x72,
|
||||
0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x4d, 0x69, 0x78, 0x65, 0x64, 0x12, 0x25, 0x0a, 0x0e, 0x74,
|
||||
0x72, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x18, 0x18, 0x20,
|
||||
0x01, 0x28, 0x02, 0x52, 0x0d, 0x74, 0x72, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x4c, 0x6f, 0x63,
|
||||
0x61, 0x6c, 0x12, 0x27, 0x0a, 0x0f, 0x74, 0x72, 0x65, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x67,
|
||||
0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x18, 0x19, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0e, 0x74, 0x72, 0x65,
|
||||
0x6e, 0x64, 0x69, 0x6e, 0x67, 0x47, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x22, 0xa9, 0x01, 0x0a, 0x05,
|
||||
0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x22, 0x0a, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20,
|
||||
0x01, 0x28, 0x0e, 0x32, 0x0e, 0x2e, 0x70, 0x62, 0x2e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x43,
|
||||
0x6f, 0x64, 0x65, 0x52, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x65, 0x78,
|
||||
0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, 0x12, 0x25, 0x0a,
|
||||
0x07, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x65, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b,
|
||||
0x2e, 0x70, 0x62, 0x2e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x65, 0x64, 0x52, 0x07, 0x62, 0x6c, 0x6f,
|
||||
0x63, 0x6b, 0x65, 0x64, 0x22, 0x41, 0x0a, 0x04, 0x43, 0x6f, 0x64, 0x65, 0x12, 0x10, 0x0a, 0x0c,
|
||||
0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x5f, 0x43, 0x4f, 0x44, 0x45, 0x10, 0x00, 0x12, 0x0d,
|
||||
0x0a, 0x09, 0x4e, 0x4f, 0x54, 0x5f, 0x46, 0x4f, 0x55, 0x4e, 0x44, 0x10, 0x01, 0x12, 0x0b, 0x0a,
|
||||
0x07, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x02, 0x12, 0x0b, 0x0a, 0x07, 0x42, 0x4c,
|
||||
0x4f, 0x43, 0x4b, 0x45, 0x44, 0x10, 0x03, 0x22, 0x45, 0x0a, 0x07, 0x42, 0x6c, 0x6f, 0x63, 0x6b,
|
||||
0x65, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28,
|
||||
0x0d, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x24, 0x0a, 0x07, 0x63, 0x68, 0x61, 0x6e,
|
||||
0x6e, 0x65, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x70, 0x62, 0x2e, 0x4f,
|
||||
0x75, 0x74, 0x70, 0x75, 0x74, 0x52, 0x07, 0x63, 0x68, 0x61, 0x6e, 0x6e, 0x65, 0x6c, 0x42, 0x26,
|
||||
0x5a, 0x24, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6c, 0x62, 0x72,
|
||||
0x79, 0x69, 0x6f, 0x2f, 0x68, 0x75, 0x62, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66,
|
||||
0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
|
||||
}
|
||||
|
||||
var (
|
||||
file_result_proto_rawDescOnce sync.Once
|
||||
file_result_proto_rawDescData = file_result_proto_rawDesc
|
||||
)
|
||||
|
||||
func file_result_proto_rawDescGZIP() []byte {
|
||||
file_result_proto_rawDescOnce.Do(func() {
|
||||
file_result_proto_rawDescData = protoimpl.X.CompressGZIP(file_result_proto_rawDescData)
|
||||
})
|
||||
return file_result_proto_rawDescData
|
||||
}
|
||||
|
||||
var file_result_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
|
||||
var file_result_proto_msgTypes = make([]protoimpl.MessageInfo, 5)
|
||||
var file_result_proto_goTypes = []interface{}{
|
||||
(Error_Code)(0), // 0: pb.Error.Code
|
||||
(*Outputs)(nil), // 1: pb.Outputs
|
||||
(*Output)(nil), // 2: pb.Output
|
||||
(*ClaimMeta)(nil), // 3: pb.ClaimMeta
|
||||
(*Error)(nil), // 4: pb.Error
|
||||
(*Blocked)(nil), // 5: pb.Blocked
|
||||
}
|
||||
var file_result_proto_depIdxs = []int32{
|
||||
2, // 0: pb.Outputs.txos:type_name -> pb.Output
|
||||
2, // 1: pb.Outputs.extra_txos:type_name -> pb.Output
|
||||
5, // 2: pb.Outputs.blocked:type_name -> pb.Blocked
|
||||
3, // 3: pb.Output.claim:type_name -> pb.ClaimMeta
|
||||
4, // 4: pb.Output.error:type_name -> pb.Error
|
||||
2, // 5: pb.ClaimMeta.channel:type_name -> pb.Output
|
||||
2, // 6: pb.ClaimMeta.repost:type_name -> pb.Output
|
||||
0, // 7: pb.Error.code:type_name -> pb.Error.Code
|
||||
5, // 8: pb.Error.blocked:type_name -> pb.Blocked
|
||||
2, // 9: pb.Blocked.channel:type_name -> pb.Output
|
||||
10, // [10:10] is the sub-list for method output_type
|
||||
10, // [10:10] is the sub-list for method input_type
|
||||
10, // [10:10] is the sub-list for extension type_name
|
||||
10, // [10:10] is the sub-list for extension extendee
|
||||
0, // [0:10] is the sub-list for field type_name
|
||||
}
|
||||
|
||||
func init() { file_result_proto_init() }
|
||||
func file_result_proto_init() {
|
||||
if File_result_proto != nil {
|
||||
return
|
||||
}
|
||||
if !protoimpl.UnsafeEnabled {
|
||||
file_result_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
|
||||
switch v := v.(*Outputs); i {
|
||||
case 0:
|
||||
return &v.state
|
||||
case 1:
|
||||
return &v.sizeCache
|
||||
case 2:
|
||||
return &v.unknownFields
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
file_result_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
|
||||
switch v := v.(*Output); i {
|
||||
case 0:
|
||||
return &v.state
|
||||
case 1:
|
||||
return &v.sizeCache
|
||||
case 2:
|
||||
return &v.unknownFields
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
file_result_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
|
||||
switch v := v.(*ClaimMeta); i {
|
||||
case 0:
|
||||
return &v.state
|
||||
case 1:
|
||||
return &v.sizeCache
|
||||
case 2:
|
||||
return &v.unknownFields
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
file_result_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
|
||||
switch v := v.(*Error); i {
|
||||
case 0:
|
||||
return &v.state
|
||||
case 1:
|
||||
return &v.sizeCache
|
||||
case 2:
|
||||
return &v.unknownFields
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
file_result_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
|
||||
switch v := v.(*Blocked); i {
|
||||
case 0:
|
||||
return &v.state
|
||||
case 1:
|
||||
return &v.sizeCache
|
||||
case 2:
|
||||
return &v.unknownFields
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
file_result_proto_msgTypes[1].OneofWrappers = []interface{}{
|
||||
(*Output_Claim)(nil),
|
||||
(*Output_Error)(nil),
|
||||
}
|
||||
type x struct{}
|
||||
out := protoimpl.TypeBuilder{
|
||||
File: protoimpl.DescBuilder{
|
||||
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
|
||||
RawDescriptor: file_result_proto_rawDesc,
|
||||
NumEnums: 1,
|
||||
NumMessages: 5,
|
||||
NumExtensions: 0,
|
||||
NumServices: 0,
|
||||
},
|
||||
GoTypes: file_result_proto_goTypes,
|
||||
DependencyIndexes: file_result_proto_depIdxs,
|
||||
EnumInfos: file_result_proto_enumTypes,
|
||||
MessageInfos: file_result_proto_msgTypes,
|
||||
}.Build()
|
||||
File_result_proto = out.File
|
||||
file_result_proto_rawDesc = nil
|
||||
file_result_proto_goTypes = nil
|
||||
file_result_proto_depIdxs = nil
|
||||
}
|
|
@ -45,6 +45,7 @@ Install Go 1.14+
|
|||
|
||||
- Ubuntu: `sudo add-apt-repository ppa:longsleep/golang-backports && sudo apt install golang-go`
|
||||
- OSX: `brew install go`
|
||||
- Windows https://golang.org/doc/install
|
||||
|
||||
Download `protoc` from https://github.com/protocolbuffers/protobuf/releases and make sure it is
|
||||
executable and in your path.
|
||||
|
@ -65,6 +66,10 @@ Run `./dev.sh` to start the hub. The script will restart the hub as you make cha
|
|||
|
||||
To search, use `go run . search text goes here`.
|
||||
|
||||
#### Windows
|
||||
|
||||
reflex doesn't work on windows, so you'll need to run `go run . serve` and restart manually as you make changes.
|
||||
|
||||
## License
|
||||
|
||||
This project is MIT licensed. For the full license, see [LICENSE](LICENSE).
|
||||
|
|
859
server/search.go
859
server/search.go
|
@ -3,116 +3,833 @@ package server
|
|||
import (
|
||||
"context"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"math"
|
||||
"reflect"
|
||||
"strings"
|
||||
|
||||
//"github.com/lbryio/hub/schema"
|
||||
|
||||
"github.com/btcsuite/btcutil/base58"
|
||||
"github.com/golang/protobuf/ptypes/wrappers"
|
||||
pb "github.com/lbryio/hub/protobuf/go"
|
||||
|
||||
"github.com/lbryio/hub/util"
|
||||
"github.com/olivere/elastic/v7"
|
||||
"golang.org/x/text/cases"
|
||||
"golang.org/x/text/language"
|
||||
"gopkg.in/karalabe/cookiejar.v1/collections/deque"
|
||||
)
|
||||
|
||||
const DefaultSearchSize = 1000
|
||||
|
||||
type record struct {
|
||||
Txid string `json:"tx_id"`
|
||||
Nout uint32 `json:"tx_nout"`
|
||||
Txid string `json:"tx_id"`
|
||||
Nout uint32 `json:"tx_nout"`
|
||||
Height uint32 `json:"height"`
|
||||
ClaimId string `json:"claim_id"`
|
||||
ChannelId string `json:"channel_id"`
|
||||
RepostedClaimId string `json:"reposted_claim_id"`
|
||||
CensorType uint32 `json:"censor_type"`
|
||||
CensoringChannelHash string `json:"censoring_channel_hash"`
|
||||
ShortUrl string `json:"short_url"`
|
||||
CanonicalUrl string `json:"canonical_url"`
|
||||
IsControlling bool `json:"is_controlling"`
|
||||
TakeOverHeight uint32 `json:"last_take_over_height"`
|
||||
CreationHeight uint32 `json:"creation_height"`
|
||||
ActivationHeight uint32 `json:"activation_height"`
|
||||
ExpirationHeight uint32 `json:"expiration_height"`
|
||||
ClaimsInChannel uint32 `json:"claims_in_channel"`
|
||||
Reposted uint32 `json:"reposted"`
|
||||
EffectiveAmount uint64 `json:"effective_amount"`
|
||||
SupportAmount uint64 `json:"support_amount"`
|
||||
TrendingGroup uint32 `json:"trending_group"`
|
||||
TrendingMixed float32 `json:"trending_mixed"`
|
||||
TrendingLocal float32 `json:"trending_local"`
|
||||
TrendingGlobal float32 `json:"trending_global"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
func (s *Server) Search(ctx context.Context, in *pb.SearchRequest) (*pb.SearchReply, error) {
|
||||
// TODO: reuse elastic client across requests
|
||||
client, err := elastic.NewClient()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
type orderField struct {
|
||||
Field string
|
||||
IsAsc bool
|
||||
}
|
||||
|
||||
func StrArrToInterface(arr []string) []interface{} {
|
||||
searchVals := make([]interface{}, len(arr))
|
||||
for i := 0; i < len(arr); i++ {
|
||||
searchVals[i] = arr[i]
|
||||
}
|
||||
return searchVals
|
||||
}
|
||||
|
||||
func AddTermsField(q *elastic.BoolQuery, arr []string, name string) *elastic.BoolQuery {
|
||||
if len(arr) == 0 {
|
||||
return q
|
||||
}
|
||||
searchVals := StrArrToInterface(arr)
|
||||
return q.Must(elastic.NewTermsQuery(name, searchVals...))
|
||||
}
|
||||
|
||||
func AddIndividualTermFields(q *elastic.BoolQuery, arr []string, name string, invert bool) *elastic.BoolQuery {
|
||||
for _, x := range arr {
|
||||
if invert {
|
||||
q = q.MustNot(elastic.NewTermQuery(name, x))
|
||||
} else {
|
||||
q = q.Must(elastic.NewTermQuery(name, x))
|
||||
}
|
||||
}
|
||||
return q
|
||||
}
|
||||
|
||||
func AddRangeField(q *elastic.BoolQuery, rq *pb.RangeField, name string) *elastic.BoolQuery {
|
||||
if rq == nil {
|
||||
return q
|
||||
}
|
||||
|
||||
// Ping the Elasticsearch server to get e.g. the version number
|
||||
//_, code, err := client.Ping("http://127.0.0.1:9200").Do(ctx)
|
||||
//if err != nil {
|
||||
// return nil, err
|
||||
//}
|
||||
//if code != 200 {
|
||||
// return nil, errors.New("ping failed")
|
||||
//}
|
||||
if len(rq.Value) > 1 {
|
||||
if rq.Op != pb.RangeField_EQ {
|
||||
return q
|
||||
}
|
||||
return AddTermsField(q, rq.Value, name)
|
||||
}
|
||||
if rq.Op == pb.RangeField_EQ {
|
||||
return q.Must(elastic.NewTermQuery(name, rq.Value[0]))
|
||||
} else if rq.Op == pb.RangeField_LT {
|
||||
return q.Must(elastic.NewRangeQuery(name).Lt(rq.Value[0]))
|
||||
} else if rq.Op == pb.RangeField_LTE {
|
||||
return q.Must(elastic.NewRangeQuery(name).Lte(rq.Value[0]))
|
||||
} else if rq.Op == pb.RangeField_GT {
|
||||
return q.Must(elastic.NewRangeQuery(name).Gt(rq.Value[0]))
|
||||
} else { // pb.RangeField_GTE
|
||||
return q.Must(elastic.NewRangeQuery(name).Gte(rq.Value[0]))
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: support all of this https://github.com/lbryio/lbry-sdk/blob/master/lbry/wallet/server/db/elasticsearch/search.py#L385
|
||||
q := elastic.NewSimpleQueryStringQuery(in.Query).
|
||||
FieldWithBoost("claim_name", 4).
|
||||
FieldWithBoost("channel_name", 8).
|
||||
FieldWithBoost("title", 1).
|
||||
FieldWithBoost("description", 0.5).
|
||||
FieldWithBoost("author", 1).
|
||||
FieldWithBoost("tags", 0.5)
|
||||
func AddInvertibleField(q *elastic.BoolQuery, field *pb.InvertibleField, name string) *elastic.BoolQuery {
|
||||
if field == nil {
|
||||
return q
|
||||
}
|
||||
searchVals := StrArrToInterface(field.Value)
|
||||
if field.Invert {
|
||||
q = q.MustNot(elastic.NewTermsQuery(name, searchVals...))
|
||||
if name == "channel_id.keyword" {
|
||||
q = q.MustNot(elastic.NewTermsQuery("_id", searchVals...))
|
||||
}
|
||||
return q
|
||||
} else {
|
||||
return q.Must(elastic.NewTermsQuery(name, searchVals...))
|
||||
}
|
||||
}
|
||||
|
||||
searchResult, err := client.Search().
|
||||
//Index("twitter"). // search in index "twitter"
|
||||
// Search /*
|
||||
// Search logic is as follows:
|
||||
// 1) Setup query with params given
|
||||
// 2) Do query with limit of 1000
|
||||
// 3) remove blocked content (these are returned separately)
|
||||
// 4) remove duplicates (these are not returned)
|
||||
// 5) limit claims per channel logic
|
||||
// 6) get claims referenced by reposts
|
||||
// 7) get channels references by claims and repost claims
|
||||
// 8) return streams referenced by repost and all channel referenced in extra_txos
|
||||
//*/
|
||||
func (s *Server) Search(ctx context.Context, in *pb.SearchRequest) (*pb.Outputs, error) {
|
||||
var client *elastic.Client = nil
|
||||
if s.EsClient == nil {
|
||||
esUrl := s.Args.EsHost + ":" + s.Args.EsPort
|
||||
tmpClient, err := elastic.NewClient(elastic.SetURL(esUrl), elastic.SetSniff(false))
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
return nil, err
|
||||
}
|
||||
client = tmpClient
|
||||
s.EsClient = client
|
||||
} else {
|
||||
client = s.EsClient
|
||||
}
|
||||
|
||||
var from = 0
|
||||
var pageSize = 10
|
||||
var orderBy []orderField
|
||||
var searchIndices = []string{}
|
||||
|
||||
q := elastic.NewBoolQuery()
|
||||
|
||||
q = s.setupEsQuery(q, in, &pageSize, &from, &orderBy)
|
||||
|
||||
if s.Args.Dev && len(in.SearchIndices) == 0 {
|
||||
// If we're running in dev mode ignore the mainnet claims index
|
||||
indices, err := client.IndexNames()
|
||||
if err != nil {
|
||||
log.Fatalln(err)
|
||||
}
|
||||
var numIndices = len(indices)
|
||||
searchIndices = make([]string, 0, numIndices)
|
||||
for i := 0; i < numIndices; i++ {
|
||||
if indices[i] == "claims" {
|
||||
continue
|
||||
}
|
||||
searchIndices = append(searchIndices, indices[i])
|
||||
}
|
||||
}
|
||||
|
||||
if len(in.SearchIndices) > 0 {
|
||||
searchIndices = in.SearchIndices
|
||||
}
|
||||
|
||||
fsc := elastic.NewFetchSourceContext(true).Exclude("description", "title")
|
||||
search := client.Search().
|
||||
Index(searchIndices...).
|
||||
FetchSourceContext(fsc).
|
||||
Query(q). // specify the query
|
||||
//Sort("user", true). // sort by "user" field, ascending
|
||||
From(0).Size(10). // take documents 0-9
|
||||
//Pretty(true). // pretty print request and response JSON
|
||||
Do(ctx) // execute
|
||||
From(0).Size(DefaultSearchSize)
|
||||
|
||||
|
||||
for _, x := range orderBy {
|
||||
search = search.Sort(x.Field, x.IsAsc)
|
||||
}
|
||||
|
||||
searchResult, err := search.Do(ctx) // execute
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
log.Printf("%s: found %d results in %dms\n", in.Query, len(searchResult.Hits.Hits), searchResult.TookInMillis)
|
||||
log.Printf("%s: found %d results in %dms\n", in.Text, len(searchResult.Hits.Hits), searchResult.TookInMillis)
|
||||
|
||||
txos := make([]*pb.Output, len(searchResult.Hits.Hits))
|
||||
txos, extraTxos, blocked := s.postProcessResults(ctx, client, searchResult, in, pageSize, from, searchIndices)
|
||||
|
||||
if in.NoTotals != nil && !in.NoTotals.Value {
|
||||
return &pb.Outputs{
|
||||
Txos: txos,
|
||||
ExtraTxos: extraTxos,
|
||||
Offset: uint32(int64(from) + searchResult.TotalHits()),
|
||||
Blocked: blocked,
|
||||
}, nil
|
||||
}
|
||||
|
||||
var blockedTotal uint32 = 0
|
||||
for _, b := range blocked {
|
||||
blockedTotal += b.Count
|
||||
}
|
||||
return &pb.Outputs{
|
||||
Txos: txos,
|
||||
ExtraTxos: extraTxos,
|
||||
Total: uint32(searchResult.TotalHits()),
|
||||
Offset: uint32(int64(from) + searchResult.TotalHits()),
|
||||
Blocked: blocked,
|
||||
BlockedTotal: blockedTotal,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (s *Server) normalizeTag(tag string) string {
|
||||
c := cases.Lower(language.English)
|
||||
res := s.MultiSpaceRe.ReplaceAll(
|
||||
s.WeirdCharsRe.ReplaceAll(
|
||||
[]byte(strings.TrimSpace(strings.Replace(c.String(tag), "'", "", -1))),
|
||||
[]byte(" ")),
|
||||
[]byte(" "))
|
||||
|
||||
return string(res)
|
||||
}
|
||||
|
||||
|
||||
func (s *Server) cleanTags(tags []string) []string {
|
||||
cleanedTags := make([]string, len(tags))
|
||||
for i, tag := range tags {
|
||||
cleanedTags[i] = s.normalizeTag(tag)
|
||||
}
|
||||
return cleanedTags
|
||||
}
|
||||
|
||||
func (s *Server) postProcessResults(
|
||||
ctx context.Context,
|
||||
client *elastic.Client,
|
||||
searchResult *elastic.SearchResult,
|
||||
in *pb.SearchRequest,
|
||||
pageSize int,
|
||||
from int,
|
||||
searchIndices []string) ([]*pb.Output, []*pb.Output, []*pb.Blocked) {
|
||||
var txos []*pb.Output
|
||||
var records []*record
|
||||
var blockedRecords []*record
|
||||
var blocked []*pb.Blocked
|
||||
var blockedMap map[string]*pb.Blocked
|
||||
|
||||
records = make([]*record, 0, searchResult.TotalHits())
|
||||
|
||||
var r record
|
||||
for i, item := range searchResult.Each(reflect.TypeOf(r)) {
|
||||
for _, item := range searchResult.Each(reflect.TypeOf(r)) {
|
||||
if t, ok := item.(record); ok {
|
||||
txos[i] = &pb.Output{
|
||||
TxHash: toHash(t.Txid),
|
||||
Nout: t.Nout,
|
||||
records = append(records, &t)
|
||||
}
|
||||
}
|
||||
|
||||
//printJsonFullResults(searchResult)
|
||||
records, blockedRecords, blockedMap = removeBlocked(records)
|
||||
|
||||
if in.RemoveDuplicates != nil {
|
||||
records = removeDuplicates(records)
|
||||
}
|
||||
|
||||
if in.LimitClaimsPerChannel != nil && in.LimitClaimsPerChannel.Value > 0 {
|
||||
records = searchAhead(records, pageSize, int(in.LimitClaimsPerChannel.Value))
|
||||
}
|
||||
|
||||
finalLength := int(math.Min(float64(len(records)), float64(pageSize)))
|
||||
txos = make([]*pb.Output, 0, finalLength)
|
||||
var j = 0
|
||||
for i := from; i < from + finalLength && i < len(records) && j < finalLength; i++ {
|
||||
t := records[i]
|
||||
res := t.recordToOutput()
|
||||
txos = append(txos, res)
|
||||
j += 1
|
||||
}
|
||||
|
||||
//printJsonFullRecords(blockedRecords)
|
||||
|
||||
//Get claims for reposts
|
||||
repostClaims, repostRecords, repostedMap := getClaimsForReposts(ctx, client, records, searchIndices)
|
||||
//get all unique channels
|
||||
channels, channelMap := getUniqueChannels(append(append(records, repostRecords...), blockedRecords...), client, ctx, searchIndices)
|
||||
//add these to extra txos
|
||||
extraTxos := append(repostClaims, channels...)
|
||||
|
||||
//Fill in channel / repost data for txos and blocked
|
||||
for i, txo := range txos {
|
||||
channel, cOk := channelMap[records[i].ChannelId]
|
||||
repostClaim, rOk := repostedMap[records[i].RepostedClaimId]
|
||||
if cOk {
|
||||
txo.GetClaim().Channel = channel
|
||||
}
|
||||
if rOk {
|
||||
txo.GetClaim().Repost = repostClaim
|
||||
}
|
||||
}
|
||||
|
||||
blocked = make([]*pb.Blocked, 0, len(blockedMap))
|
||||
for k, v := range blockedMap {
|
||||
if channel, ok := channelMap[k]; ok {
|
||||
v.Channel = channel
|
||||
}
|
||||
blocked = append(blocked, v)
|
||||
}
|
||||
|
||||
return txos, extraTxos, blocked
|
||||
}
|
||||
|
||||
func (s *Server) setupEsQuery(
|
||||
q *elastic.BoolQuery,
|
||||
in *pb.SearchRequest,
|
||||
pageSize *int,
|
||||
from *int,
|
||||
orderBy *[]orderField) *elastic.BoolQuery {
|
||||
claimTypes := map[string]int {
|
||||
"stream": 1,
|
||||
"channel": 2,
|
||||
"repost": 3,
|
||||
"collection": 4,
|
||||
}
|
||||
|
||||
streamTypes := map[string]int {
|
||||
"video": 1,
|
||||
"audio": 2,
|
||||
"image": 3,
|
||||
"document": 4,
|
||||
"binary": 5,
|
||||
"model": 6,
|
||||
}
|
||||
|
||||
replacements := map[string]string {
|
||||
"name": "normalized",
|
||||
"txid": "tx_id",
|
||||
"claim_hash": "_id",
|
||||
}
|
||||
|
||||
textFields := map[string]bool {
|
||||
"author": true,
|
||||
"canonical_url": true,
|
||||
"channel_id": true,
|
||||
"claim_name": true,
|
||||
"description": true,
|
||||
"claim_id": true,
|
||||
"media_type": true,
|
||||
"normalized": true,
|
||||
"public_key_bytes": true,
|
||||
"public_key_hash": true,
|
||||
"short_url": true,
|
||||
"signature": true,
|
||||
"signature_digest": true,
|
||||
"stream_type": true,
|
||||
"title": true,
|
||||
"tx_id": true,
|
||||
"fee_currency": true,
|
||||
"reposted_claim_id": true,
|
||||
"tags": true,
|
||||
}
|
||||
|
||||
if in.IsControlling != nil {
|
||||
q = q.Must(elastic.NewTermQuery("is_controlling", in.IsControlling.Value))
|
||||
}
|
||||
|
||||
if in.AmountOrder != nil {
|
||||
in.Limit.Value = 1
|
||||
in.OrderBy = []string{"effective_amount"}
|
||||
in.Offset = &wrappers.Int32Value{Value: in.AmountOrder.Value - 1}
|
||||
}
|
||||
|
||||
if in.Limit != nil {
|
||||
*pageSize = int(in.Limit.Value)
|
||||
}
|
||||
|
||||
if in.Offset != nil {
|
||||
*from = int(in.Offset.Value)
|
||||
}
|
||||
|
||||
if len(in.Name) > 0 {
|
||||
normalized := make([]string, len(in.Name))
|
||||
for i := 0; i < len(in.Name); i++ {
|
||||
normalized[i] = util.Normalize(in.Name[i])
|
||||
}
|
||||
in.Normalized = normalized
|
||||
}
|
||||
|
||||
if len(in.OrderBy) > 0 {
|
||||
for _, x := range in.OrderBy {
|
||||
var toAppend string
|
||||
var isAsc = false
|
||||
if x[0] == '^' {
|
||||
isAsc = true
|
||||
x = x[1:]
|
||||
}
|
||||
if _, ok := replacements[x]; ok {
|
||||
toAppend = replacements[x]
|
||||
} else {
|
||||
toAppend = x
|
||||
}
|
||||
|
||||
if _, ok := textFields[toAppend]; ok {
|
||||
toAppend = toAppend + ".keyword"
|
||||
}
|
||||
*orderBy = append(*orderBy, orderField{toAppend, isAsc})
|
||||
}
|
||||
}
|
||||
|
||||
if len(in.ClaimType) > 0 {
|
||||
searchVals := make([]interface{}, len(in.ClaimType))
|
||||
for i := 0; i < len(in.ClaimType); i++ {
|
||||
searchVals[i] = claimTypes[in.ClaimType[i]]
|
||||
}
|
||||
q = q.Must(elastic.NewTermsQuery("claim_type", searchVals...))
|
||||
}
|
||||
|
||||
if len(in.StreamType) > 0 {
|
||||
searchVals := make([]interface{}, len(in.StreamType))
|
||||
for i := 0; i < len(in.StreamType); i++ {
|
||||
searchVals[i] = streamTypes[in.StreamType[i]]
|
||||
}
|
||||
q = q.Must(elastic.NewTermsQuery("stream_type", searchVals...))
|
||||
}
|
||||
|
||||
if len(in.XId) > 0 {
|
||||
searchVals := make([]interface{}, len(in.XId))
|
||||
for i := 0; i < len(in.XId); i++ {
|
||||
util.ReverseBytes(in.XId[i])
|
||||
searchVals[i] = hex.Dump(in.XId[i])
|
||||
}
|
||||
if len(in.XId) == 1 && len(in.XId[0]) < 20 {
|
||||
q = q.Must(elastic.NewPrefixQuery("_id", string(in.XId[0])))
|
||||
} else {
|
||||
q = q.Must(elastic.NewTermsQuery("_id", searchVals...))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if in.ClaimId != nil {
|
||||
searchVals := StrArrToInterface(in.ClaimId.Value)
|
||||
if len(in.ClaimId.Value) == 1 && len(in.ClaimId.Value[0]) < 20 {
|
||||
if in.ClaimId.Invert {
|
||||
q = q.MustNot(elastic.NewPrefixQuery("claim_id.keyword", in.ClaimId.Value[0]))
|
||||
} else {
|
||||
q = q.Must(elastic.NewPrefixQuery("claim_id.keyword", in.ClaimId.Value[0]))
|
||||
}
|
||||
} else {
|
||||
if in.ClaimId.Invert {
|
||||
q = q.MustNot(elastic.NewTermsQuery("claim_id.keyword", searchVals...))
|
||||
} else {
|
||||
q = q.Must(elastic.NewTermsQuery("claim_id.keyword", searchVals...))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// or if you want more control
|
||||
//for _, hit := range searchResult.Hits.Hits {
|
||||
// // hit.Index contains the name of the index
|
||||
//
|
||||
// var t map[string]interface{} // or could be a Record
|
||||
// err := json.Unmarshal(hit.Source, &t)
|
||||
// if err != nil {
|
||||
// return nil, err
|
||||
// }
|
||||
//
|
||||
// for k := range t {
|
||||
// fmt.Println(k)
|
||||
// }
|
||||
// return nil, nil
|
||||
//}
|
||||
if in.PublicKeyId != "" {
|
||||
value := hex.EncodeToString(base58.Decode(in.PublicKeyId)[1:21])
|
||||
q = q.Must(elastic.NewTermQuery("public_key_hash.keyword", value))
|
||||
}
|
||||
|
||||
return &pb.SearchReply{
|
||||
Txos: txos,
|
||||
Total: uint32(searchResult.TotalHits()),
|
||||
}, nil
|
||||
if in.HasChannelSignature != nil && in.HasChannelSignature.Value {
|
||||
q = q.Must(elastic.NewExistsQuery("signature_digest"))
|
||||
if in.SignatureValid != nil {
|
||||
q = q.Must(elastic.NewTermQuery("signature_valid", in.SignatureValid.Value))
|
||||
}
|
||||
} else if in.SignatureValid != nil {
|
||||
q = q.MinimumNumberShouldMatch(1)
|
||||
q = q.Should(elastic.NewBoolQuery().MustNot(elastic.NewExistsQuery("signature_digest")))
|
||||
q = q.Should(elastic.NewTermQuery("signature_valid", in.SignatureValid.Value))
|
||||
}
|
||||
|
||||
if in.HasSource != nil {
|
||||
q = q.MinimumNumberShouldMatch(1)
|
||||
isStreamOrRepost := elastic.NewTermsQuery("claim_type", claimTypes["stream"], claimTypes["repost"])
|
||||
q = q.Should(elastic.NewBoolQuery().Must(isStreamOrRepost, elastic.NewMatchQuery("has_source", in.HasSource.Value)))
|
||||
q = q.Should(elastic.NewBoolQuery().MustNot(isStreamOrRepost))
|
||||
q = q.Should(elastic.NewBoolQuery().Must(elastic.NewTermQuery("reposted_claim_type", claimTypes["channel"])))
|
||||
}
|
||||
|
||||
if in.TxNout != nil {
|
||||
q = q.Must(elastic.NewTermQuery("tx_nout", in.TxNout.Value))
|
||||
}
|
||||
|
||||
q = AddTermsField(q, in.PublicKeyHash, "public_key_hash.keyword")
|
||||
q = AddTermsField(q, in.Author, "author.keyword")
|
||||
q = AddTermsField(q, in.Title, "title.keyword")
|
||||
q = AddTermsField(q, in.CanonicalUrl, "canonical_url.keyword")
|
||||
q = AddTermsField(q, in.ClaimName, "claim_name.keyword")
|
||||
q = AddTermsField(q, in.Description, "description.keyword")
|
||||
q = AddTermsField(q, in.MediaType, "media_type.keyword")
|
||||
q = AddTermsField(q, in.Normalized, "normalized.keyword")
|
||||
q = AddTermsField(q, in.PublicKeyBytes, "public_key_bytes.keyword")
|
||||
q = AddTermsField(q, in.ShortUrl, "short_url.keyword")
|
||||
q = AddTermsField(q, in.Signature, "signature.keyword")
|
||||
q = AddTermsField(q, in.SignatureDigest, "signature_digest.keyword")
|
||||
q = AddTermsField(q, in.TxId, "tx_id.keyword")
|
||||
q = AddTermsField(q, in.FeeCurrency, "fee_currency.keyword")
|
||||
q = AddTermsField(q, in.RepostedClaimId, "reposted_claim_id.keyword")
|
||||
|
||||
|
||||
q = AddTermsField(q, s.cleanTags(in.AnyTags), "tags.keyword")
|
||||
q = AddIndividualTermFields(q, s.cleanTags(in.AllTags), "tags.keyword", false)
|
||||
q = AddIndividualTermFields(q, s.cleanTags(in.NotTags), "tags.keyword", true)
|
||||
q = AddTermsField(q, in.AnyLanguages, "languages")
|
||||
q = AddIndividualTermFields(q, in.AllLanguages, "languages", false)
|
||||
|
||||
q = AddInvertibleField(q, in.ChannelId, "channel_id.keyword")
|
||||
q = AddInvertibleField(q, in.ChannelIds, "channel_id.keyword")
|
||||
|
||||
|
||||
q = AddRangeField(q, in.TxPosition, "tx_position")
|
||||
q = AddRangeField(q, in.Amount, "amount")
|
||||
q = AddRangeField(q, in.Timestamp, "timestamp")
|
||||
q = AddRangeField(q, in.CreationTimestamp, "creation_timestamp")
|
||||
q = AddRangeField(q, in.Height, "height")
|
||||
q = AddRangeField(q, in.CreationHeight, "creation_height")
|
||||
q = AddRangeField(q, in.ActivationHeight, "activation_height")
|
||||
q = AddRangeField(q, in.ExpirationHeight, "expiration_height")
|
||||
q = AddRangeField(q, in.ReleaseTime, "release_time")
|
||||
q = AddRangeField(q, in.Reposted, "reposted")
|
||||
q = AddRangeField(q, in.FeeAmount, "fee_amount")
|
||||
q = AddRangeField(q, in.Duration, "duration")
|
||||
q = AddRangeField(q, in.CensorType, "censor_type")
|
||||
q = AddRangeField(q, in.ChannelJoin, "channel_join")
|
||||
q = AddRangeField(q, in.EffectiveAmount, "effective_amount")
|
||||
q = AddRangeField(q, in.SupportAmount, "support_amount")
|
||||
q = AddRangeField(q, in.TrendingGroup, "trending_group")
|
||||
q = AddRangeField(q, in.TrendingMixed, "trending_mixed")
|
||||
q = AddRangeField(q, in.TrendingLocal, "trending_local")
|
||||
q = AddRangeField(q, in.TrendingGlobal, "trending_global")
|
||||
|
||||
if in.Text != "" {
|
||||
textQuery := elastic.NewSimpleQueryStringQuery(in.Text).
|
||||
FieldWithBoost("claim_name", 4).
|
||||
FieldWithBoost("channel_name", 8).
|
||||
FieldWithBoost("title", 1).
|
||||
FieldWithBoost("description", 0.5).
|
||||
FieldWithBoost("author", 1).
|
||||
FieldWithBoost("tags", 0.5)
|
||||
|
||||
q = q.Must(textQuery)
|
||||
}
|
||||
|
||||
return q
|
||||
}
|
||||
|
||||
// convert txid to txHash
|
||||
func toHash(txid string) []byte {
|
||||
t, err := hex.DecodeString(txid)
|
||||
func getUniqueChannels(records []*record, client *elastic.Client, ctx context.Context, searchIndices []string) ([]*pb.Output, map[string]*pb.Output) {
|
||||
channels := make(map[string]*pb.Output)
|
||||
channelsSet := make(map[string]bool)
|
||||
var mget = client.Mget()
|
||||
var totalChannels = 0
|
||||
for _, r := range records {
|
||||
for _, searchIndex := range searchIndices {
|
||||
if r.ChannelId != "" && !channelsSet[r.ChannelId] {
|
||||
channelsSet[r.ChannelId] = true
|
||||
nmget := elastic.NewMultiGetItem().Id(r.ChannelId).Index(searchIndex)
|
||||
mget = mget.Add(nmget)
|
||||
totalChannels++
|
||||
}
|
||||
if r.CensorType != 0 && !channelsSet[r.CensoringChannelHash] {
|
||||
channelsSet[r.CensoringChannelHash] = true
|
||||
nmget := elastic.NewMultiGetItem().Id(r.CensoringChannelHash).Index(searchIndex)
|
||||
mget = mget.Add(nmget)
|
||||
totalChannels++
|
||||
}
|
||||
}
|
||||
}
|
||||
if totalChannels == 0 {
|
||||
return []*pb.Output{}, make(map[string]*pb.Output)
|
||||
}
|
||||
|
||||
res, err := mget.Do(ctx)
|
||||
if err != nil {
|
||||
return nil
|
||||
log.Println(err)
|
||||
return []*pb.Output{}, make(map[string]*pb.Output)
|
||||
}
|
||||
|
||||
// reverse the bytes. thanks, Satoshi 😒
|
||||
for i, j := 0, len(t)-1; i < j; i, j = i+1, j-1 {
|
||||
t[i], t[j] = t[j], t[i]
|
||||
channelTxos := make([]*pb.Output, totalChannels)
|
||||
//repostedRecords := make([]*record, totalReposted)
|
||||
|
||||
log.Println("total channel", totalChannels)
|
||||
for i, doc := range res.Docs {
|
||||
var r record
|
||||
err := json.Unmarshal(doc.Source, &r)
|
||||
if err != nil {
|
||||
return []*pb.Output{}, make(map[string]*pb.Output)
|
||||
}
|
||||
channelTxos[i] = r.recordToOutput()
|
||||
channels[r.ClaimId] = channelTxos[i]
|
||||
//log.Println(r)
|
||||
//repostedRecords[i] = &r
|
||||
}
|
||||
|
||||
return t
|
||||
return channelTxos, channels
|
||||
}
|
||||
|
||||
// convert txHash to txid
|
||||
func FromHash(txHash []byte) string {
|
||||
t := make([]byte, len(txHash))
|
||||
copy(t, txHash)
|
||||
func getClaimsForReposts(ctx context.Context, client *elastic.Client, records []*record, searchIndices []string) ([]*pb.Output, []*record, map[string]*pb.Output) {
|
||||
|
||||
// reverse the bytes. thanks, Satoshi 😒
|
||||
for i, j := 0, len(txHash)-1; i < j; i, j = i+1, j-1 {
|
||||
txHash[i], txHash[j] = txHash[j], txHash[i]
|
||||
var totalReposted = 0
|
||||
var mget = client.Mget()//.StoredFields("_id")
|
||||
/*
|
||||
var nmget = elastic.NewMultiGetItem()
|
||||
for _, index := range searchIndices {
|
||||
nmget = nmget.Index(index)
|
||||
}
|
||||
*/
|
||||
for _, r := range records {
|
||||
for _, searchIndex := range searchIndices {
|
||||
if r.RepostedClaimId != "" {
|
||||
var nmget = elastic.NewMultiGetItem().Id(r.RepostedClaimId).Index(searchIndex)
|
||||
//nmget = nmget.Id(r.RepostedClaimId)
|
||||
mget = mget.Add(nmget)
|
||||
totalReposted++
|
||||
}
|
||||
}
|
||||
}
|
||||
//mget = mget.Add(nmget)
|
||||
if totalReposted == 0 {
|
||||
return []*pb.Output{}, []*record{}, make(map[string]*pb.Output)
|
||||
}
|
||||
|
||||
return hex.EncodeToString(t)
|
||||
res, err := mget.Do(ctx)
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
return []*pb.Output{}, []*record{}, make(map[string]*pb.Output)
|
||||
}
|
||||
|
||||
claims := make([]*pb.Output, totalReposted)
|
||||
repostedRecords := make([]*record, totalReposted)
|
||||
respostedMap := make(map[string]*pb.Output)
|
||||
|
||||
log.Println("reposted records", totalReposted)
|
||||
for i, doc := range res.Docs {
|
||||
var r record
|
||||
err := json.Unmarshal(doc.Source, &r)
|
||||
if err != nil {
|
||||
return []*pb.Output{}, []*record{}, make(map[string]*pb.Output)
|
||||
}
|
||||
claims[i] = r.recordToOutput()
|
||||
repostedRecords[i] = &r
|
||||
respostedMap[r.ClaimId] = claims[i]
|
||||
}
|
||||
|
||||
return claims, repostedRecords, respostedMap
|
||||
}
|
||||
|
||||
func searchAhead(searchHits []*record, pageSize int, perChannelPerPage int) []*record {
|
||||
finalHits := make([]*record, 0 , len(searchHits))
|
||||
var channelCounters map[string]int
|
||||
channelCounters = make(map[string]int)
|
||||
nextPageHitsMaybeCheckLater := deque.New()
|
||||
searchHitsQ := deque.New()
|
||||
for _, rec := range searchHits {
|
||||
searchHitsQ.PushRight(rec)
|
||||
}
|
||||
for !searchHitsQ.Empty() || !nextPageHitsMaybeCheckLater.Empty() {
|
||||
if len(finalHits) > 0 && len(finalHits) % pageSize == 0 {
|
||||
channelCounters = make(map[string]int)
|
||||
} else if len(finalHits) != 0 {
|
||||
// means last page was incomplete and we are left with bad replacements
|
||||
break
|
||||
}
|
||||
|
||||
for i := 0; i < nextPageHitsMaybeCheckLater.Size(); i++ {
|
||||
rec := nextPageHitsMaybeCheckLater.PopLeft().(*record)
|
||||
if perChannelPerPage > 0 && channelCounters[rec.ChannelId] < perChannelPerPage {
|
||||
finalHits = append(finalHits, rec)
|
||||
channelCounters[rec.ChannelId] = channelCounters[rec.ChannelId] + 1
|
||||
}
|
||||
}
|
||||
for !searchHitsQ.Empty() {
|
||||
hit := searchHitsQ.PopLeft().(*record)
|
||||
if hit.ChannelId == "" || perChannelPerPage < 0 {
|
||||
finalHits = append(finalHits, hit)
|
||||
} else if channelCounters[hit.ChannelId] < perChannelPerPage {
|
||||
finalHits = append(finalHits, hit)
|
||||
channelCounters[hit.ChannelId] = channelCounters[hit.ChannelId] + 1
|
||||
if len(finalHits) % pageSize == 0 {
|
||||
break
|
||||
}
|
||||
} else {
|
||||
nextPageHitsMaybeCheckLater.PushRight(hit)
|
||||
}
|
||||
}
|
||||
}
|
||||
return finalHits
|
||||
}
|
||||
|
||||
func (r *record) recordToChannelOutput() *pb.Output {
|
||||
// Don't nee dthe meta for this one
|
||||
return &pb.Output{
|
||||
TxHash: util.ToHash(r.Txid),
|
||||
Nout: r.Nout,
|
||||
Height: r.Height,
|
||||
}
|
||||
}
|
||||
|
||||
func (r *record) recordToOutput() *pb.Output {
|
||||
return &pb.Output{
|
||||
TxHash: util.ToHash(r.Txid),
|
||||
Nout: r.Nout,
|
||||
Height: r.Height,
|
||||
Meta: &pb.Output_Claim{
|
||||
Claim: &pb.ClaimMeta{
|
||||
//Channel:
|
||||
//Repost:
|
||||
ShortUrl: r.ShortUrl,
|
||||
CanonicalUrl: r.CanonicalUrl,
|
||||
IsControlling: r.IsControlling,
|
||||
TakeOverHeight: r.TakeOverHeight,
|
||||
CreationHeight: r.CreationHeight,
|
||||
ActivationHeight: r.ActivationHeight,
|
||||
ExpirationHeight: r.ExpirationHeight,
|
||||
ClaimsInChannel: r.ClaimsInChannel,
|
||||
Reposted: r.Reposted,
|
||||
EffectiveAmount: r.EffectiveAmount,
|
||||
SupportAmount: r.SupportAmount,
|
||||
TrendingGroup: r.TrendingGroup,
|
||||
TrendingMixed: r.TrendingMixed,
|
||||
TrendingLocal: r.TrendingLocal,
|
||||
TrendingGlobal: r.TrendingGlobal,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (r *record) getHitId() string {
|
||||
if r.RepostedClaimId != "" {
|
||||
return r.RepostedClaimId
|
||||
} else {
|
||||
return r.ClaimId
|
||||
}
|
||||
}
|
||||
|
||||
func removeDuplicates(searchHits []*record) []*record {
|
||||
dropped := make(map[*record]bool)
|
||||
// claim_id -> (creation_height, hit_id), where hit_id is either reposted claim id or original
|
||||
knownIds := make(map[string]*record)
|
||||
|
||||
for _, hit := range searchHits {
|
||||
hitHeight := hit.Height
|
||||
hitId := hit.getHitId()
|
||||
|
||||
|
||||
if knownIds[hitId] == nil {
|
||||
knownIds[hitId] = hit
|
||||
} else {
|
||||
prevHit := knownIds[hitId]
|
||||
if hitHeight < prevHit.Height {
|
||||
knownIds[hitId] = hit
|
||||
dropped[prevHit] = true
|
||||
} else {
|
||||
dropped[hit] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
deduped := make([]*record, len(searchHits) - len(dropped))
|
||||
|
||||
var i = 0
|
||||
for _, hit := range searchHits {
|
||||
if !dropped[hit] {
|
||||
deduped[i] = hit
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
return deduped
|
||||
}
|
||||
|
||||
func removeBlocked(searchHits []*record) ([]*record, []*record, map[string]*pb.Blocked) {
|
||||
newHits := make([]*record, 0, len(searchHits))
|
||||
blockedHits := make([]*record, 0, len(searchHits))
|
||||
blockedChannels := make(map[string]*pb.Blocked)
|
||||
for _, r := range searchHits {
|
||||
if r.CensorType != 0 {
|
||||
if blockedChannels[r.CensoringChannelHash] == nil {
|
||||
blockedObj := &pb.Blocked{
|
||||
Count: 1,
|
||||
Channel: nil,
|
||||
}
|
||||
blockedChannels[r.CensoringChannelHash] = blockedObj
|
||||
blockedHits = append(blockedHits, r)
|
||||
} else {
|
||||
blockedChannels[r.CensoringChannelHash].Count += 1
|
||||
}
|
||||
} else {
|
||||
newHits = append(newHits, r)
|
||||
}
|
||||
}
|
||||
|
||||
return newHits, blockedHits, blockedChannels
|
||||
}
|
||||
|
||||
func printJsonFullRecords(records []*record) {
|
||||
// or if you want more control
|
||||
for _, r := range records {
|
||||
// hit.Index contains the name of the index
|
||||
|
||||
b, err := json.MarshalIndent(r, "", " ")
|
||||
if err != nil {
|
||||
fmt.Println("error:", err)
|
||||
}
|
||||
fmt.Println(string(b))
|
||||
}
|
||||
}
|
||||
|
||||
func printJsonFullResults(searchResult *elastic.SearchResult) {
|
||||
// or if you want more control
|
||||
for _, hit := range searchResult.Hits.Hits {
|
||||
// hit.Index contains the name of the index
|
||||
|
||||
var t map[string]interface{} // or could be a Record
|
||||
err := json.Unmarshal(hit.Source, &t)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
b, err := json.MarshalIndent(t, "", " ")
|
||||
if err != nil {
|
||||
fmt.Println("error:", err)
|
||||
}
|
||||
fmt.Println(string(b))
|
||||
}
|
||||
}
|
|
@ -1,13 +1,32 @@
|
|||
package server
|
||||
|
||||
import (
|
||||
"log"
|
||||
"regexp"
|
||||
|
||||
pb "github.com/lbryio/hub/protobuf/go"
|
||||
"github.com/olivere/elastic/v7"
|
||||
"google.golang.org/grpc"
|
||||
)
|
||||
|
||||
// Server implements the hub gRPC service (embedding pb.UnimplementedHubServer
// for forward compatibility). It bundles the running grpc.Server with the
// parsed CLI arguments, the precompiled query-normalization regexes, and an
// Elasticsearch client handle.
type Server struct {
	GrpcServer   *grpc.Server   // underlying gRPC server instance
	Args         *Args          // parsed command-line arguments
	MultiSpaceRe *regexp.Regexp // matches runs of 2+ whitespace chars (compiled in MakeHubServer)
	WeirdCharsRe *regexp.Regexp // matches the characters #, !, ~ (compiled in MakeHubServer)
	EsClient     *elastic.Client // NOTE(review): not assigned in MakeHubServer — confirm it is set elsewhere before use
	pb.UnimplementedHubServer
}
|
||||
|
||||
// Args holds the parsed command-line options for the hub server.
// (Field meanings inferred from their names — confirm against the CLI parser.)
type Args struct {
	Serve  bool   // presumably: run the gRPC server (the "serve" subcommand)
	Host   string // gRPC listen host
	Port   string // gRPC listen port
	EsHost string // Elasticsearch host
	EsPort string // Elasticsearch port
	Dev    bool   // development mode (the --dev flag)
}
|
||||
|
||||
/*
|
||||
'blockchain.block.get_chunk'
|
||||
'blockchain.block.get_header'
|
||||
|
@ -46,3 +65,26 @@ type Server struct {
|
|||
'blockchain.address.subscribe'
|
||||
'blockchain.address.unsubscribe'
|
||||
*/
|
||||
|
||||
func MakeHubServer(args *Args) *Server {
|
||||
grpcServer := grpc.NewServer()
|
||||
|
||||
multiSpaceRe, err := regexp.Compile("\\s{2,}")
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
weirdCharsRe, err := regexp.Compile("[#!~]")
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
s := &Server {
|
||||
GrpcServer: grpcServer,
|
||||
Args: args,
|
||||
MultiSpaceRe: multiSpaceRe,
|
||||
WeirdCharsRe: weirdCharsRe,
|
||||
}
|
||||
|
||||
return s
|
||||
}
|
47
util/util.go
Normal file
47
util/util.go
Normal file
|
@ -0,0 +1,47 @@
|
|||
package util
|
||||
|
||||
import (
|
||||
"encoding/hex"
|
||||
"golang.org/x/text/cases"
|
||||
"golang.org/x/text/unicode/norm"
|
||||
)
|
||||
|
||||
func Normalize(s string) string {
|
||||
c := cases.Fold()
|
||||
return c.String(norm.NFD.String(s))
|
||||
}
|
||||
|
||||
// ReverseBytes reverses s in place.
func ReverseBytes(s []byte) {
	n := len(s)
	for i := 0; i < n/2; i++ {
		s[i], s[n-1-i] = s[n-1-i], s[i]
	}
}
|
||||
|
||||
// convert txid to txHash
|
||||
func ToHash(txid string) []byte {
|
||||
t, err := hex.DecodeString(txid)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
// reverse the bytes. thanks, Satoshi 😒
|
||||
for i, j := 0, len(t)-1; i < j; i, j = i+1, j-1 {
|
||||
t[i], t[j] = t[j], t[i]
|
||||
}
|
||||
|
||||
return t
|
||||
}
|
||||
|
||||
// convert txHash to txid
|
||||
func FromHash(txHash []byte) string {
|
||||
t := make([]byte, len(txHash))
|
||||
copy(t, txHash)
|
||||
|
||||
// reverse the bytes. thanks, Satoshi 😒
|
||||
for i, j := 0, len(txHash)-1; i < j; i, j = i+1, j-1 {
|
||||
txHash[i], txHash[j] = txHash[j], txHash[i]
|
||||
}
|
||||
|
||||
return hex.EncodeToString(t)
|
||||
|
||||
}
|
Loading…
Reference in a new issue