Compare commits
91 commits
conf-lower
...
master
Author | SHA1 | Date | |
---|---|---|---|
|
c2230cdefb | ||
|
1c3a25f82d | ||
|
7c26b80971 | ||
|
3cce89cbac | ||
|
b4377b2f54 | ||
|
3b91279cc7 | ||
|
0817b70083 | ||
|
7b7e6c66ac | ||
|
c7e8d274f7 | ||
|
be45a70c36 | ||
|
d25e03d853 | ||
|
80f77218f9 | ||
|
75c8f82072 | ||
|
38e9af24ec | ||
|
a84e0e0f84 | ||
|
fd05ea1145 | ||
|
c27baa89fe | ||
|
84cafd643f | ||
|
3b6b052000 | ||
|
20f9ccc8c5 | ||
|
08060a71d3 | ||
|
c852697c94 | ||
|
8f12d997ae | ||
|
a6f056821f | ||
|
bd06d1c992 | ||
|
1153711636 | ||
|
e0b6d16c89 | ||
|
dba14460cc | ||
|
45733d2dc4 | ||
|
aee12eba54 | ||
|
a22b4a9162 | ||
|
8138e71668 | ||
|
510f2a5d29 | ||
|
63f2c7e9e0 | ||
|
644e5e8477 | ||
|
cc20088b06 | ||
|
a581425a64 | ||
|
0529fa7d01 | ||
|
464fc88d8e | ||
|
77d499a0a3 | ||
|
a825c6a4b9 | ||
|
220ceefbd2 | ||
|
723026f967 | ||
|
ac69cd6966 | ||
|
6329ef1011 | ||
|
8a631d288f | ||
|
e9a8a3935c | ||
|
8257d459ca | ||
|
6a00c7aa82 | ||
|
c937a6aa68 | ||
|
25eb4f9acd | ||
|
7916d8b7ff | ||
|
c073c8edc9 | ||
|
2fb3b7309c | ||
|
f6474e1abe | ||
|
56dfd77c9b | ||
|
d74d2c2321 | ||
|
b9fe4dceef | ||
|
9d40cdca0f | ||
|
6250cae13c | ||
|
2311fa00e6 | ||
|
c856438d70 | ||
|
c55012176e | ||
|
7dee7cc853 | ||
|
deaa41e36e | ||
|
972d7cb608 | ||
|
852e22bc62 | ||
|
fda095c195 | ||
|
114ba46298 | ||
|
caef26ed4b | ||
|
1fafeac393 | ||
|
6bb8409f09 | ||
|
f74e392cbe | ||
|
99532dda1b | ||
|
e5cee12efe | ||
|
905e394c5f | ||
|
48de893165 | ||
|
2736a58645 | ||
|
0f085f8257 | ||
|
c4e3714d04 | ||
|
84cb1ad5c8 | ||
|
289f4a80f9 | ||
|
ab898f1207 | ||
|
bf2bdcd7a4 | ||
|
f28dc70181 | ||
|
434970647f | ||
|
46b2238838 | ||
|
4f8588a1cd | ||
|
5bf82dbd9a | ||
|
490cfc0b5d | ||
|
9aa9d6c474 |
29 changed files with 1357 additions and 1045 deletions
3
.gitignore
vendored
Normal file
3
.gitignore
vendored
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
config/conf.yml
|
||||||
|
docker-compose.yml
|
||||||
|
|
25
.travis.yml
25
.travis.yml
|
@ -1,7 +1,28 @@
|
||||||
sudo: required
|
sudo: required
|
||||||
language: python
|
language: python
|
||||||
dist: xenial
|
dist: xenial
|
||||||
python: 3.7
|
python: 3.8
|
||||||
|
|
||||||
|
# for docker-compose
|
||||||
|
services:
|
||||||
|
- docker
|
||||||
|
|
||||||
|
# to avoid "well it works on my computer" moments
|
||||||
|
env:
|
||||||
|
- DOCKER_COMPOSE_VERSION=1.25.4
|
||||||
|
|
||||||
|
before_install:
|
||||||
|
# ensure docker-compose version is as specified above
|
||||||
|
- sudo rm /usr/local/bin/docker-compose
|
||||||
|
- curl -L https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_VERSION}/docker-compose-`uname -s`-`uname -m` > docker-compose
|
||||||
|
- chmod +x docker-compose
|
||||||
|
- sudo mv docker-compose /usr/local/bin
|
||||||
|
# refresh docker images
|
||||||
|
- sudo apt-get update
|
||||||
|
|
||||||
|
before_script:
|
||||||
|
- docker-compose up -d
|
||||||
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
include:
|
include:
|
||||||
|
@ -9,7 +30,5 @@ jobs:
|
||||||
name: "Unit Tests"
|
name: "Unit Tests"
|
||||||
install:
|
install:
|
||||||
- pip install -e .
|
- pip install -e .
|
||||||
- mkdir database
|
|
||||||
- mkdir logs
|
|
||||||
script:
|
script:
|
||||||
- python -m unittest
|
- python -m unittest
|
||||||
|
|
34
README.md
34
README.md
|
@ -1,42 +1,45 @@
|
||||||
# LBRY Comment Server
|
# LBRY Comment Server
|
||||||
|
|
||||||
|
[![Build Status](https://travis-ci.com/lbryio/comment-server.svg?branch=master)](https://travis-ci.com/lbryio/comment-server)
|
||||||
[![Maintainability](https://api.codeclimate.com/v1/badges/22f420b8b5f2373fd885/maintainability)](https://codeclimate.com/github/lbryio/comment-server/maintainability)
|
[![Maintainability](https://api.codeclimate.com/v1/badges/22f420b8b5f2373fd885/maintainability)](https://codeclimate.com/github/lbryio/comment-server/maintainability)
|
||||||
|
|
||||||
This is the code for the LBRY Comment Server.
|
|
||||||
Fork it, run it, set it on fire. Up to you.
|
|
||||||
|
|
||||||
|
|
||||||
## Before Installing
|
## Before Installing
|
||||||
|
|
||||||
Comment Deletion requires having the [`lbry-sdk`](https://github.com/lbryio/lbry-sdk)
|
Install the [`lbry-sdk`](https://github.com/lbryio/lbry-sdk)
|
||||||
in order to validate & properly delete comments.
|
in order to validate & properly delete comments.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
#### Installing the server:
|
#### Installing the server:
|
||||||
```bash
|
```bash
|
||||||
|
|
||||||
$ git clone https://github.com/osilkin98/comment-server
|
$ git clone https://github.com/lbryio/comment-server
|
||||||
$ cd comment-server
|
$ cd comment-server
|
||||||
|
|
||||||
# create a virtual environment
|
# create a virtual environment
|
||||||
$ virtualenv --python=python3 venv
|
$ virtualenv --python=python3.8 venv
|
||||||
|
|
||||||
# Enter the virtual environment
|
# Enter the virtual environment
|
||||||
$ source venv/bin/activate
|
$ source venv/bin/activate
|
||||||
|
|
||||||
# install the Server as a Executable Target
|
# Install required dependencies
|
||||||
(venv) $ python setup.py develop
|
(venv) $ pip install -e .
|
||||||
|
|
||||||
|
# Run the server
|
||||||
|
(venv) $ python src/main.py \
|
||||||
|
--port=5921 \ # use a different port besides the default
|
||||||
|
--config=conf.yml \ # provide a custom config file
|
||||||
|
& \ # detach and run the service in the background
|
||||||
```
|
```
|
||||||
|
|
||||||
### Installing the systemd Service Monitor
|
### Installing the systemd Service Monitor
|
||||||
|
|
||||||
As a super-user, copy the .service and .target files to the systemd directory:
|
As a super-user, copy the .service and .target files to the systemd directory:
|
||||||
```bash
|
```bash
|
||||||
$ sudo comment-server/config/comment-server* /etc/systemd/system
|
$ sudo cp config/comment-server* /etc/systemd/system
|
||||||
```
|
```
|
||||||
Then `$ sudo systemctl daemon-reload` to refresh the systemd service files.
|
Then `$ sudo systemctl daemon-reload` to refresh the systemd service files.
|
||||||
|
|
||||||
|
@ -69,16 +72,11 @@ To Test the database, simply run:
|
||||||
|
|
||||||
There are basic tests to run against the server, though they require
|
There are basic tests to run against the server, though they require
|
||||||
that there is a server instance running, though the database
|
that there is a server instance running, though the database
|
||||||
chosen may have to be edited in `config/conf.json`.
|
chosen may have to be edited in `config/conf.yml`.
|
||||||
|
|
||||||
Additionally there are HTTP requests that can be send with whatever
|
Additionally there are HTTP requests that can be send with whatever
|
||||||
software you choose to test the integrity of the comment server.
|
software you choose to test the integrity of the comment server.
|
||||||
|
|
||||||
## Schema
|
|
||||||
|
|
||||||
|
|
||||||
![schema](schema.png)
|
|
||||||
|
|
||||||
|
|
||||||
## Contributing
|
## Contributing
|
||||||
Contributions are welcome, verbosity is encouraged. Please be considerate
|
Contributions are welcome, verbosity is encouraged. Please be considerate
|
||||||
|
@ -109,7 +107,7 @@ For more details, please refer to [lbry.tech/contribute](https://lbry.tech/contr
|
||||||
|
|
||||||
## License
|
## License
|
||||||
This project is licensed by AGPLv3.
|
This project is licensed by AGPLv3.
|
||||||
See [LICENSE](LICENSE.nd) for the full license.
|
See [LICENSE](LICENSE.md) for the full license.
|
||||||
|
|
||||||
## Security
|
## Security
|
||||||
We take security seriously.
|
We take security seriously.
|
||||||
|
|
|
@ -13,5 +13,9 @@
|
||||||
"host": "localhost",
|
"host": "localhost",
|
||||||
"port": 5921,
|
"port": 5921,
|
||||||
"backup_int": 3600,
|
"backup_int": 3600,
|
||||||
"lbrynet": "http://localhost:5279"
|
"lbrynet": "http://localhost:5279",
|
||||||
|
"notifications": {
|
||||||
|
"url": "https://api.lbry.com/event/comment",
|
||||||
|
"auth_token": "token goes here"
|
||||||
|
}
|
||||||
}
|
}
|
31
config/conf.yml
Normal file
31
config/conf.yml
Normal file
|
@ -0,0 +1,31 @@
|
||||||
|
---
|
||||||
|
# for running local-tests without using MySQL for now
|
||||||
|
testing:
|
||||||
|
database: sqlite
|
||||||
|
file: comments.db
|
||||||
|
pragmas:
|
||||||
|
journal_mode: wal
|
||||||
|
cache_size: 64000
|
||||||
|
foreign_keys: 0
|
||||||
|
ignore_check_constraints: 1
|
||||||
|
synchronous: 0
|
||||||
|
|
||||||
|
# actual database should be running MySQL
|
||||||
|
production:
|
||||||
|
charset: utf8mb4
|
||||||
|
database: mysql
|
||||||
|
name: social
|
||||||
|
user: lbry
|
||||||
|
password: lbry
|
||||||
|
host: localhost
|
||||||
|
port: 3306
|
||||||
|
|
||||||
|
mode: production
|
||||||
|
logging:
|
||||||
|
format: "%(asctime)s | %(levelname)s | %(name)s | %(module)s.%(funcName)s:%(lineno)d
|
||||||
|
| %(message)s"
|
||||||
|
aiohttp_format: "%(asctime)s | %(levelname)s | %(name)s | %(message)s"
|
||||||
|
datefmt: "%Y-%m-%d %H:%M:%S"
|
||||||
|
host: localhost
|
||||||
|
port: 5921
|
||||||
|
lbrynet: http://localhost:5279
|
25
docker-compose.yml
Normal file
25
docker-compose.yml
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
version: "3.7"
|
||||||
|
services:
|
||||||
|
###########
|
||||||
|
## MySQL ##
|
||||||
|
###########
|
||||||
|
mysql:
|
||||||
|
image: mysql/mysql-server:5.7.27
|
||||||
|
restart: "no"
|
||||||
|
command: --character_set_server=utf8mb4 --max_allowed_packet=1073741824
|
||||||
|
ports:
|
||||||
|
- "3306:3306"
|
||||||
|
environment:
|
||||||
|
- MYSQL_ALLOW_EMPTY_PASSWORD=true
|
||||||
|
- MYSQL_DATABASE=social
|
||||||
|
- MYSQL_USER=lbry
|
||||||
|
- MYSQL_PASSWORD=lbry
|
||||||
|
- MYSQL_LOG_CONSOLE=true
|
||||||
|
#############
|
||||||
|
## Adminer ##
|
||||||
|
#############
|
||||||
|
adminer:
|
||||||
|
image: adminer
|
||||||
|
restart: always
|
||||||
|
ports:
|
||||||
|
- 8080:8080
|
0
logs/__init__.py
Normal file
0
logs/__init__.py
Normal file
67
scripts/aggregate_comments_by_claim.py
Normal file
67
scripts/aggregate_comments_by_claim.py
Normal file
|
@ -0,0 +1,67 @@
|
||||||
|
import asyncio
|
||||||
|
import json
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
|
||||||
|
from src.database.queries import obtain_connection
|
||||||
|
|
||||||
|
|
||||||
|
async def main():
|
||||||
|
conn = obtain_connection('your_path_here')
|
||||||
|
with conn:
|
||||||
|
curs = conn.cursor()
|
||||||
|
res = curs.execute("""
|
||||||
|
SELECT DISTINCT LbryClaimId claim_id FROM COMMENT;
|
||||||
|
""").fetchall()
|
||||||
|
rows = [tuple(r)[0] for r in res]
|
||||||
|
claims = {cid: {"comments": []} for cid in rows}
|
||||||
|
|
||||||
|
comments = curs.execute("""
|
||||||
|
SELECT channel_name, comment, comment_id, claim_id, timestamp
|
||||||
|
FROM COMMENTS_ON_CLAIMS
|
||||||
|
""").fetchall()
|
||||||
|
comments = [dict(r) for r in comments]
|
||||||
|
while len(comments) > 0:
|
||||||
|
c = comments.pop()
|
||||||
|
cid = c.pop('claim_id')
|
||||||
|
claims[cid]['comments'].append(c)
|
||||||
|
|
||||||
|
lbrynet = 'http://localhost:5279'
|
||||||
|
|
||||||
|
async with aiohttp.ClientSession() as client:
|
||||||
|
i = 0
|
||||||
|
for cid, data in claims.items():
|
||||||
|
body = {
|
||||||
|
'method': 'claim_search',
|
||||||
|
'params': {
|
||||||
|
'claim_id': cid,
|
||||||
|
'no_totals': True
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async with client.post(lbrynet, json=body) as resp:
|
||||||
|
res = (await resp.json())
|
||||||
|
|
||||||
|
try:
|
||||||
|
res = res['result']['items']
|
||||||
|
print(f'{i} - ok')
|
||||||
|
if res:
|
||||||
|
data.update({
|
||||||
|
'name': res[0]['name'],
|
||||||
|
'permanent_url': res[0]['permanent_url']
|
||||||
|
})
|
||||||
|
except KeyError:
|
||||||
|
print(f'{i}: broke')
|
||||||
|
# await asyncio.sleep(1)
|
||||||
|
i += 1
|
||||||
|
return claims
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
loop = asyncio.get_event_loop()
|
||||||
|
claims = loop.run_until_complete(main())
|
||||||
|
# print(claims)
|
||||||
|
with open('comments_on_claims.json', 'w') as fp:
|
||||||
|
json.dump(claims, fp, indent=2)
|
||||||
|
|
||||||
|
|
|
@ -1,84 +0,0 @@
|
||||||
import sqlite3
|
|
||||||
import time
|
|
||||||
|
|
||||||
import faker
|
|
||||||
from faker.providers import misc
|
|
||||||
|
|
||||||
fake = faker.Faker()
|
|
||||||
fake.add_provider(misc)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
song_time = """One, two, three!
|
|
||||||
My baby don't mess around
|
|
||||||
'Cause she loves me so
|
|
||||||
This I know fo sho!
|
|
||||||
But does she really wanna
|
|
||||||
But can't stand to see me walk out tha door
|
|
||||||
Don't try to fight the feeling
|
|
||||||
Because the thought alone is killin' me right now
|
|
||||||
Thank God for Mom and Dad
|
|
||||||
For sticking to together
|
|
||||||
Like we don't know how
|
|
||||||
Hey ya! Hey ya!
|
|
||||||
Hey ya! Hey ya!
|
|
||||||
Hey ya! Hey ya!
|
|
||||||
Hey ya! Hey ya!
|
|
||||||
You think you've got it
|
|
||||||
Oh, you think you've got it
|
|
||||||
But got it just don't get it when there's nothin' at all
|
|
||||||
We get together
|
|
||||||
Oh, we get together
|
|
||||||
But separate's always better when there's feelings involved
|
|
||||||
Know what they say -its
|
|
||||||
Nothing lasts forever!
|
|
||||||
Then what makes it, then what makes it
|
|
||||||
Then what makes it, then what makes it
|
|
||||||
Then what makes love the exception?
|
|
||||||
So why, oh, why, oh
|
|
||||||
Why, oh, why, oh, why, oh
|
|
||||||
Are we still in denial when we know we're not happy here
|
|
||||||
Hey ya! (y'all don't want to here me, ya just want to dance) Hey ya!
|
|
||||||
Don't want to meet your daddy (oh ohh), just want you in my caddy (oh ohh)
|
|
||||||
Hey ya! (oh, oh!) Hey ya! (oh, oh!)
|
|
||||||
Don't want to meet your momma, just want to make you cum-a (oh, oh!)
|
|
||||||
I'm (oh, oh) I'm (oh, oh) I'm just being honest! (oh, oh)
|
|
||||||
I'm just being honest!
|
|
||||||
Hey! alright now! alright now, fellas!
|
|
||||||
Yea?
|
|
||||||
Now, what cooler than being cool?
|
|
||||||
Ice cold!
|
|
||||||
I can't hear ya! I say what's, what's cooler than being cool?
|
|
||||||
Ice cold!
|
|
||||||
Alright alright alright alright alright alright alright alright alright alright alright alright alright alright alright alright!
|
|
||||||
Okay, now ladies!
|
|
||||||
Yea?
|
|
||||||
Now we gonna break this thang down for just a few seconds
|
|
||||||
Now don't have me break this thang down for nothin'
|
|
||||||
I want to see you on your badest behavior!
|
|
||||||
Lend me some sugar, I am your neighbor!
|
|
||||||
Ah! Here we go now,
|
|
||||||
Shake it, shake it, shake it, shake it, shake it
|
|
||||||
Shake it, shake it, shake it, shake it
|
|
||||||
Shake it like a Polaroid picture! Hey ya!
|
|
||||||
Shake it, shake it, shake it, shake it, shake it
|
|
||||||
Shake it, shake it, shake it, suga!
|
|
||||||
Shake it like a Polaroid picture!
|
|
||||||
Now all the Beyonce's, and Lucy Lu's, and baby dolls
|
|
||||||
Get on tha floor get on tha floor!
|
|
||||||
Shake it like a Polaroid picture!
|
|
||||||
Oh, you! oh, you!
|
|
||||||
Hey ya!(oh, oh) Hey ya!(oh, oh)
|
|
||||||
Hey ya!(oh, oh) Hey ya!(oh, oh)
|
|
||||||
Hey ya!(oh, oh) Hey ya!(oh, oh)"""
|
|
||||||
|
|
||||||
song = song_time.split('\n')
|
|
||||||
claim_id = '2aa106927b733e2602ffb565efaccc78c2ed89df'
|
|
||||||
run_len = [(fake.sha256(), song_time, claim_id, str(int(time.time()))) for k in range(5000)]
|
|
||||||
|
|
||||||
conn = sqlite3.connect('database/default_test.db')
|
|
||||||
with conn:
|
|
||||||
curs = conn.executemany("""
|
|
||||||
INSERT INTO COMMENT(CommentId, Body, LbryClaimId, Timestamp) VALUES (?, ?, ?, ?)
|
|
||||||
""", run_len)
|
|
||||||
print(f'rows changed: {curs.rowcount}')
|
|
|
@ -1,13 +1,15 @@
|
||||||
import binascii
|
import binascii
|
||||||
|
import logging
|
||||||
import hashlib
|
import hashlib
|
||||||
import json
|
import json
|
||||||
|
# todo: remove sqlite3 as a dependency
|
||||||
import sqlite3
|
import sqlite3
|
||||||
import asyncio
|
import asyncio
|
||||||
|
|
||||||
import aiohttp
|
import aiohttp
|
||||||
|
|
||||||
from src.server.misc import is_signature_valid, get_encoded_signature
|
from src.server.validation import is_signature_valid, get_encoded_signature
|
||||||
from src.server.database import clean
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
async def request_lbrynet(url, method, **params):
|
async def request_lbrynet(url, method, **params):
|
||||||
|
|
12
setup.py
12
setup.py
|
@ -14,15 +14,17 @@ setup(
|
||||||
data_files=[('config', ['config/conf.json',])],
|
data_files=[('config', ['config/conf.json',])],
|
||||||
include_package_data=True,
|
include_package_data=True,
|
||||||
install_requires=[
|
install_requires=[
|
||||||
|
'pymysql',
|
||||||
|
'pyyaml',
|
||||||
'Faker>=1.0.7',
|
'Faker>=1.0.7',
|
||||||
'asyncio>=3.4.3',
|
'asyncio',
|
||||||
'aiohttp==3.5.4',
|
'aiohttp',
|
||||||
'aiojobs==0.2.2',
|
'aiojobs',
|
||||||
'ecdsa==0.13',
|
'ecdsa>=0.13.3',
|
||||||
'cryptography==2.5',
|
'cryptography==2.5',
|
||||||
'aiosqlite==0.10.0',
|
|
||||||
'PyNaCl>=1.3.0',
|
'PyNaCl>=1.3.0',
|
||||||
'requests',
|
'requests',
|
||||||
'cython',
|
'cython',
|
||||||
|
'peewee'
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
|
|
@ -1,76 +1,50 @@
|
||||||
PRAGMA FOREIGN_KEYS = ON;
|
USE `social`;
|
||||||
|
ALTER DATABASE `social`
|
||||||
|
DEFAULT CHARACTER SET utf8mb4
|
||||||
|
DEFAULT COLLATE utf8mb4_unicode_ci;
|
||||||
|
|
||||||
-- Although I know this file is unnecessary, I like keeping it around.
|
DROP TABLE IF EXISTS `CHANNEL`;
|
||||||
|
CREATE TABLE `CHANNEL` (
|
||||||
|
`claimid` VARCHAR(40) NOT NULL,
|
||||||
|
`name` CHAR(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
|
||||||
|
CONSTRAINT `channel_pk` PRIMARY KEY (`claimid`)
|
||||||
|
)
|
||||||
|
CHARACTER SET utf8mb4
|
||||||
|
COLLATE utf8mb4_unicode_ci;
|
||||||
|
|
||||||
-- I'm not gonna remove it.
|
DROP TABLE IF EXISTS `COMMENT`;
|
||||||
|
CREATE TABLE `COMMENT` (
|
||||||
|
-- should be changed to CHAR(64)
|
||||||
|
`commentid` CHAR(64) NOT NULL,
|
||||||
|
-- should be changed to CHAR(40)
|
||||||
|
`lbryclaimid` CHAR(40) NOT NULL,
|
||||||
|
-- can be null, so idk if this should be char(40)
|
||||||
|
`channelid` CHAR(40) DEFAULT NULL,
|
||||||
|
`body` TEXT
|
||||||
|
CHARACTER SET utf8mb4
|
||||||
|
COLLATE utf8mb4_unicode_ci
|
||||||
|
NOT NULL,
|
||||||
|
`parentid` CHAR(64) DEFAULT NULL,
|
||||||
|
`signature` CHAR(128) DEFAULT NULL,
|
||||||
|
-- 22 chars long is prolly enough
|
||||||
|
`signingts` VARCHAR(22) DEFAULT NULL,
|
||||||
|
|
||||||
-- tables
|
`timestamp` INTEGER NOT NULL,
|
||||||
CREATE TABLE IF NOT EXISTS COMMENT
|
-- there's no way that the timestamp will ever reach 22 characters
|
||||||
(
|
`ishidden` BOOLEAN DEFAULT FALSE,
|
||||||
CommentId TEXT NOT NULL,
|
CONSTRAINT `COMMENT_PRIMARY_KEY` PRIMARY KEY (`commentid`)
|
||||||
LbryClaimId TEXT NOT NULL,
|
-- setting null implies comment is top level
|
||||||
ChannelId TEXT DEFAULT NULL,
|
)
|
||||||
Body TEXT NOT NULL,
|
CHARACTER SET utf8mb4
|
||||||
ParentId TEXT DEFAULT NULL,
|
COLLATE utf8mb4_unicode_ci;
|
||||||
Signature TEXT DEFAULT NULL,
|
|
||||||
Timestamp INTEGER NOT NULL,
|
|
||||||
SigningTs TEXT DEFAULT NULL,
|
|
||||||
IsHidden BOOLEAN NOT NULL DEFAULT FALSE,
|
|
||||||
CONSTRAINT COMMENT_PRIMARY_KEY PRIMARY KEY (CommentId) ON CONFLICT IGNORE,
|
|
||||||
CONSTRAINT COMMENT_SIGNATURE_SK UNIQUE (Signature) ON CONFLICT ABORT,
|
|
||||||
CONSTRAINT COMMENT_CHANNEL_FK FOREIGN KEY (ChannelId) REFERENCES CHANNEL (ClaimId)
|
|
||||||
ON DELETE NO ACTION ON UPDATE NO ACTION,
|
|
||||||
CONSTRAINT COMMENT_PARENT_FK FOREIGN KEY (ParentId) REFERENCES COMMENT (CommentId)
|
|
||||||
ON UPDATE CASCADE ON DELETE NO ACTION -- setting null implies comment is top level
|
|
||||||
);
|
|
||||||
|
|
||||||
-- ALTER TABLE COMMENT ADD COLUMN IsHidden BOOLEAN DEFAULT (FALSE);
|
|
||||||
-- ALTER TABLE COMMENT ADD COLUMN SigningTs TEXT DEFAULT NULL;
|
|
||||||
|
|
||||||
-- DROP TABLE IF EXISTS CHANNEL;
|
|
||||||
CREATE TABLE IF NOT EXISTS CHANNEL
|
|
||||||
(
|
|
||||||
ClaimId TEXT NOT NULL,
|
|
||||||
Name TEXT NOT NULL,
|
|
||||||
CONSTRAINT CHANNEL_PK PRIMARY KEY (ClaimId)
|
|
||||||
ON CONFLICT IGNORE
|
|
||||||
);
|
|
||||||
|
|
||||||
|
|
||||||
-- indexes
|
ALTER TABLE COMMENT
|
||||||
-- DROP INDEX IF EXISTS COMMENT_CLAIM_INDEX;
|
ADD CONSTRAINT `comment_channel_fk` FOREIGN KEY (`channelid`) REFERENCES `CHANNEL` (`claimid`)
|
||||||
-- CREATE INDEX IF NOT EXISTS CLAIM_COMMENT_INDEX ON COMMENT (LbryClaimId, CommentId);
|
ON DELETE CASCADE ON UPDATE CASCADE,
|
||||||
|
ADD CONSTRAINT `comment_parent_fk` FOREIGN KEY (`parentid`) REFERENCES `COMMENT` (`commentid`)
|
||||||
|
ON UPDATE CASCADE ON DELETE CASCADE
|
||||||
|
;
|
||||||
|
|
||||||
-- CREATE INDEX IF NOT EXISTS CHANNEL_COMMENT_INDEX ON COMMENT (ChannelId, CommentId);
|
CREATE INDEX `claim_comment_index` ON `COMMENT` (`lbryclaimid`, `commentid`);
|
||||||
|
CREATE INDEX `channel_comment_index` ON `COMMENT` (`channelid`, `commentid`);
|
||||||
-- VIEWS
|
|
||||||
CREATE VIEW IF NOT EXISTS COMMENTS_ON_CLAIMS AS SELECT
|
|
||||||
C.CommentId AS comment_id,
|
|
||||||
C.Body AS comment,
|
|
||||||
C.LbryClaimId AS claim_id,
|
|
||||||
C.Timestamp AS timestamp,
|
|
||||||
CHAN.Name AS channel_name,
|
|
||||||
CHAN.ClaimId AS channel_id,
|
|
||||||
('lbry://' || CHAN.Name || '#' || CHAN.ClaimId) AS channel_url,
|
|
||||||
C.Signature AS signature,
|
|
||||||
C.SigningTs AS signing_ts,
|
|
||||||
C.ParentId AS parent_id,
|
|
||||||
C.IsHidden AS is_hidden
|
|
||||||
FROM COMMENT AS C
|
|
||||||
LEFT OUTER JOIN CHANNEL CHAN ON C.ChannelId = CHAN.ClaimId
|
|
||||||
ORDER BY C.Timestamp DESC;
|
|
||||||
|
|
||||||
|
|
||||||
DROP VIEW IF EXISTS COMMENT_REPLIES;
|
|
||||||
CREATE VIEW IF NOT EXISTS COMMENT_REPLIES (Author, CommentBody, ParentAuthor, ParentCommentBody) AS
|
|
||||||
SELECT AUTHOR.Name, OG.Body, PCHAN.Name, PARENT.Body
|
|
||||||
FROM COMMENT AS OG
|
|
||||||
JOIN COMMENT AS PARENT
|
|
||||||
ON OG.ParentId = PARENT.CommentId
|
|
||||||
JOIN CHANNEL AS PCHAN ON PARENT.ChannelId = PCHAN.ClaimId
|
|
||||||
JOIN CHANNEL AS AUTHOR ON OG.ChannelId = AUTHOR.ClaimId
|
|
||||||
ORDER BY OG.Timestamp;
|
|
||||||
|
|
||||||
-- this is the default channel for anyone who wants to publish anonymously
|
|
||||||
-- INSERT INTO CHANNEL
|
|
||||||
-- VALUES ('9cb713f01bf247a0e03170b5ed00d5161340c486', '@Anonymous');
|
|
||||||
|
|
217
src/database/models.py
Normal file
217
src/database/models.py
Normal file
|
@ -0,0 +1,217 @@
|
||||||
|
import json
|
||||||
|
import time
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import math
|
||||||
|
import typing
|
||||||
|
|
||||||
|
from peewee import *
|
||||||
|
import nacl.hash
|
||||||
|
|
||||||
|
from src.server.validation import is_valid_base_comment
|
||||||
|
from src.misc import clean
|
||||||
|
|
||||||
|
|
||||||
|
class Channel(Model):
|
||||||
|
claim_id = FixedCharField(column_name='claimid', primary_key=True, max_length=40)
|
||||||
|
name = CharField(column_name='name', max_length=255)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
table_name = 'CHANNEL'
|
||||||
|
|
||||||
|
|
||||||
|
class Comment(Model):
|
||||||
|
comment = TextField(column_name='body')
|
||||||
|
channel = ForeignKeyField(
|
||||||
|
backref='comments',
|
||||||
|
column_name='channelid',
|
||||||
|
field='claim_id',
|
||||||
|
model=Channel,
|
||||||
|
null=True
|
||||||
|
)
|
||||||
|
comment_id = FixedCharField(column_name='commentid', primary_key=True, max_length=64)
|
||||||
|
is_hidden = BooleanField(column_name='ishidden', constraints=[SQL("DEFAULT 0")])
|
||||||
|
claim_id = FixedCharField(max_length=40, column_name='lbryclaimid')
|
||||||
|
parent = ForeignKeyField(
|
||||||
|
column_name='ParentId',
|
||||||
|
field='comment_id',
|
||||||
|
model='self',
|
||||||
|
null=True,
|
||||||
|
backref='replies'
|
||||||
|
)
|
||||||
|
signature = FixedCharField(max_length=128, column_name='signature', null=True, unique=True)
|
||||||
|
signing_ts = TextField(column_name='signingts', null=True)
|
||||||
|
timestamp = IntegerField(column_name='timestamp')
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
table_name = 'COMMENT'
|
||||||
|
indexes = (
|
||||||
|
(('channel', 'comment_id'), False),
|
||||||
|
(('claim_id', 'comment_id'), False),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
FIELDS = {
|
||||||
|
'comment': Comment.comment,
|
||||||
|
'comment_id': Comment.comment_id,
|
||||||
|
'claim_id': Comment.claim_id,
|
||||||
|
'timestamp': Comment.timestamp,
|
||||||
|
'signature': Comment.signature,
|
||||||
|
'signing_ts': Comment.signing_ts,
|
||||||
|
'is_hidden': Comment.is_hidden,
|
||||||
|
'parent_id': Comment.parent.alias('parent_id'),
|
||||||
|
'channel_id': Channel.claim_id.alias('channel_id'),
|
||||||
|
'channel_name': Channel.name.alias('channel_name'),
|
||||||
|
'channel_url': ('lbry://' + Channel.name + '#' + Channel.claim_id).alias('channel_url')
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def comment_list(claim_id: str = None, parent_id: str = None,
|
||||||
|
top_level: bool = False, exclude_mode: str = None,
|
||||||
|
page: int = 1, page_size: int = 50, expressions=None,
|
||||||
|
select_fields: list = None, exclude_fields: list = None) -> dict:
|
||||||
|
fields = FIELDS.keys()
|
||||||
|
if exclude_fields:
|
||||||
|
fields -= set(exclude_fields)
|
||||||
|
if select_fields:
|
||||||
|
fields &= set(select_fields)
|
||||||
|
attributes = [FIELDS[field] for field in fields]
|
||||||
|
query = Comment.select(*attributes)
|
||||||
|
|
||||||
|
# todo: allow this process to be more automated, so it can just be an expression
|
||||||
|
if claim_id:
|
||||||
|
query = query.where(Comment.claim_id == claim_id)
|
||||||
|
if top_level:
|
||||||
|
query = query.where(Comment.parent.is_null())
|
||||||
|
|
||||||
|
if parent_id:
|
||||||
|
query = query.where(Comment.ParentId == parent_id)
|
||||||
|
|
||||||
|
if exclude_mode:
|
||||||
|
show_hidden = exclude_mode.lower() == 'hidden'
|
||||||
|
query = query.where((Comment.is_hidden == show_hidden))
|
||||||
|
|
||||||
|
if expressions:
|
||||||
|
query = query.where(expressions)
|
||||||
|
|
||||||
|
total = query.count()
|
||||||
|
query = (query
|
||||||
|
.join(Channel, JOIN.LEFT_OUTER)
|
||||||
|
.order_by(Comment.timestamp.desc())
|
||||||
|
.paginate(page, page_size))
|
||||||
|
items = [clean(item) for item in query.dicts()]
|
||||||
|
# has_hidden_comments is deprecated
|
||||||
|
data = {
|
||||||
|
'page': page,
|
||||||
|
'page_size': page_size,
|
||||||
|
'total_pages': math.ceil(total / page_size),
|
||||||
|
'total_items': total,
|
||||||
|
'items': items,
|
||||||
|
'has_hidden_comments': exclude_mode is not None and exclude_mode == 'hidden',
|
||||||
|
}
|
||||||
|
return data
|
||||||
|
|
||||||
|
|
||||||
|
def get_comment(comment_id: str) -> dict:
|
||||||
|
try:
|
||||||
|
comment = comment_list(expressions=(Comment.comment_id == comment_id), page_size=1).get('items').pop()
|
||||||
|
except IndexError:
|
||||||
|
raise ValueError(f'Comment does not exist with id {comment_id}')
|
||||||
|
else:
|
||||||
|
return comment
|
||||||
|
|
||||||
|
|
||||||
|
def create_comment_id(comment: str, channel_id: str, timestamp: int):
|
||||||
|
# We convert the timestamp from seconds into minutes
|
||||||
|
# to prevent spammers from commenting the same BS everywhere.
|
||||||
|
nearest_minute = str(math.floor(timestamp / 60))
|
||||||
|
|
||||||
|
# don't use claim_id for the comment_id anymore so comments
|
||||||
|
# are not unique to just one claim
|
||||||
|
prehash = b':'.join([
|
||||||
|
comment.encode(),
|
||||||
|
channel_id.encode(),
|
||||||
|
nearest_minute.encode()
|
||||||
|
])
|
||||||
|
return nacl.hash.sha256(prehash).decode()
|
||||||
|
|
||||||
|
|
||||||
|
def create_comment(comment: str = None, claim_id: str = None,
|
||||||
|
parent_id: str = None, channel_id: str = None,
|
||||||
|
channel_name: str = None, signature: str = None,
|
||||||
|
signing_ts: str = None) -> dict:
|
||||||
|
if not is_valid_base_comment(
|
||||||
|
comment=comment,
|
||||||
|
claim_id=claim_id,
|
||||||
|
parent_id=parent_id,
|
||||||
|
channel_id=channel_id,
|
||||||
|
channel_name=channel_name,
|
||||||
|
signature=signature,
|
||||||
|
signing_ts=signing_ts
|
||||||
|
):
|
||||||
|
raise ValueError('Invalid Parameters given for comment')
|
||||||
|
|
||||||
|
channel, _ = Channel.get_or_create(name=channel_name, claim_id=channel_id)
|
||||||
|
if parent_id and not claim_id:
|
||||||
|
parent: Comment = Comment.get_by_id(parent_id)
|
||||||
|
claim_id = parent.claim_id
|
||||||
|
|
||||||
|
timestamp = int(time.time())
|
||||||
|
comment_id = create_comment_id(comment, channel_id, timestamp)
|
||||||
|
new_comment = Comment.create(
|
||||||
|
claim_id=claim_id,
|
||||||
|
comment_id=comment_id,
|
||||||
|
comment=comment,
|
||||||
|
parent=parent_id,
|
||||||
|
channel=channel,
|
||||||
|
signature=signature,
|
||||||
|
signing_ts=signing_ts,
|
||||||
|
timestamp=timestamp
|
||||||
|
)
|
||||||
|
return get_comment(new_comment.comment_id)
|
||||||
|
|
||||||
|
|
||||||
|
def delete_comment(comment_id: str) -> bool:
|
||||||
|
try:
|
||||||
|
comment: Comment = Comment.get_by_id(comment_id)
|
||||||
|
except DoesNotExist as e:
|
||||||
|
raise ValueError from e
|
||||||
|
else:
|
||||||
|
return 0 < comment.delete_instance(True, delete_nullable=True)
|
||||||
|
|
||||||
|
|
||||||
|
def edit_comment(comment_id: str, new_comment: str, new_sig: str, new_ts: str) -> bool:
|
||||||
|
try:
|
||||||
|
comment: Comment = Comment.get_by_id(comment_id)
|
||||||
|
except DoesNotExist as e:
|
||||||
|
raise ValueError from e
|
||||||
|
else:
|
||||||
|
comment.comment = new_comment
|
||||||
|
comment.signature = new_sig
|
||||||
|
comment.signing_ts = new_ts
|
||||||
|
|
||||||
|
# todo: add a 'last-modified' timestamp
|
||||||
|
comment.timestamp = int(time.time())
|
||||||
|
return comment.save() > 0
|
||||||
|
|
||||||
|
|
||||||
|
def set_hidden_flag(comment_ids: typing.List[str], hidden=True) -> bool:
|
||||||
|
# sets `is_hidden` flag for all `comment_ids` to the `hidden` param
|
||||||
|
update = (Comment
|
||||||
|
.update(is_hidden=hidden)
|
||||||
|
.where(Comment.comment_id.in_(comment_ids)))
|
||||||
|
return update.execute() > 0
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
logger = logging.getLogger('peewee')
|
||||||
|
logger.addHandler(logging.StreamHandler())
|
||||||
|
logger.setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
comments = comment_list(
|
||||||
|
page_size=20,
|
||||||
|
expressions=((Comment.timestamp < 1583272089) &
|
||||||
|
(Comment.claim_id ** '420%'))
|
||||||
|
)
|
||||||
|
|
||||||
|
print(json.dumps(comments, indent=4))
|
|
@ -1,243 +0,0 @@
|
||||||
import atexit
|
|
||||||
import logging
|
|
||||||
import sqlite3
|
|
||||||
import time
|
|
||||||
import typing
|
|
||||||
|
|
||||||
import math
|
|
||||||
import nacl.hash
|
|
||||||
|
|
||||||
from src.database.schema import CREATE_TABLES_QUERY
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
SELECT_COMMENTS_ON_CLAIMS = """
|
|
||||||
SELECT comment, comment_id, channel_name, channel_id, channel_url,
|
|
||||||
timestamp, signature, signing_ts, parent_id, is_hidden
|
|
||||||
FROM COMMENTS_ON_CLAIMS
|
|
||||||
"""
|
|
||||||
|
|
||||||
SELECT_COMMENTS_ON_CLAIMS_CLAIMID = """
|
|
||||||
SELECT comment, comment_id, claim_id, channel_name, channel_id, channel_url,
|
|
||||||
timestamp, signature, signing_ts, parent_id, is_hidden
|
|
||||||
FROM COMMENTS_ON_CLAIMS
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
def clean(thing: dict) -> dict:
|
|
||||||
if 'is_hidden' in thing:
|
|
||||||
thing.update({'is_hidden': bool(thing['is_hidden'])})
|
|
||||||
return {k: v for k, v in thing.items() if v is not None}
|
|
||||||
|
|
||||||
|
|
||||||
def obtain_connection(filepath: str = None, row_factory: bool = True):
|
|
||||||
connection = sqlite3.connect(filepath)
|
|
||||||
if row_factory:
|
|
||||||
connection.row_factory = sqlite3.Row
|
|
||||||
return connection
|
|
||||||
|
|
||||||
|
|
||||||
def get_claim_comments(conn: sqlite3.Connection, claim_id: str, parent_id: str = None,
|
|
||||||
page: int = 1, page_size: int = 50, top_level=False):
|
|
||||||
with conn:
|
|
||||||
if top_level:
|
|
||||||
results = [clean(dict(row)) for row in conn.execute(
|
|
||||||
SELECT_COMMENTS_ON_CLAIMS + " WHERE claim_id = ? AND parent_id IS NULL LIMIT ? OFFSET ?",
|
|
||||||
(claim_id, page_size, page_size * (page - 1))
|
|
||||||
)]
|
|
||||||
count = conn.execute(
|
|
||||||
"SELECT COUNT(*) FROM COMMENTS_ON_CLAIMS WHERE claim_id = ? AND parent_id IS NULL",
|
|
||||||
(claim_id,)
|
|
||||||
)
|
|
||||||
elif parent_id is None:
|
|
||||||
results = [clean(dict(row)) for row in conn.execute(
|
|
||||||
SELECT_COMMENTS_ON_CLAIMS + "WHERE claim_id = ? LIMIT ? OFFSET ? ",
|
|
||||||
(claim_id, page_size, page_size * (page - 1))
|
|
||||||
)]
|
|
||||||
count = conn.execute(
|
|
||||||
"SELECT COUNT(*) FROM COMMENTS_ON_CLAIMS WHERE claim_id = ?",
|
|
||||||
(claim_id,)
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
results = [clean(dict(row)) for row in conn.execute(
|
|
||||||
SELECT_COMMENTS_ON_CLAIMS + "WHERE claim_id = ? AND parent_id = ? LIMIT ? OFFSET ? ",
|
|
||||||
(claim_id, parent_id, page_size, page_size * (page - 1))
|
|
||||||
)]
|
|
||||||
count = conn.execute(
|
|
||||||
"SELECT COUNT(*) FROM COMMENTS_ON_CLAIMS WHERE claim_id = ? AND parent_id = ?",
|
|
||||||
(claim_id, parent_id)
|
|
||||||
)
|
|
||||||
count = tuple(count.fetchone())[0]
|
|
||||||
return {
|
|
||||||
'items': results,
|
|
||||||
'page': page,
|
|
||||||
'page_size': page_size,
|
|
||||||
'total_pages': math.ceil(count / page_size),
|
|
||||||
'total_items': count,
|
|
||||||
'has_hidden_comments': claim_has_hidden_comments(conn, claim_id)
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def get_claim_hidden_comments(conn: sqlite3.Connection, claim_id: str, hidden=True, page=1, page_size=50):
|
|
||||||
with conn:
|
|
||||||
results = conn.execute(
|
|
||||||
SELECT_COMMENTS_ON_CLAIMS + "WHERE claim_id = ? AND is_hidden IS ? LIMIT ? OFFSET ?",
|
|
||||||
(claim_id, hidden, page_size, page_size * (page - 1))
|
|
||||||
)
|
|
||||||
count = conn.execute(
|
|
||||||
"SELECT COUNT(*) FROM COMMENTS_ON_CLAIMS WHERE claim_id = ? AND is_hidden IS ?", (claim_id, hidden)
|
|
||||||
)
|
|
||||||
results = [clean(dict(row)) for row in results.fetchall()]
|
|
||||||
count = tuple(count.fetchone())[0]
|
|
||||||
|
|
||||||
return {
|
|
||||||
'items': results,
|
|
||||||
'page': page,
|
|
||||||
'page_size': page_size,
|
|
||||||
'total_pages': math.ceil(count/page_size),
|
|
||||||
'total_items': count,
|
|
||||||
'has_hidden_comments': claim_has_hidden_comments(conn, claim_id)
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def claim_has_hidden_comments(conn, claim_id):
|
|
||||||
with conn:
|
|
||||||
result = conn.execute(
|
|
||||||
"SELECT COUNT(DISTINCT is_hidden) FROM COMMENTS_ON_CLAIMS WHERE claim_id = ? AND is_hidden IS 1",
|
|
||||||
(claim_id,)
|
|
||||||
)
|
|
||||||
return bool(tuple(result.fetchone())[0])
|
|
||||||
|
|
||||||
|
|
||||||
def insert_comment(conn: sqlite3.Connection, claim_id: str, comment: str, parent_id: str = None,
|
|
||||||
channel_id: str = None, signature: str = None, signing_ts: str = None) -> str:
|
|
||||||
timestamp = int(time.time())
|
|
||||||
prehash = b':'.join((claim_id.encode(), comment.encode(), str(timestamp).encode(),))
|
|
||||||
comment_id = nacl.hash.sha256(prehash).decode()
|
|
||||||
with conn:
|
|
||||||
conn.execute(
|
|
||||||
"""
|
|
||||||
INSERT INTO COMMENT(CommentId, LbryClaimId, ChannelId, Body, ParentId,
|
|
||||||
Timestamp, Signature, SigningTs)
|
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
|
||||||
""",
|
|
||||||
(comment_id, claim_id, channel_id, comment, parent_id, timestamp, signature, signing_ts)
|
|
||||||
)
|
|
||||||
logging.info('Inserted Comment into DB, `comment_id`: %s', comment_id)
|
|
||||||
return comment_id
|
|
||||||
|
|
||||||
|
|
||||||
def get_comment_or_none(conn: sqlite3.Connection, comment_id: str) -> dict:
|
|
||||||
with conn:
|
|
||||||
curry = conn.execute(SELECT_COMMENTS_ON_CLAIMS_CLAIMID + "WHERE comment_id = ?", (comment_id,))
|
|
||||||
thing = curry.fetchone()
|
|
||||||
return clean(dict(thing)) if thing else None
|
|
||||||
|
|
||||||
|
|
||||||
def get_comment_ids(conn: sqlite3.Connection, claim_id: str, parent_id: str = None, page=1, page_size=50):
|
|
||||||
""" Just return a list of the comment IDs that are associated with the given claim_id.
|
|
||||||
If get_all is specified then it returns all the IDs, otherwise only the IDs at that level.
|
|
||||||
if parent_id is left null then it only returns the top level comments.
|
|
||||||
|
|
||||||
For pagination the parameters are:
|
|
||||||
get_all XOR (page_size + page)
|
|
||||||
"""
|
|
||||||
with conn:
|
|
||||||
if parent_id is None:
|
|
||||||
curs = conn.execute("""
|
|
||||||
SELECT comment_id FROM COMMENTS_ON_CLAIMS
|
|
||||||
WHERE claim_id = ? AND parent_id IS NULL LIMIT ? OFFSET ?
|
|
||||||
""", (claim_id, page_size, page_size*abs(page - 1),)
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
curs = conn.execute("""
|
|
||||||
SELECT comment_id FROM COMMENTS_ON_CLAIMS
|
|
||||||
WHERE claim_id = ? AND parent_id = ? LIMIT ? OFFSET ?
|
|
||||||
""", (claim_id, parent_id, page_size, page_size * abs(page - 1),)
|
|
||||||
)
|
|
||||||
return [tuple(row)[0] for row in curs.fetchall()]
|
|
||||||
|
|
||||||
|
|
||||||
def get_comments_by_id(conn, comment_ids: typing.Union[list, tuple]) -> typing.Union[list, None]:
|
|
||||||
""" Returns a list containing the comment data associated with each ID within the list"""
|
|
||||||
# format the input, under the assumption that the
|
|
||||||
placeholders = ', '.join('?' for _ in comment_ids)
|
|
||||||
with conn:
|
|
||||||
return [clean(dict(row)) for row in conn.execute(
|
|
||||||
SELECT_COMMENTS_ON_CLAIMS_CLAIMID + f'WHERE comment_id IN ({placeholders})',
|
|
||||||
tuple(comment_ids)
|
|
||||||
)]
|
|
||||||
|
|
||||||
|
|
||||||
def delete_comment_by_id(conn: sqlite3.Connection, comment_id: str):
|
|
||||||
with conn:
|
|
||||||
curs = conn.execute("DELETE FROM COMMENT WHERE CommentId = ?", (comment_id,))
|
|
||||||
return bool(curs.rowcount)
|
|
||||||
|
|
||||||
|
|
||||||
def insert_channel(conn: sqlite3.Connection, channel_name: str, channel_id: str):
|
|
||||||
with conn:
|
|
||||||
curs = conn.execute('INSERT INTO CHANNEL(ClaimId, Name) VALUES (?, ?)', (channel_id, channel_name))
|
|
||||||
return bool(curs.rowcount)
|
|
||||||
|
|
||||||
|
|
||||||
def get_channel_id_from_comment_id(conn: sqlite3.Connection, comment_id: str):
|
|
||||||
with conn:
|
|
||||||
channel = conn.execute(
|
|
||||||
"SELECT channel_id, channel_name FROM COMMENTS_ON_CLAIMS WHERE comment_id = ?", (comment_id,)
|
|
||||||
).fetchone()
|
|
||||||
return dict(channel) if channel else {}
|
|
||||||
|
|
||||||
|
|
||||||
def get_claim_ids_from_comment_ids(conn: sqlite3.Connection, comment_ids: list):
|
|
||||||
with conn:
|
|
||||||
cids = conn.execute(
|
|
||||||
f""" SELECT CommentId as comment_id, LbryClaimId AS claim_id FROM COMMENT
|
|
||||||
WHERE CommentId IN ({', '.join('?' for _ in comment_ids)}) """,
|
|
||||||
tuple(comment_ids)
|
|
||||||
)
|
|
||||||
return {row['comment_id']: row['claim_id'] for row in cids.fetchall()}
|
|
||||||
|
|
||||||
|
|
||||||
def hide_comments_by_id(conn: sqlite3.Connection, comment_ids: list):
|
|
||||||
with conn:
|
|
||||||
curs = conn.cursor()
|
|
||||||
curs.executemany(
|
|
||||||
"UPDATE COMMENT SET IsHidden = 1 WHERE CommentId = ?",
|
|
||||||
[[c] for c in comment_ids]
|
|
||||||
)
|
|
||||||
return bool(curs.rowcount)
|
|
||||||
|
|
||||||
|
|
||||||
class DatabaseWriter(object):
|
|
||||||
_writer = None
|
|
||||||
|
|
||||||
def __init__(self, db_file):
|
|
||||||
if not DatabaseWriter._writer:
|
|
||||||
self.conn = obtain_connection(db_file)
|
|
||||||
DatabaseWriter._writer = self
|
|
||||||
atexit.register(self.cleanup)
|
|
||||||
logging.info('Database writer has been created at %s', repr(self))
|
|
||||||
else:
|
|
||||||
logging.warning('Someone attempted to insantiate DatabaseWriter')
|
|
||||||
raise TypeError('Database Writer already exists!')
|
|
||||||
|
|
||||||
def cleanup(self):
|
|
||||||
logging.info('Cleaning up database writer')
|
|
||||||
self.conn.close()
|
|
||||||
DatabaseWriter._writer = None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def connection(self):
|
|
||||||
return self.conn
|
|
||||||
|
|
||||||
|
|
||||||
def setup_database(db_path):
|
|
||||||
with sqlite3.connect(db_path) as conn:
|
|
||||||
conn.executescript(CREATE_TABLES_QUERY)
|
|
||||||
|
|
||||||
|
|
||||||
def backup_database(conn: sqlite3.Connection, back_fp):
|
|
||||||
with sqlite3.connect(back_fp) as back:
|
|
||||||
conn.backup(back)
|
|
|
@ -1,77 +0,0 @@
|
||||||
PRAGMAS = """
|
|
||||||
PRAGMA FOREIGN_KEYS = ON;
|
|
||||||
"""
|
|
||||||
|
|
||||||
CREATE_COMMENT_TABLE = """
|
|
||||||
CREATE TABLE IF NOT EXISTS COMMENT (
|
|
||||||
CommentId TEXT NOT NULL,
|
|
||||||
LbryClaimId TEXT NOT NULL,
|
|
||||||
ChannelId TEXT DEFAULT NULL,
|
|
||||||
Body TEXT NOT NULL,
|
|
||||||
ParentId TEXT DEFAULT NULL,
|
|
||||||
Signature TEXT DEFAULT NULL,
|
|
||||||
Timestamp INTEGER NOT NULL,
|
|
||||||
SigningTs TEXT DEFAULT NULL,
|
|
||||||
IsHidden BOOLEAN NOT NULL DEFAULT 0,
|
|
||||||
CONSTRAINT COMMENT_PRIMARY_KEY PRIMARY KEY (CommentId) ON CONFLICT IGNORE,
|
|
||||||
CONSTRAINT COMMENT_SIGNATURE_SK UNIQUE (Signature) ON CONFLICT ABORT,
|
|
||||||
CONSTRAINT COMMENT_CHANNEL_FK FOREIGN KEY (ChannelId) REFERENCES CHANNEL (ClaimId)
|
|
||||||
ON DELETE NO ACTION ON UPDATE NO ACTION,
|
|
||||||
CONSTRAINT COMMENT_PARENT_FK FOREIGN KEY (ParentId) REFERENCES COMMENT (CommentId)
|
|
||||||
ON UPDATE CASCADE ON DELETE NO ACTION -- setting null implies comment is top level
|
|
||||||
);
|
|
||||||
"""
|
|
||||||
|
|
||||||
CREATE_COMMENT_INDEXES = """
|
|
||||||
CREATE INDEX IF NOT EXISTS CLAIM_COMMENT_INDEX ON COMMENT (LbryClaimId, CommentId);
|
|
||||||
CREATE INDEX IF NOT EXISTS CHANNEL_COMMENT_INDEX ON COMMENT (ChannelId, CommentId);
|
|
||||||
"""
|
|
||||||
|
|
||||||
CREATE_CHANNEL_TABLE = """
|
|
||||||
CREATE TABLE IF NOT EXISTS CHANNEL (
|
|
||||||
ClaimId TEXT NOT NULL,
|
|
||||||
Name TEXT NOT NULL,
|
|
||||||
CONSTRAINT CHANNEL_PK PRIMARY KEY (ClaimId)
|
|
||||||
ON CONFLICT IGNORE
|
|
||||||
);
|
|
||||||
"""
|
|
||||||
|
|
||||||
CREATE_COMMENTS_ON_CLAIMS_VIEW = """
|
|
||||||
CREATE VIEW IF NOT EXISTS COMMENTS_ON_CLAIMS AS SELECT
|
|
||||||
C.CommentId AS comment_id,
|
|
||||||
C.Body AS comment,
|
|
||||||
C.LbryClaimId AS claim_id,
|
|
||||||
C.Timestamp AS timestamp,
|
|
||||||
CHAN.Name AS channel_name,
|
|
||||||
CHAN.ClaimId AS channel_id,
|
|
||||||
('lbry://' || CHAN.Name || '#' || CHAN.ClaimId) AS channel_url,
|
|
||||||
C.Signature AS signature,
|
|
||||||
C.SigningTs AS signing_ts,
|
|
||||||
C.ParentId AS parent_id,
|
|
||||||
C.IsHidden AS is_hidden
|
|
||||||
FROM COMMENT AS C
|
|
||||||
LEFT OUTER JOIN CHANNEL CHAN ON C.ChannelId = CHAN.ClaimId
|
|
||||||
ORDER BY C.Timestamp DESC;
|
|
||||||
"""
|
|
||||||
|
|
||||||
# not being used right now but should be kept around when Tom finally asks for replies
|
|
||||||
CREATE_COMMENT_REPLIES_VIEW = """
|
|
||||||
CREATE VIEW IF NOT EXISTS COMMENT_REPLIES (Author, CommentBody, ParentAuthor, ParentCommentBody) AS
|
|
||||||
SELECT AUTHOR.Name, OG.Body, PCHAN.Name, PARENT.Body
|
|
||||||
FROM COMMENT AS OG
|
|
||||||
JOIN COMMENT AS PARENT
|
|
||||||
ON OG.ParentId = PARENT.CommentId
|
|
||||||
JOIN CHANNEL AS PCHAN ON PARENT.ChannelId = PCHAN.ClaimId
|
|
||||||
JOIN CHANNEL AS AUTHOR ON OG.ChannelId = AUTHOR.ClaimId
|
|
||||||
ORDER BY OG.Timestamp;
|
|
||||||
"""
|
|
||||||
|
|
||||||
CREATE_TABLES_QUERY = (
|
|
||||||
PRAGMAS +
|
|
||||||
CREATE_COMMENT_TABLE +
|
|
||||||
CREATE_COMMENT_INDEXES +
|
|
||||||
CREATE_CHANNEL_TABLE +
|
|
||||||
CREATE_COMMENTS_ON_CLAIMS_VIEW +
|
|
||||||
CREATE_COMMENT_REPLIES_VIEW
|
|
||||||
)
|
|
||||||
|
|
|
@ -1,81 +1,150 @@
|
||||||
|
# TODO: scrap notification routines from these files & supply them in handles
|
||||||
import logging
|
import logging
|
||||||
import sqlite3
|
import sqlite3
|
||||||
|
|
||||||
from asyncio import coroutine
|
from asyncio import coroutine
|
||||||
|
|
||||||
from src.database.queries import hide_comments_by_id
|
from src.server.validation import is_valid_channel
|
||||||
from src.database.queries import delete_comment_by_id
|
from src.server.validation import is_valid_base_comment
|
||||||
from src.database.queries import get_comment_or_none
|
from src.server.validation import is_valid_credential_input
|
||||||
from src.database.queries import insert_comment
|
from src.server.validation import validate_signature_from_claim
|
||||||
from src.database.queries import insert_channel
|
from src.server.validation import body_is_valid
|
||||||
from src.database.queries import get_claim_ids_from_comment_ids
|
from src.misc import get_claim_from_id
|
||||||
from src.server.misc import validate_signature_from_claim
|
from src.server.external import send_notifications
|
||||||
from src.server.misc import channel_matches_pattern_or_error
|
from src.server.external import send_notification
|
||||||
from src.server.misc import get_claim_from_id
|
import src.database.queries as db
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def create_comment_or_error(conn, comment, claim_id, channel_id=None, channel_name=None,
|
def create_comment_or_error(conn, comment, claim_id=None, channel_id=None, channel_name=None,
|
||||||
signature=None, signing_ts=None, parent_id=None) -> dict:
|
signature=None, signing_ts=None, parent_id=None) -> dict:
|
||||||
if channel_id or channel_name or signature or signing_ts:
|
insert_channel_or_error(conn, channel_name, channel_id)
|
||||||
insert_channel_or_error(conn, channel_name, channel_id)
|
fn = db.insert_comment if parent_id is None else db.insert_reply
|
||||||
comment_id = insert_comment(
|
comment_id = fn(
|
||||||
conn=conn, comment=comment, claim_id=claim_id, channel_id=channel_id,
|
conn=conn,
|
||||||
signature=signature, parent_id=parent_id, signing_ts=signing_ts
|
comment=comment,
|
||||||
|
claim_id=claim_id,
|
||||||
|
channel_id=channel_id,
|
||||||
|
signature=signature,
|
||||||
|
parent_id=parent_id,
|
||||||
|
signing_ts=signing_ts
|
||||||
)
|
)
|
||||||
return get_comment_or_none(conn, comment_id)
|
return db.get_comment_or_none(conn, comment_id)
|
||||||
|
|
||||||
|
|
||||||
def insert_channel_or_error(conn: sqlite3.Connection, channel_name: str, channel_id: str):
|
def insert_channel_or_error(conn: sqlite3.Connection, channel_name: str, channel_id: str):
|
||||||
try:
|
try:
|
||||||
channel_matches_pattern_or_error(channel_id, channel_name)
|
is_valid_channel(channel_id, channel_name)
|
||||||
insert_channel(conn, channel_name, channel_id)
|
db.insert_channel(conn, channel_name, channel_id)
|
||||||
except AssertionError:
|
except AssertionError:
|
||||||
logger.exception('Invalid channel values given')
|
logger.exception('Invalid channel values given')
|
||||||
raise ValueError('Received invalid values for channel_id or channel_name')
|
raise ValueError('Received invalid values for channel_id or channel_name')
|
||||||
|
|
||||||
|
|
||||||
async def abandon_comment(app, comment_id):
|
""" COROUTINE WRAPPERS """
|
||||||
return await coroutine(delete_comment_by_id)(app['writer'], comment_id)
|
|
||||||
|
|
||||||
|
|
||||||
async def abandon_comment_if_authorized(app, comment_id, channel_id, signature, signing_ts, **kwargs):
|
async def _create_comment(app, params): # CREATE
|
||||||
claim = await get_claim_from_id(app, channel_id)
|
|
||||||
if not validate_signature_from_claim(claim, signature, signing_ts, comment_id):
|
|
||||||
return False
|
|
||||||
|
|
||||||
job = await app['comment_scheduler'].spawn(abandon_comment(app, comment_id))
|
|
||||||
return await job.wait()
|
|
||||||
|
|
||||||
|
|
||||||
async def write_comment(app, params):
|
|
||||||
return await coroutine(create_comment_or_error)(app['writer'], **params)
|
return await coroutine(create_comment_or_error)(app['writer'], **params)
|
||||||
|
|
||||||
|
|
||||||
async def hide_comments(app, comment_ids):
|
async def _hide_comments(app, comment_ids): # UPDATE
|
||||||
return await coroutine(hide_comments_by_id)(app['writer'], comment_ids)
|
return await coroutine(db.hide_comments_by_id)(app['writer'], comment_ids)
|
||||||
|
|
||||||
|
|
||||||
async def hide_comments_where_authorized(app, pieces: list):
|
async def _edit_comment(**params):
|
||||||
comment_cids = get_claim_ids_from_comment_ids(
|
return await coroutine(db.edit_comment_by_id)(**params)
|
||||||
|
|
||||||
|
|
||||||
|
async def _abandon_comment(app, comment_id): # DELETE
|
||||||
|
return await coroutine(db.delete_comment_by_id)(app['writer'], comment_id)
|
||||||
|
|
||||||
|
|
||||||
|
""" Core Functions called by request handlers """
|
||||||
|
|
||||||
|
|
||||||
|
async def create_comment(app, params):
|
||||||
|
if is_valid_base_comment(**params):
|
||||||
|
job = await app['comment_scheduler'].spawn(_create_comment(app, params))
|
||||||
|
comment = await job.wait()
|
||||||
|
if comment:
|
||||||
|
await app['webhooks'].spawn(
|
||||||
|
send_notification(app, 'CREATE', comment)
|
||||||
|
)
|
||||||
|
return comment
|
||||||
|
else:
|
||||||
|
raise ValueError('base comment is invalid')
|
||||||
|
|
||||||
|
|
||||||
|
async def hide_comments(app, pieces: list) -> list:
|
||||||
|
comment_cids = db.get_claim_ids_from_comment_ids(
|
||||||
conn=app['reader'],
|
conn=app['reader'],
|
||||||
comment_ids=[p['comment_id'] for p in pieces]
|
comment_ids=[p['comment_id'] for p in pieces]
|
||||||
)
|
)
|
||||||
# TODO: Amortize this process
|
# TODO: Amortize this process
|
||||||
claims = {}
|
claims = {}
|
||||||
comments_to_hide = []
|
comments_to_hide = []
|
||||||
|
# go through a list of dict objects
|
||||||
for p in pieces:
|
for p in pieces:
|
||||||
|
# maps the comment_id from the piece to a claim_id
|
||||||
claim_id = comment_cids[p['comment_id']]
|
claim_id = comment_cids[p['comment_id']]
|
||||||
|
# resolve the claim from its id
|
||||||
if claim_id not in claims:
|
if claim_id not in claims:
|
||||||
claims[claim_id] = await get_claim_from_id(app, claim_id, no_totals=True)
|
claim = await get_claim_from_id(app, claim_id)
|
||||||
|
if claim:
|
||||||
|
claims[claim_id] = claim
|
||||||
|
|
||||||
|
# get the claim's signing channel, then use it to validate the hidden comment
|
||||||
channel = claims[claim_id].get('signing_channel')
|
channel = claims[claim_id].get('signing_channel')
|
||||||
if validate_signature_from_claim(channel, p['signature'], p['signing_ts'], p['comment_id']):
|
if validate_signature_from_claim(channel, p['signature'], p['signing_ts'], p['comment_id']):
|
||||||
comments_to_hide.append(p['comment_id'])
|
comments_to_hide.append(p)
|
||||||
|
|
||||||
if comments_to_hide:
|
comment_ids = [c['comment_id'] for c in comments_to_hide]
|
||||||
job = await app['comment_scheduler'].spawn(hide_comments(app, comments_to_hide))
|
job = await app['comment_scheduler'].spawn(_hide_comments(app, comment_ids))
|
||||||
await job.wait()
|
await app['webhooks'].spawn(
|
||||||
|
send_notifications(
|
||||||
|
app, 'UPDATE', db.get_comments_by_id(app['reader'], comment_ids)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
await job.wait()
|
||||||
|
return comment_ids
|
||||||
|
|
||||||
return {'hidden': comments_to_hide}
|
|
||||||
|
async def edit_comment(app, comment_id: str, comment: str, channel_id: str,
|
||||||
|
channel_name: str, signature: str, signing_ts: str):
|
||||||
|
if not(is_valid_credential_input(channel_id, channel_name, signature, signing_ts)
|
||||||
|
and body_is_valid(comment)):
|
||||||
|
logging.error('Invalid argument values, check input and try again')
|
||||||
|
return
|
||||||
|
|
||||||
|
cmnt = db.get_comment_or_none(app['reader'], comment_id)
|
||||||
|
if not(cmnt and 'channel_id' in cmnt and cmnt['channel_id'] == channel_id.lower()):
|
||||||
|
logging.error("comment doesnt exist")
|
||||||
|
return
|
||||||
|
|
||||||
|
channel = await get_claim_from_id(app, channel_id)
|
||||||
|
if not validate_signature_from_claim(channel, signature, signing_ts, comment):
|
||||||
|
logging.error("signature could not be validated")
|
||||||
|
return
|
||||||
|
|
||||||
|
job = await app['comment_scheduler'].spawn(_edit_comment(
|
||||||
|
conn=app['writer'],
|
||||||
|
comment_id=comment_id,
|
||||||
|
signature=signature,
|
||||||
|
signing_ts=signing_ts,
|
||||||
|
comment=comment
|
||||||
|
))
|
||||||
|
|
||||||
|
return await job.wait()
|
||||||
|
|
||||||
|
|
||||||
|
async def abandon_comment(app, comment_id, channel_id, signature, signing_ts, **kwargs):
|
||||||
|
channel = await get_claim_from_id(app, channel_id)
|
||||||
|
if not validate_signature_from_claim(channel, signature, signing_ts, comment_id):
|
||||||
|
return False
|
||||||
|
|
||||||
|
comment = db.get_comment_or_none(app['reader'], comment_id)
|
||||||
|
job = await app['comment_scheduler'].spawn(_abandon_comment(app, comment_id))
|
||||||
|
await app['webhooks'].spawn(send_notification(app, 'DELETE', comment))
|
||||||
|
return await job.wait()
|
||||||
|
|
7
src/definitions.py
Normal file
7
src/definitions.py
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
import os
|
||||||
|
|
||||||
|
SRC_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
ROOT_DIR = os.path.dirname(SRC_DIR)
|
||||||
|
CONFIG_FILE = os.path.join(ROOT_DIR, 'config', 'conf.yml')
|
||||||
|
LOGGING_DIR = os.path.join(ROOT_DIR, 'logs')
|
||||||
|
DATABASE_DIR = os.path.join(ROOT_DIR, 'database')
|
51
src/main.py
51
src/main.py
|
@ -1,13 +1,19 @@
|
||||||
import logging.config
|
|
||||||
import logging
|
|
||||||
import argparse
|
import argparse
|
||||||
|
import yaml
|
||||||
|
import logging
|
||||||
|
import logging.config
|
||||||
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from src.settings import config
|
|
||||||
from src.server.app import run_app
|
from src.server.app import run_app
|
||||||
|
from src.definitions import LOGGING_DIR, CONFIG_FILE, DATABASE_DIR
|
||||||
|
|
||||||
|
|
||||||
def config_logging_from_settings(conf):
|
def setup_logging_from_config(conf: dict):
|
||||||
|
# set the logging directory here from the settings file
|
||||||
|
if not os.path.exists(LOGGING_DIR):
|
||||||
|
os.mkdir(LOGGING_DIR)
|
||||||
|
|
||||||
_config = {
|
_config = {
|
||||||
"version": 1,
|
"version": 1,
|
||||||
"disable_existing_loggers": False,
|
"disable_existing_loggers": False,
|
||||||
|
@ -32,7 +38,7 @@ def config_logging_from_settings(conf):
|
||||||
"level": "DEBUG",
|
"level": "DEBUG",
|
||||||
"formatter": "standard",
|
"formatter": "standard",
|
||||||
"class": "logging.handlers.RotatingFileHandler",
|
"class": "logging.handlers.RotatingFileHandler",
|
||||||
"filename": conf['path']['debug_log'],
|
"filename": os.path.join(LOGGING_DIR, 'debug.log'),
|
||||||
"maxBytes": 10485760,
|
"maxBytes": 10485760,
|
||||||
"backupCount": 5
|
"backupCount": 5
|
||||||
},
|
},
|
||||||
|
@ -40,7 +46,7 @@ def config_logging_from_settings(conf):
|
||||||
"level": "ERROR",
|
"level": "ERROR",
|
||||||
"formatter": "standard",
|
"formatter": "standard",
|
||||||
"class": "logging.handlers.RotatingFileHandler",
|
"class": "logging.handlers.RotatingFileHandler",
|
||||||
"filename": conf['path']['error_log'],
|
"filename": os.path.join(LOGGING_DIR, 'error.log'),
|
||||||
"maxBytes": 10485760,
|
"maxBytes": 10485760,
|
||||||
"backupCount": 5
|
"backupCount": 5
|
||||||
},
|
},
|
||||||
|
@ -48,7 +54,7 @@ def config_logging_from_settings(conf):
|
||||||
"level": "NOTSET",
|
"level": "NOTSET",
|
||||||
"formatter": "aiohttp",
|
"formatter": "aiohttp",
|
||||||
"class": "logging.handlers.RotatingFileHandler",
|
"class": "logging.handlers.RotatingFileHandler",
|
||||||
"filename": conf['path']['server_log'],
|
"filename": os.path.join(LOGGING_DIR, 'server.log'),
|
||||||
"maxBytes": 10485760,
|
"maxBytes": 10485760,
|
||||||
"backupCount": 5
|
"backupCount": 5
|
||||||
}
|
}
|
||||||
|
@ -70,15 +76,42 @@ def config_logging_from_settings(conf):
|
||||||
logging.config.dictConfig(_config)
|
logging.config.dictConfig(_config)
|
||||||
|
|
||||||
|
|
||||||
|
def get_config(filepath):
|
||||||
|
with open(filepath, 'r') as cfile:
|
||||||
|
config = yaml.load(cfile, Loader=yaml.Loader)
|
||||||
|
return config
|
||||||
|
|
||||||
|
|
||||||
|
def setup_db_from_config(config: dict):
|
||||||
|
mode = config['mode']
|
||||||
|
if config[mode]['database'] == 'sqlite':
|
||||||
|
if not os.path.exists(DATABASE_DIR):
|
||||||
|
os.mkdir(DATABASE_DIR)
|
||||||
|
|
||||||
|
config[mode]['db_file'] = os.path.join(
|
||||||
|
DATABASE_DIR, config[mode]['name']
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def main(argv=None):
|
def main(argv=None):
|
||||||
argv = argv or sys.argv[1:]
|
argv = argv or sys.argv[1:]
|
||||||
parser = argparse.ArgumentParser(description='LBRY Comment Server')
|
parser = argparse.ArgumentParser(description='LBRY Comment Server')
|
||||||
parser.add_argument('--port', type=int)
|
parser.add_argument('--port', type=int)
|
||||||
|
parser.add_argument('--config', type=str)
|
||||||
|
parser.add_argument('--mode', type=str)
|
||||||
args = parser.parse_args(argv)
|
args = parser.parse_args(argv)
|
||||||
config_logging_from_settings(config)
|
|
||||||
|
config = get_config(CONFIG_FILE) if not args.config else args.config
|
||||||
|
setup_logging_from_config(config)
|
||||||
|
|
||||||
|
if args.mode:
|
||||||
|
config['mode'] = args.mode
|
||||||
|
|
||||||
|
setup_db_from_config(config)
|
||||||
|
|
||||||
if args.port:
|
if args.port:
|
||||||
config['port'] = args.port
|
config['port'] = args.port
|
||||||
config_logging_from_settings(config)
|
|
||||||
run_app(config)
|
run_app(config)
|
||||||
|
|
||||||
|
|
||||||
|
|
26
src/misc.py
Normal file
26
src/misc.py
Normal file
|
@ -0,0 +1,26 @@
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from src.server.external import request_lbrynet
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
ID_LIST = {'claim_id', 'parent_id', 'comment_id', 'channel_id'}
|
||||||
|
|
||||||
|
|
||||||
|
async def get_claim_from_id(app, claim_id, **kwargs):
|
||||||
|
try:
|
||||||
|
return (await request_lbrynet(app, 'claim_search', claim_id=claim_id, **kwargs))['items'][0]
|
||||||
|
except IndexError:
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
def clean_input_params(kwargs: dict):
|
||||||
|
for k, v in kwargs.items():
|
||||||
|
if type(v) is str and k != 'comment':
|
||||||
|
kwargs[k] = v.strip()
|
||||||
|
if k in ID_LIST:
|
||||||
|
kwargs[k] = v.lower()
|
||||||
|
|
||||||
|
|
||||||
|
def clean(thing: dict) -> dict:
|
||||||
|
return {k: v for k, v in thing.items() if v is not None}
|
|
@ -1,78 +1,81 @@
|
||||||
# cython: language_level=3
|
# cython: language_level=3
|
||||||
|
import asyncio
|
||||||
import logging
|
import logging
|
||||||
import pathlib
|
|
||||||
import signal
|
import signal
|
||||||
import time
|
import time
|
||||||
|
|
||||||
import aiojobs
|
import aiojobs
|
||||||
import aiojobs.aiohttp
|
import aiojobs.aiohttp
|
||||||
import asyncio
|
|
||||||
from aiohttp import web
|
from aiohttp import web
|
||||||
|
|
||||||
from src.database.queries import setup_database, backup_database
|
from peewee import *
|
||||||
from src.database.queries import obtain_connection, DatabaseWriter
|
|
||||||
from src.server.handles import api_endpoint, get_api_endpoint
|
from src.server.handles import api_endpoint, get_api_endpoint
|
||||||
|
from src.database.models import Comment, Channel
|
||||||
|
|
||||||
|
MODELS = [Comment, Channel]
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
async def setup_db_schema(app):
|
def setup_database(app):
|
||||||
if not pathlib.Path(app['db_path']).exists():
|
config = app['config']
|
||||||
logger.info(f'Setting up schema in {app["db_path"]}')
|
mode = config['mode']
|
||||||
setup_database(app['db_path'])
|
|
||||||
else:
|
|
||||||
logger.info(f'Database already exists in {app["db_path"]}, skipping setup')
|
|
||||||
|
|
||||||
|
# switch between Database objects
|
||||||
|
if config[mode]['database'] == 'mysql':
|
||||||
|
app['db'] = MySQLDatabase(
|
||||||
|
database=config[mode]['name'],
|
||||||
|
user=config[mode]['user'],
|
||||||
|
host=config[mode]['host'],
|
||||||
|
password=config[mode]['password'],
|
||||||
|
port=config[mode]['port'],
|
||||||
|
charset=config[mode]['charset'],
|
||||||
|
)
|
||||||
|
elif config[mode]['database'] == 'sqlite':
|
||||||
|
app['db'] = SqliteDatabase(
|
||||||
|
config[mode]['file'],
|
||||||
|
pragmas=config[mode]['pragmas']
|
||||||
|
)
|
||||||
|
|
||||||
async def database_backup_routine(app):
|
# bind the Model list to the database
|
||||||
try:
|
app['db'].bind(MODELS, bind_refs=False, bind_backrefs=False)
|
||||||
while True:
|
|
||||||
await asyncio.sleep(app['config']['backup_int'])
|
|
||||||
with app['reader'] as conn:
|
|
||||||
logger.debug('backing up database')
|
|
||||||
backup_database(conn, app['backup'])
|
|
||||||
except asyncio.CancelledError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
async def start_background_tasks(app):
|
async def start_background_tasks(app):
|
||||||
app['reader'] = obtain_connection(app['db_path'], True)
|
app['db'].connect()
|
||||||
app['waitful_backup'] = asyncio.create_task(database_backup_routine(app))
|
app['db'].create_tables(MODELS)
|
||||||
app['comment_scheduler'] = await aiojobs.create_scheduler(limit=1, pending_limit=0)
|
|
||||||
app['db_writer'] = DatabaseWriter(app['db_path'])
|
# for requesting to external and internal APIs
|
||||||
app['writer'] = app['db_writer'].connection
|
app['webhooks'] = await aiojobs.create_scheduler(pending_limit=0)
|
||||||
|
|
||||||
|
|
||||||
async def close_database_connections(app):
|
async def close_database_connections(app):
|
||||||
logger.info('Ending background backup loop')
|
app['db'].close()
|
||||||
app['waitful_backup'].cancel()
|
|
||||||
await app['waitful_backup']
|
|
||||||
app['reader'].close()
|
|
||||||
app['writer'].close()
|
|
||||||
app['db_writer'].cleanup()
|
|
||||||
|
|
||||||
|
|
||||||
async def close_comment_scheduler(app):
|
async def close_schedulers(app):
|
||||||
logger.info('Closing comment_scheduler')
|
logger.info('Closing scheduler for webhook requests')
|
||||||
await app['comment_scheduler'].close()
|
await app['webhooks'].close()
|
||||||
|
|
||||||
|
|
||||||
class CommentDaemon:
|
class CommentDaemon:
|
||||||
def __init__(self, config, db_file=None, backup=None, **kwargs):
|
def __init__(self, config, **kwargs):
|
||||||
app = web.Application()
|
app = web.Application()
|
||||||
app['config'] = config
|
app['config'] = config
|
||||||
self.config = app['config']
|
|
||||||
if db_file:
|
# configure the config
|
||||||
app['db_path'] = db_file
|
self.config = config
|
||||||
app['backup'] = backup
|
self.host = config['host']
|
||||||
else:
|
self.port = config['port']
|
||||||
app['db_path'] = config['path']['database']
|
|
||||||
app['backup'] = backup or (app['db_path'] + '.backup')
|
setup_database(app)
|
||||||
app.on_startup.append(setup_db_schema)
|
|
||||||
|
# configure the order of tasks to run during app lifetime
|
||||||
app.on_startup.append(start_background_tasks)
|
app.on_startup.append(start_background_tasks)
|
||||||
app.on_shutdown.append(close_comment_scheduler)
|
app.on_shutdown.append(close_schedulers)
|
||||||
app.on_cleanup.append(close_database_connections)
|
app.on_cleanup.append(close_database_connections)
|
||||||
aiojobs.aiohttp.setup(app, **kwargs)
|
aiojobs.aiohttp.setup(app, **kwargs)
|
||||||
|
|
||||||
|
# Configure the routes
|
||||||
app.add_routes([
|
app.add_routes([
|
||||||
web.post('/api', api_endpoint),
|
web.post('/api', api_endpoint),
|
||||||
web.get('/', get_api_endpoint),
|
web.get('/', get_api_endpoint),
|
||||||
|
@ -88,20 +91,19 @@ class CommentDaemon:
|
||||||
await self.app_runner.setup()
|
await self.app_runner.setup()
|
||||||
self.app_site = web.TCPSite(
|
self.app_site = web.TCPSite(
|
||||||
runner=self.app_runner,
|
runner=self.app_runner,
|
||||||
host=host or self.config['host'],
|
host=host or self.host,
|
||||||
port=port or self.config['port'],
|
port=port or self.port,
|
||||||
)
|
)
|
||||||
await self.app_site.start()
|
await self.app_site.start()
|
||||||
logger.info(f'Comment Server is running on {self.config["host"]}:{self.config["port"]}')
|
logger.info(f'Comment Server is running on {self.host}:{self.port}')
|
||||||
|
|
||||||
async def stop(self):
|
async def stop(self):
|
||||||
await self.app_runner.shutdown()
|
await self.app_runner.shutdown()
|
||||||
await self.app_runner.cleanup()
|
await self.app_runner.cleanup()
|
||||||
|
|
||||||
|
|
||||||
def run_app(config, db_file=None):
|
def run_app(config):
|
||||||
comment_app = CommentDaemon(config=config, db_file=db_file, close_timeout=5.0)
|
comment_app = CommentDaemon(config=config)
|
||||||
|
|
||||||
loop = asyncio.get_event_loop()
|
loop = asyncio.get_event_loop()
|
||||||
|
|
||||||
def __exit():
|
def __exit():
|
||||||
|
|
48
src/server/errors.py
Normal file
48
src/server/errors.py
Normal file
|
@ -0,0 +1,48 @@
|
||||||
|
import json
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import aiohttp
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
# JSON-RPC 2.0 error templates, keyed by symbolic name.
ERRORS = {
    'INVALID_PARAMS': {'code': -32602, 'message': 'Invalid Method Parameter(s).'},
    'INTERNAL': {'code': -32603, 'message': 'Internal Server Error. Please notify a LBRY Administrator.'},
    'METHOD_NOT_FOUND': {'code': -32601, 'message': 'The method does not exist / is not available.'},
    'INVALID_REQUEST': {'code': -32600, 'message': 'The JSON sent is not a valid Request object.'},
    'PARSE_ERROR': {
        'code': -32700,
        'message': 'Invalid JSON was received by the server.\n'
                   'An error occurred on the server while parsing the JSON text.'
    }
}


def make_error(error, exc=None) -> dict:
    """Build a JSON-RPC error object for *error*.

    Args:
        error: symbolic key into ERRORS; unknown keys fall back to 'INTERNAL'.
        exc: optional exception; its class name and message are attached
            to the returned body for client-side diagnostics.

    Returns:
        A fresh dict each call. (The previous implementation returned a
        reference into ERRORS and mutated it, permanently polluting the
        shared templates with stale exception text.)
    """
    body = dict(ERRORS.get(error, ERRORS['INTERNAL']))
    if exc is not None:
        body[type(exc).__name__] = str(exc)
    return body
||||||
|
|
||||||
|
|
||||||
|
async def report_error(app, exc, body: dict):
    """Best-effort error reporter: post *exc* and the offending request
    *body* to the Slack webhook configured under ``config['slack_webhook']``.

    Never raises — any failure while contacting Slack is logged and
    swallowed so error reporting cannot take down request handling.
    """
    try:
        if 'slack_webhook' in app['config']:
            body_dump = json.dumps(body, indent=4)
            exec_name = type(exc).__name__
            exec_body = str(exc)
            message = {
                "text": f"Got `{exec_name}`: `\n{exec_body}`\n```{body_dump}```"
            }
            async with aiohttp.ClientSession() as sesh:
                async with sesh.post(app['config']['slack_webhook'], json=message) as resp:
                    # drain the response so the connection is returned cleanly
                    await resp.wait_for_close()
    except Exception:
        logger.critical('Error while logging to slack webhook')
63
src/server/external.py
Normal file
63
src/server/external.py
Normal file
|
@ -0,0 +1,63 @@
|
||||||
|
import logging
|
||||||
|
from json import JSONDecodeError
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
from aiohttp import ClientConnectorError
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
async def send_notifications(app, action: str, comments: List[dict]):
    """Send one internal-API notification per comment.

    Args:
        app: aiohttp application; reads ``config['notifications']``
            ('auth_token' and 'url').
        action: e.g. 'CREATE' / 'UPDATE' / 'DELETE'; abbreviated by
            create_notification_batch.
        comments: comment dicts to notify about.

    Failures are logged per event and do not abort the remaining batch.
    """
    events = create_notification_batch(action, comments)
    async with aiohttp.ClientSession() as session:
        for event in events:
            event.update(auth_token=app['config']['notifications']['auth_token'])
            try:
                async with session.get(app['config']['notifications']['url'], params=event) as resp:
                    logger.debug(f'Completed Notification: {await resp.text()}, HTTP Status: {resp.status}')
            except Exception:
                # NOTE: the previous handler formatted resp.status / resp.text()
                # here, but `resp` is unbound when the request itself fails (and
                # text() is an un-awaited coroutine) — which raised a second
                # error inside the except block. Log only what is in scope.
                logger.exception(f'Error requesting internal API, '
                                 f'comment_id: {event["comment_id"]}')
||||||
|
|
||||||
|
|
||||||
|
async def send_notification(app, action: str, comment: dict):
    """Convenience wrapper: send a notification for a single comment."""
    await send_notifications(app, action, [comment])
||||||
|
|
||||||
|
|
||||||
|
def create_notification_batch(action: str, comments: List[dict]) -> List[dict]:
    """Build internal-API notification payloads for a batch of comments.

    The action is abbreviated to its capitalized first letter
    ('CREATE' -> 'C', 'UPDATE' -> 'U', 'DELETE' -> 'D'). Required keys
    ('comment_id', 'claim_id') are always present; optional fields are
    copied only when present and truthy on the source comment.
    """
    abbreviated_action = action[0].capitalize()
    events = []
    for comment in comments:
        event = {
            'action_type': abbreviated_action,
            'comment_id': comment['comment_id'],
            'claim_id': comment['claim_id'],
        }
        # optional fields, in the same order the API historically sent them
        for optional_key in ('channel_id', 'parent_id', 'comment'):
            if comment.get(optional_key):
                event[optional_key] = comment[optional_key]
        events.append(event)
    return events
||||||
|
|
||||||
|
|
||||||
|
async def request_lbrynet(app, method, **params):
    """Proxy a JSON-RPC call to the lbrynet daemon at config['lbrynet']
    and return its 'result' payload.

    Raises:
        ValueError: lbrynet answered with an 'error' object.
        Exception: the response body was not valid JSON, or the daemon
            is unreachable.
    """
    body = {'method': method, 'params': {**params}}
    try:
        async with aiohttp.request('POST', app['config']['lbrynet'], json=body) as req:
            try:
                resp = await req.json()
            except JSONDecodeError as jde:
                logger.exception(jde.msg)
                raise Exception('JSON Decode Error In lbrynet request')
            # The previous version inspected `resp` in a `finally:` block,
            # which also ran after the decode failure above — raising
            # NameError on the unbound `resp` and masking the real error.
            if 'result' in resp:
                return resp['result']
            raise ValueError('LBRYNET Request Error', {'error': resp['error']})
    except (ConnectionRefusedError, ClientConnectorError):
        logger.critical("Connection to the LBRYnet daemon failed, make sure it's running.")
        raise Exception("Server cannot verify delete signature")
|
|
@ -1,21 +1,23 @@
|
||||||
|
import asyncio
|
||||||
import logging
|
import logging
|
||||||
import time
|
import time
|
||||||
|
import typing
|
||||||
|
|
||||||
import asyncio
|
|
||||||
from aiohttp import web
|
from aiohttp import web
|
||||||
from aiojobs.aiohttp import atomic
|
from aiojobs.aiohttp import atomic
|
||||||
|
from peewee import DoesNotExist
|
||||||
|
|
||||||
from src.server.misc import clean_input_params
|
from src.server.external import send_notification
|
||||||
from src.database.queries import get_claim_comments
|
from src.server.validation import validate_signature_from_claim
|
||||||
from src.database.queries import get_comments_by_id, get_comment_ids
|
from src.misc import clean_input_params, get_claim_from_id
|
||||||
from src.database.queries import get_channel_id_from_comment_id
|
from src.server.errors import make_error, report_error
|
||||||
from src.database.queries import get_claim_hidden_comments
|
from src.database.models import Comment, Channel
|
||||||
from src.server.misc import is_valid_base_comment
|
from src.database.models import get_comment
|
||||||
from src.server.misc import is_valid_credential_input
|
from src.database.models import comment_list
|
||||||
from src.server.misc import make_error
|
from src.database.models import create_comment
|
||||||
from src.database.writes import abandon_comment_if_authorized
|
from src.database.models import edit_comment
|
||||||
from src.database.writes import write_comment
|
from src.database.models import delete_comment
|
||||||
from src.database.writes import hide_comments_where_authorized
|
from src.database.models import set_hidden_flag
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
@ -26,53 +28,198 @@ def ping(*args):
|
||||||
return 'pong'
|
return 'pong'
|
||||||
|
|
||||||
|
|
||||||
def handle_get_channel_from_comment_id(app, kwargs: dict):
|
def handle_get_channel_from_comment_id(app: web.Application, comment_id: str) -> dict:
    """Return the id and name of the channel that posted *comment_id*.

    Propagates whatever ``get_comment`` raises for an unknown id
    (presumably peewee's DoesNotExist — confirm in src.database.models).
    """
    comment = get_comment(comment_id)
    return {
        'channel_id': comment['channel_id'],
        'channel_name': comment['channel_name']
    }
||||||
|
|
||||||
|
|
||||||
def handle_get_comment_ids(app, kwargs):
|
def handle_get_comment_ids(
        app: web.Application,
        claim_id: str,
        parent_id: str = None,
        page: int = 1,
        page_size: int = 50,
        flattened=False
) -> dict:
    """Page through (comment_id, parent_id) pairs under a claim.

    Top-level comments are selected when *parent_id* is None; otherwise
    the replies to *parent_id*. With ``flattened=True``, 'items' is
    rewritten to bare comment_id strings and a parallel 'replies' list
    of (comment_id, parent_id) tuples is added.
    """
    results = comment_list(
        claim_id=claim_id,
        parent_id=parent_id,
        top_level=(parent_id is None),
        page=page,
        page_size=page_size,
        select_fields=['comment_id', 'parent_id']
    )
    if flattened:
        # both values below are computed from the original dict items
        # before the update replaces 'items'
        results.update({
            'items': [item['comment_id'] for item in results['items']],
            'replies': [(item['comment_id'], item.get('parent_id'))
                        for item in results['items']]
        })
    return results
||||||
|
|
||||||
|
|
||||||
def handle_get_claim_comments(app, kwargs):
|
def handle_get_comments_by_id(
        app: web.Application,
        comment_ids: typing.Union[list, tuple]
) -> dict:
    """Fetch the given comments in a single page (page_size == len(ids))."""
    expression = Comment.comment_id.in_(comment_ids)
    return comment_list(expressions=expression, page_size=len(comment_ids))
||||||
|
|
||||||
|
|
||||||
def handle_get_comments_by_id(app, kwargs):
|
def handle_get_claim_comments(
        app: web.Application,
        claim_id: str,
        parent_id: str = None,
        page: int = 1,
        page_size: int = 50,
        top_level: bool = False
) -> dict:
    """Return one page of comments on *claim_id*.

    *top_level* restricts the listing to comments without a parent;
    *parent_id* filters to replies of a specific comment.
    """
    return comment_list(
        claim_id=claim_id,
        parent_id=parent_id,
        page=page,
        page_size=page_size,
        top_level=top_level
    )
||||||
|
|
||||||
|
|
||||||
def handle_get_claim_hidden_comments(app, kwargs):
|
def handle_get_claim_hidden_comments(
        app: web.Application,
        claim_id: str,
        hidden: bool,
        page: int = 1,
        page_size: int = 50,
) -> dict:
    """List a claim's comments filtered by visibility.

    Passes exclude_mode='hidden' when *hidden* is truthy, otherwise
    'visible' — the exact filtering semantics live in comment_list
    (presumably "exclude everything but this mode"; confirm in
    src.database.models).
    """
    exclude = 'hidden' if hidden else 'visible'
    return comment_list(
        claim_id=claim_id,
        exclude_mode=exclude,
        page=page,
        page_size=page_size
    )
||||||
|
|
||||||
|
|
||||||
async def handle_create_comment(app, params):
|
async def handle_abandon_comment(
        app: web.Application,
        comment_id: str,
        signature: str,
        signing_ts: str,
        **kwargs,
) -> dict:
    """Delete a comment after verifying the owning channel's signature.

    Flow: look up the comment, resolve its channel claim via lbrynet,
    validate the abandon signature over the comment_id, queue a DELETE
    webhook notification, then delete the row inside a transaction.

    Raises:
        ValueError: the channel cannot be resolved or the signature
            fails validation.
    """
    comment = get_comment(comment_id)
    try:
        channel = await get_claim_from_id(app, comment['channel_id'])
    except DoesNotExist:
        raise ValueError('Could not find a channel associated with the given comment')
    else:
        if not validate_signature_from_claim(channel, signature, signing_ts, comment_id):
            raise ValueError('Abandon signature could not be validated')
    # notification is queued before the delete so it still has the row data
    await app['webhooks'].spawn(send_notification(app, 'DELETE', comment))
    with app['db'].atomic():
        return {
            'abandoned': delete_comment(comment_id)
        }
||||||
|
|
||||||
|
|
||||||
async def handle_abandon_comment(app, params):
|
async def handle_hide_comments(app: web.Application, pieces: list, hide: bool = True) -> dict:
    """Hide (or un-hide) a batch of comments, validating each request
    against the signature of the claim's signing channel.

    Args:
        app: aiohttp application (db, lbrynet config).
        pieces: dicts each carrying 'comment_id', 'signature', 'signing_ts'.
        hide: True to hide, False to make visible again.

    Pieces that fail claim resolution or signature validation are
    silently dropped from the batch; the returned dict reports the final
    'hidden' and 'visible' comment_id lists for everything requested.
    """
    # let's get all the distinct claim_ids from the list of comment_ids
    pieces_by_id = {p['comment_id']: p for p in pieces}
    comment_ids = list(pieces_by_id.keys())
    comments = (Comment
                .select(Comment.comment_id, Comment.claim_id)
                .where(Comment.comment_id.in_(comment_ids))
                .tuples())

    # resolve the claims and map them to their corresponding comment_ids
    claims = {}
    for comment_id, claim_id in comments:
        try:
            # try and resolve the claim, if fails then we mark it as null
            # and remove the associated comment from the pieces
            if claim_id not in claims:
                claims[claim_id] = await get_claim_from_id(app, claim_id)

            # try to get a public key to validate
            if claims[claim_id] is None or 'signing_channel' not in claims[claim_id]:
                raise ValueError(f'could not get signing channel from claim_id: {claim_id}')

            # try to validate signature
            else:
                channel = claims[claim_id]['signing_channel']
                piece = pieces_by_id[comment_id]
                is_valid_signature = validate_signature_from_claim(
                    claim=channel,
                    signature=piece['signature'],
                    signing_ts=piece['signing_ts'],
                    data=piece['comment_id']
                )
                if not is_valid_signature:
                    raise ValueError(f'could not validate signature on comment_id: {comment_id}')

        except ValueError:
            # remove the piece from being hidden
            pieces_by_id.pop(comment_id)

    # remaining items in pieces_by_id have been able to successfully validate
    with app['db'].atomic():
        set_hidden_flag(list(pieces_by_id.keys()), hidden=hide)

    # re-read all requested ids so the response reflects the final state
    query = Comment.select().where(Comment.comment_id.in_(comment_ids)).objects()
    result = {
        'hidden': [c.comment_id for c in query if c.is_hidden],
        'visible': [c.comment_id for c in query if not c.is_hidden],
    }
    return result
||||||
|
|
||||||
|
|
||||||
async def handle_hide_comments(app, params):
|
async def handle_edit_comment(app, comment: str = None, comment_id: str = None,
                              signature: str = None, signing_ts: str = None, **params) -> dict:
    """Replace the body of an existing comment after signature validation.

    The signature is validated against the channel claim that owns the
    current comment, over the NEW comment text. A successful edit queues
    an UPDATE webhook notification and returns the refreshed comment.

    Raises:
        ValueError: the signature is invalid or the edit was rejected.
    """
    current = get_comment(comment_id)
    channel_claim = await get_claim_from_id(app, current['channel_id'])
    if not validate_signature_from_claim(channel_claim, signature, signing_ts, comment):
        raise ValueError('Signature could not be validated')

    with app['db'].atomic():
        if not edit_comment(comment_id, comment, signature, signing_ts):
            raise ValueError('Comment could not be edited')
        # re-read inside the transaction so we return the committed state
        updated_comment = get_comment(comment_id)
    await app['webhooks'].spawn(send_notification(app, 'UPDATE', updated_comment))
    return updated_comment
||||||
|
|
||||||
|
|
||||||
|
# TODO: retrieve stake amounts for each channel & store in db
async def handle_create_comment(app, comment: str = None, claim_id: str = None,
                                parent_id: str = None, channel_id: str = None, channel_name: str = None,
                                signature: str = None, signing_ts: str = None) -> dict:
    """Create a comment (or reply, when *parent_id* is set) inside a
    transaction, queue a CREATE webhook notification, and return the
    newly created comment dict.

    All validation is delegated to ``create_comment`` — presumably it
    raises ValueError on invalid input (confirm in src.database.models).
    """
    with app['db'].atomic():
        comment = create_comment(
            comment=comment,
            claim_id=claim_id,
            parent_id=parent_id,
            channel_id=channel_id,
            channel_name=channel_name,
            signature=signature,
            signing_ts=signing_ts
        )
    await app['webhooks'].spawn(send_notification(app, 'CREATE', comment))
    return comment
||||||
|
|
||||||
|
|
||||||
# JSON-RPC method name -> handler; process_json dispatches on this table.
# '# this gets used' marks methods observed in live traffic.
METHODS = {
    'ping': ping,
    'get_claim_comments': handle_get_claim_comments,  # this gets used
    'get_claim_hidden_comments': handle_get_claim_hidden_comments,  # this gets used
    'get_comment_ids': handle_get_comment_ids,
    'get_comments_by_id': handle_get_comments_by_id,  # this gets used
    'get_channel_from_comment_id': handle_get_channel_from_comment_id,  # this gets used
    'create_comment': handle_create_comment,  # this gets used
    'delete_comment': handle_abandon_comment,  # legacy alias for abandon_comment
    'abandon_comment': handle_abandon_comment,  # this gets used
    'hide_comments': handle_hide_comments,  # this gets used
    'edit_comment': handle_edit_comment  # this gets used
}
||||||
|
|
||||||
|
|
||||||
|
@ -86,16 +233,20 @@ async def process_json(app, body: dict) -> dict:
|
||||||
start = time.time()
|
start = time.time()
|
||||||
try:
|
try:
|
||||||
if asyncio.iscoroutinefunction(METHODS[method]):
|
if asyncio.iscoroutinefunction(METHODS[method]):
|
||||||
result = await METHODS[method](app, params)
|
result = await METHODS[method](app, **params)
|
||||||
else:
|
else:
|
||||||
result = METHODS[method](app, params)
|
result = METHODS[method](app, **params)
|
||||||
response['result'] = result
|
|
||||||
except Exception as err:
|
except Exception as err:
|
||||||
logger.exception(f'Got {type(err).__name__}: {err}')
|
logger.exception(f'Got {type(err).__name__}:\n{err}')
|
||||||
if type(err) in (ValueError, TypeError):
|
if type(err) in (ValueError, TypeError): # param error, not too important
|
||||||
response['error'] = make_error('INVALID_PARAMS', err)
|
response['error'] = make_error('INVALID_PARAMS', err)
|
||||||
else:
|
else:
|
||||||
response['error'] = make_error('INTERNAL', err)
|
response['error'] = make_error('INTERNAL', err)
|
||||||
|
await app['webhooks'].spawn(report_error(app, err, body))
|
||||||
|
else:
|
||||||
|
response['result'] = result
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
end = time.time()
|
end = time.time()
|
||||||
logger.debug(f'Time taken to process {method}: {end - start} secs')
|
logger.debug(f'Time taken to process {method}: {end - start} secs')
|
||||||
|
|
|
@ -1,136 +0,0 @@
|
||||||
import binascii
|
|
||||||
import logging
|
|
||||||
import re
|
|
||||||
from json import JSONDecodeError
|
|
||||||
|
|
||||||
import hashlib
|
|
||||||
import aiohttp
|
|
||||||
|
|
||||||
import ecdsa
|
|
||||||
from aiohttp import ClientConnectorError
|
|
||||||
from cryptography.hazmat.backends import default_backend
|
|
||||||
from cryptography.hazmat.primitives.serialization import load_der_public_key
|
|
||||||
from cryptography.hazmat.primitives import hashes
|
|
||||||
from cryptography.hazmat.primitives.asymmetric import ec
|
|
||||||
from cryptography.hazmat.primitives.asymmetric.utils import Prehashed
|
|
||||||
from cryptography.exceptions import InvalidSignature
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
ID_LIST = {'claim_id', 'parent_id', 'comment_id', 'channel_id'}
|
|
||||||
|
|
||||||
ERRORS = {
|
|
||||||
'INVALID_PARAMS': {'code': -32602, 'message': 'Invalid Method Parameter(s).'},
|
|
||||||
'INTERNAL': {'code': -32603, 'message': 'Internal Server Error. Please notify a LBRY Administrator.'},
|
|
||||||
'METHOD_NOT_FOUND': {'code': -32601, 'message': 'The method does not exist / is not available.'},
|
|
||||||
'INVALID_REQUEST': {'code': -32600, 'message': 'The JSON sent is not a valid Request object.'},
|
|
||||||
'PARSE_ERROR': {
|
|
||||||
'code': -32700,
|
|
||||||
'message': 'Invalid JSON was received by the server.\n'
|
|
||||||
'An error occurred on the server while parsing the JSON text.'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def make_error(error, exc=None) -> dict:
|
|
||||||
body = ERRORS[error] if error in ERRORS else ERRORS['INTERNAL']
|
|
||||||
try:
|
|
||||||
if exc:
|
|
||||||
body.update({type(exc).__name__: str(exc)})
|
|
||||||
finally:
|
|
||||||
return body
|
|
||||||
|
|
||||||
|
|
||||||
async def request_lbrynet(app, method, **params):
|
|
||||||
body = {'method': method, 'params': {**params}}
|
|
||||||
try:
|
|
||||||
async with aiohttp.request('POST', app['config']['lbrynet'], json=body) as req:
|
|
||||||
try:
|
|
||||||
resp = await req.json()
|
|
||||||
except JSONDecodeError as jde:
|
|
||||||
logger.exception(jde.msg)
|
|
||||||
raise Exception('JSON Decode Error In lbrynet request')
|
|
||||||
finally:
|
|
||||||
if 'result' in resp:
|
|
||||||
return resp['result']
|
|
||||||
raise ValueError('LBRYNET Request Error', {'error': resp['error']})
|
|
||||||
except (ConnectionRefusedError, ClientConnectorError):
|
|
||||||
logger.critical("Connection to the LBRYnet daemon failed, make sure it's running.")
|
|
||||||
raise Exception("Server cannot verify delete signature")
|
|
||||||
|
|
||||||
|
|
||||||
async def get_claim_from_id(app, claim_id, **kwargs):
|
|
||||||
return (await request_lbrynet(app, 'claim_search', no_totals=True, claim_id=claim_id, **kwargs))['items'][0]
|
|
||||||
|
|
||||||
|
|
||||||
def get_encoded_signature(signature):
|
|
||||||
signature = signature.encode() if type(signature) is str else signature
|
|
||||||
r = int(signature[:int(len(signature) / 2)], 16)
|
|
||||||
s = int(signature[int(len(signature) / 2):], 16)
|
|
||||||
return ecdsa.util.sigencode_der(r, s, len(signature) * 4)
|
|
||||||
|
|
||||||
|
|
||||||
def channel_matches_pattern_or_error(channel_id, channel_name):
|
|
||||||
assert channel_id and channel_name
|
|
||||||
assert re.fullmatch(
|
|
||||||
'^@(?:(?![\x00-\x08\x0b\x0c\x0e-\x1f\x23-\x26'
|
|
||||||
'\x2f\x3a\x3d\x3f-\x40\uFFFE-\U0000FFFF]).){1,255}$',
|
|
||||||
channel_name
|
|
||||||
)
|
|
||||||
assert re.fullmatch('([a-f0-9]|[A-F0-9]){40}', channel_id)
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def is_signature_valid(encoded_signature, signature_digest, public_key_bytes):
|
|
||||||
try:
|
|
||||||
public_key = load_der_public_key(public_key_bytes, default_backend())
|
|
||||||
public_key.verify(encoded_signature, signature_digest, ec.ECDSA(Prehashed(hashes.SHA256())))
|
|
||||||
return True
|
|
||||||
except (ValueError, InvalidSignature):
|
|
||||||
logger.exception('Signature validation failed')
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def is_valid_base_comment(comment, claim_id, parent_id=None, **kwargs):
|
|
||||||
try:
|
|
||||||
assert 0 < len(comment) <= 2000
|
|
||||||
assert (parent_id is None) or (0 < len(parent_id) <= 2000)
|
|
||||||
assert re.fullmatch('[a-z0-9]{40}', claim_id)
|
|
||||||
except Exception:
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def is_valid_credential_input(channel_id=None, channel_name=None, signature=None, signing_ts=None, **kwargs):
|
|
||||||
if channel_name or channel_name or signature or signing_ts:
|
|
||||||
try:
|
|
||||||
assert channel_matches_pattern_or_error(channel_id, channel_name)
|
|
||||||
if signature or signing_ts:
|
|
||||||
assert len(signature) == 128
|
|
||||||
assert signing_ts.isalnum()
|
|
||||||
except Exception:
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def validate_signature_from_claim(claim, signature, signing_ts, data: str):
|
|
||||||
try:
|
|
||||||
if claim:
|
|
||||||
public_key = claim['value']['public_key']
|
|
||||||
claim_hash = binascii.unhexlify(claim['claim_id'].encode())[::-1]
|
|
||||||
injest = b''.join((signing_ts.encode(), claim_hash, data.encode()))
|
|
||||||
return is_signature_valid(
|
|
||||||
encoded_signature=get_encoded_signature(signature),
|
|
||||||
signature_digest=hashlib.sha256(injest).digest(),
|
|
||||||
public_key_bytes=binascii.unhexlify(public_key.encode())
|
|
||||||
)
|
|
||||||
except:
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def clean_input_params(kwargs: dict):
|
|
||||||
for k, v in kwargs.items():
|
|
||||||
if type(v) is str:
|
|
||||||
kwargs[k] = v.strip()
|
|
||||||
if k in ID_LIST:
|
|
||||||
kwargs[k] = v.lower()
|
|
117
src/server/validation.py
Normal file
117
src/server/validation.py
Normal file
|
@ -0,0 +1,117 @@
|
||||||
|
import logging
|
||||||
|
import binascii
|
||||||
|
import hashlib
|
||||||
|
import re
|
||||||
|
|
||||||
|
import ecdsa
|
||||||
|
import typing
|
||||||
|
from cryptography.exceptions import InvalidSignature
|
||||||
|
from cryptography.hazmat.backends import default_backend
|
||||||
|
from cryptography.hazmat.primitives import hashes
|
||||||
|
from cryptography.hazmat.primitives.asymmetric import ec
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.utils import Prehashed
|
||||||
|
from cryptography.hazmat.primitives.serialization import load_der_public_key
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def is_valid_channel(channel_id: str, channel_name: str) -> bool:
    """True iff both fields are present and individually well-formed.

    Requires *channel_id* to pass claim_id_is_valid and *channel_name*
    to pass channel_name_is_valid.
    """
    # bool() enforces the annotated contract: the previous `and` chain
    # could leak a falsy operand (None / '') instead of returning False.
    return bool(
        channel_id and claim_id_is_valid(channel_id)
        and channel_name and channel_name_is_valid(channel_name)
    )
||||||
|
|
||||||
|
|
||||||
|
def is_signature_valid(encoded_signature, signature_digest, public_key_bytes) -> bool:
    """Verify a DER-encoded ECDSA signature over a pre-hashed digest.

    Args:
        encoded_signature: DER-encoded signature bytes.
        signature_digest: the SHA-256 digest that was signed
            (already hashed — Prehashed is passed to verify()).
        public_key_bytes: DER-encoded public key.

    Returns False (and logs the exception) on malformed keys/signatures
    or verification failure.
    """
    try:
        public_key = load_der_public_key(public_key_bytes, default_backend())
        public_key.verify(encoded_signature, signature_digest, ec.ECDSA(Prehashed(hashes.SHA256())))
        return True
    except (ValueError, InvalidSignature):
        logger.exception('Signature validation failed')
        return False
||||||
|
|
||||||
|
|
||||||
|
def channel_name_is_valid(channel_name: str) -> bool:
    """Whether *channel_name* is a well-formed channel name: a leading
    '@' followed by 1-255 characters drawn from the allowed set (control
    characters, '#$%&', '/', ':', '=', '?', '@', and non-characters are
    all rejected).
    """
    pattern = ('@(?:(?![\x00-\x08\x0b\x0c\x0e-\x1f\x23-\x26'
               '\x2f\x3a\x3d\x3f-\x40\uFFFE-\U0000FFFF]).){1,255}')
    return re.fullmatch(pattern, channel_name) is not None
||||||
|
|
||||||
|
|
||||||
|
def body_is_valid(comment: str) -> bool:
    """A comment body is valid when it is non-empty and at most 2000 chars."""
    length = len(comment)
    return length > 0 and length <= 2000
||||||
|
|
||||||
|
|
||||||
|
def comment_id_is_valid(comment_id: str) -> bool:
    """A comment id is exactly 64 characters, uniformly lowercase
    alphanumeric or uniformly uppercase alphanumeric (no mixed case).
    """
    return bool(re.fullmatch('([a-z0-9]{64}|[A-Z0-9]{64})', comment_id))
||||||
|
|
||||||
|
|
||||||
|
def claim_id_is_valid(claim_id: str) -> bool:
    """A claim id is exactly 40 characters, uniformly lowercase
    alphanumeric or uniformly uppercase alphanumeric (no mixed case).
    """
    return bool(re.fullmatch('([a-z0-9]{40}|[A-Z0-9]{40})', claim_id))
||||||
|
|
||||||
|
|
||||||
|
# default to None so params can be treated as kwargs; param count becomes more manageable
def is_valid_base_comment(
        comment: str = None,
        claim_id: str = None,
        parent_id: str = None,
        strict: bool = False,
        **kwargs,
) -> bool:
    """Validate the structural fields of a comment request.

    Args:
        comment: the comment body (required, 1-2000 chars).
        claim_id: claim the comment targets.
        parent_id: id of the comment being replied to, if any.
        strict: when True, claim_id is mandatory and parent_id (if given)
            must be well-formed; when False, either a valid claim_id or a
            valid parent_id suffices.
        **kwargs: credential fields, forwarded to is_valid_credential_input.

    Returns True only when the base fields AND the credentials validate.
    """
    # Explicit checks rather than assert: asserts are stripped under
    # `python -O`, which made the old version accept anything.
    if not (comment and body_is_valid(comment)):
        return False
    # strict mode assumes that the parent_id might not exist
    if strict:
        if not (claim_id and claim_id_is_valid(claim_id)):
            return False
        if parent_id is not None and not comment_id_is_valid(parent_id):
            return False
    # non-strict removes reference restrictions
    else:
        if claim_id:
            if not claim_id_is_valid(claim_id):
                return False
        elif parent_id:
            if not comment_id_is_valid(parent_id):
                return False
        else:
            return False
    return is_valid_credential_input(**kwargs)
||||||
|
|
||||||
|
|
||||||
|
def is_valid_credential_input(channel_id: str = None, channel_name: str = None,
                              signature: str = None, signing_ts: str = None) -> bool:
    """Validate the credential fields of a signed comment request.

    All four fields must be present; the channel must be well-formed,
    the signature must be 128 hex characters, and the signing timestamp
    must be alphanumeric. Failures are logged and reported as False.
    """
    try:
        # Explicit raises rather than assert: asserts are stripped under
        # `python -O`, which made the old version accept anything.
        if None in (channel_id, channel_name, signature, signing_ts):
            raise ValueError('all credential fields must be provided')
        if not is_valid_channel(channel_id, channel_name):
            raise ValueError('channel is not well-formed')
        if len(signature) != 128:
            raise ValueError('signature must be 128 characters')
        if not signing_ts.isalnum():
            raise ValueError('signing_ts must be alphanumeric')
        return True

    except Exception as e:
        logger.exception(f'Failed to validate channel: lbry://{channel_name}#{channel_id}, '
                         f'signature: {signature} signing_ts: {signing_ts}')
        return False
||||||
|
|
||||||
|
|
||||||
|
def validate_signature_from_claim(claim: dict, signature: typing.Union[str, bytes],
                                  signing_ts: str, data: str) -> bool:
    """Verify that *data* was signed by the channel described by *claim*.

    The signed payload is signing_ts + reversed claim-hash bytes + data,
    digested with SHA-256 and checked against the claim's public key.
    Returns False when *claim* is falsy or anything goes wrong while
    decoding/verifying.
    """
    try:
        if claim:
            public_key = claim['value']['public_key']
            # claim ids are stored hex-reversed relative to the raw hash
            claim_hash = binascii.unhexlify(claim['claim_id'].encode())[::-1]
            injest = b''.join((signing_ts.encode(), claim_hash, data.encode()))
            return is_signature_valid(
                encoded_signature=get_encoded_signature(signature),
                signature_digest=hashlib.sha256(injest).digest(),
                public_key_bytes=binascii.unhexlify(public_key.encode())
            )
    # narrowed from a bare `except:`, which also swallowed
    # KeyboardInterrupt / SystemExit
    except Exception:
        return False
    return False
||||||
|
|
||||||
|
|
||||||
|
def get_encoded_signature(signature: typing.Union[str, bytes]) -> bytes:
    """Re-encode a raw hex-encoded (r || s) ECDSA signature as DER.

    *signature* is the hex concatenation of r and s; each half is parsed
    as a base-16 integer and passed to ecdsa's DER encoder. The order
    argument is len(signature) * 4 — i.e. the bit width of the full hex
    string (512 for the 128-char signatures this server expects).
    """
    signature = signature.encode() if type(signature) is str else signature
    # first half of the hex string is r, second half is s
    r = int(signature[:int(len(signature) / 2)], 16)
    s = int(signature[int(len(signature) / 2):], 16)
    return ecdsa.util.sigencode_der(r, s, len(signature) * 4)
|
@ -1,17 +0,0 @@
|
||||||
# cython: language_level=3
|
|
||||||
import json
|
|
||||||
import pathlib
|
|
||||||
|
|
||||||
root_dir = pathlib.Path(__file__).parent.parent
|
|
||||||
config_path = root_dir / 'config' / 'conf.json'
|
|
||||||
|
|
||||||
|
|
||||||
def get_config(filepath):
|
|
||||||
with open(filepath, 'r') as cfile:
|
|
||||||
conf = json.load(cfile)
|
|
||||||
for key, path in conf['path'].items():
|
|
||||||
conf['path'][key] = str(root_dir / path)
|
|
||||||
return conf
|
|
||||||
|
|
||||||
|
|
||||||
config = get_config(config_path)
|
|
|
@ -1,18 +1,13 @@
|
||||||
import unittest
|
|
||||||
|
|
||||||
from random import randint
|
from random import randint
|
||||||
import faker
|
import faker
|
||||||
from faker.providers import internet
|
from faker.providers import internet
|
||||||
from faker.providers import lorem
|
from faker.providers import lorem
|
||||||
from faker.providers import misc
|
from faker.providers import misc
|
||||||
|
|
||||||
from src.database.queries import get_comments_by_id
|
from src.database.models import create_comment
|
||||||
from src.database.queries import get_comment_ids
|
from src.database.models import delete_comment
|
||||||
from src.database.queries import get_claim_comments
|
from src.database.models import comment_list, get_comment
|
||||||
from src.database.queries import get_claim_hidden_comments
|
from src.database.models import set_hidden_flag
|
||||||
from src.database.writes import create_comment_or_error
|
|
||||||
from src.database.queries import hide_comments_by_id
|
|
||||||
from src.database.queries import delete_comment_by_id
|
|
||||||
from test.testcase import DatabaseTestCase
|
from test.testcase import DatabaseTestCase
|
||||||
|
|
||||||
fake = faker.Faker()
|
fake = faker.Faker()
|
||||||
|
@ -27,69 +22,58 @@ class TestDatabaseOperations(DatabaseTestCase):
|
||||||
self.claimId = '529357c3422c6046d3fec76be2358004ba22e340'
|
self.claimId = '529357c3422c6046d3fec76be2358004ba22e340'
|
||||||
|
|
||||||
def test01NamedComments(self):
|
def test01NamedComments(self):
|
||||||
comment = create_comment_or_error(
|
comment = create_comment(
|
||||||
conn=self.conn,
|
|
||||||
claim_id=self.claimId,
|
claim_id=self.claimId,
|
||||||
comment='This is a named comment',
|
comment='This is a named comment',
|
||||||
channel_name='@username',
|
channel_name='@username',
|
||||||
channel_id='529357c3422c6046d3fec76be2358004ba22abcd',
|
channel_id='529357c3422c6046d3fec76be2358004ba22abcd',
|
||||||
signature=fake.uuid4(),
|
signature='22'*64,
|
||||||
signing_ts='aaa'
|
signing_ts='aaa'
|
||||||
)
|
)
|
||||||
self.assertIsNotNone(comment)
|
self.assertIsNotNone(comment)
|
||||||
self.assertNotIn('parent_in', comment)
|
self.assertNotIn('parent_in', comment)
|
||||||
|
|
||||||
previous_id = comment['comment_id']
|
previous_id = comment['comment_id']
|
||||||
reply = create_comment_or_error(
|
reply = create_comment(
|
||||||
conn=self.conn,
|
|
||||||
claim_id=self.claimId,
|
claim_id=self.claimId,
|
||||||
comment='This is a named response',
|
comment='This is a named response',
|
||||||
channel_name='@another_username',
|
channel_name='@another_username',
|
||||||
channel_id='529357c3422c6046d3fec76be2358004ba224bcd',
|
channel_id='529357c3422c6046d3fec76be2358004ba224bcd',
|
||||||
parent_id=previous_id,
|
parent_id=previous_id,
|
||||||
signature=fake.uuid4(),
|
signature='11'*64,
|
||||||
signing_ts='aaa'
|
signing_ts='aaa'
|
||||||
)
|
)
|
||||||
self.assertIsNotNone(reply)
|
self.assertIsNotNone(reply)
|
||||||
self.assertEqual(reply['parent_id'], comment['comment_id'])
|
self.assertEqual(reply['parent_id'], comment['comment_id'])
|
||||||
|
|
||||||
def test02AnonymousComments(self):
|
def test02AnonymousComments(self):
|
||||||
comment = create_comment_or_error(
|
self.assertRaises(
|
||||||
conn=self.conn,
|
ValueError,
|
||||||
|
create_comment,
|
||||||
claim_id=self.claimId,
|
claim_id=self.claimId,
|
||||||
comment='This is an ANONYMOUS comment'
|
comment='This is an ANONYMOUS comment'
|
||||||
)
|
)
|
||||||
self.assertIsNotNone(comment)
|
|
||||||
previous_id = comment['comment_id']
|
|
||||||
reply = create_comment_or_error(
|
|
||||||
conn=self.conn,
|
|
||||||
claim_id=self.claimId,
|
|
||||||
comment='This is an unnamed response',
|
|
||||||
parent_id=previous_id
|
|
||||||
)
|
|
||||||
self.assertIsNotNone(reply)
|
|
||||||
self.assertEqual(reply['parent_id'], comment['comment_id'])
|
|
||||||
|
|
||||||
def test03SignedComments(self):
|
def test03SignedComments(self):
|
||||||
comment = create_comment_or_error(
|
comment = create_comment(
|
||||||
conn=self.conn,
|
|
||||||
claim_id=self.claimId,
|
claim_id=self.claimId,
|
||||||
comment='I like big butts and i cannot lie',
|
comment='I like big butts and i cannot lie',
|
||||||
channel_name='@sirmixalot',
|
channel_name='@sirmixalot',
|
||||||
channel_id='529357c3422c6046d3fec76be2358005ba22abcd',
|
channel_id='529357c3422c6046d3fec76be2358005ba22abcd',
|
||||||
signature=fake.uuid4(),
|
signature='24'*64,
|
||||||
signing_ts='asdasd'
|
signing_ts='asdasd'
|
||||||
)
|
)
|
||||||
self.assertIsNotNone(comment)
|
self.assertIsNotNone(comment)
|
||||||
self.assertIn('signing_ts', comment)
|
self.assertIn('signing_ts', comment)
|
||||||
|
|
||||||
previous_id = comment['comment_id']
|
previous_id = comment['comment_id']
|
||||||
reply = create_comment_or_error(
|
reply = create_comment(
|
||||||
conn=self.conn,
|
|
||||||
claim_id=self.claimId,
|
claim_id=self.claimId,
|
||||||
comment='This is a LBRY verified response',
|
comment='This is a LBRY verified response',
|
||||||
channel_name='@LBRY',
|
channel_name='@LBRY',
|
||||||
channel_id='529357c3422c6046d3fec76be2358001ba224bcd',
|
channel_id='529357c3422c6046d3fec76be2358001ba224bcd',
|
||||||
parent_id=previous_id,
|
parent_id=previous_id,
|
||||||
signature=fake.uuid4(),
|
signature='12'*64,
|
||||||
signing_ts='sfdfdfds'
|
signing_ts='sfdfdfds'
|
||||||
)
|
)
|
||||||
self.assertIsNotNone(reply)
|
self.assertIsNotNone(reply)
|
||||||
|
@ -98,129 +82,109 @@ class TestDatabaseOperations(DatabaseTestCase):
|
||||||
|
|
||||||
def test04UsernameVariations(self):
|
def test04UsernameVariations(self):
|
||||||
self.assertRaises(
|
self.assertRaises(
|
||||||
AssertionError,
|
ValueError,
|
||||||
callable=create_comment_or_error,
|
create_comment,
|
||||||
conn=self.conn,
|
|
||||||
claim_id=self.claimId,
|
claim_id=self.claimId,
|
||||||
channel_name='$#(@#$@#$',
|
channel_name='$#(@#$@#$',
|
||||||
channel_id='529357c3422c6046d3fec76be2358001ba224b23',
|
channel_id='529357c3422c6046d3fec76be2358001ba224b23',
|
||||||
comment='this is an invalid username'
|
comment='this is an invalid username',
|
||||||
|
signature='1' * 128,
|
||||||
|
signing_ts='123'
|
||||||
)
|
)
|
||||||
valid_username = create_comment_or_error(
|
|
||||||
conn=self.conn,
|
valid_username = create_comment(
|
||||||
claim_id=self.claimId,
|
claim_id=self.claimId,
|
||||||
channel_name='@' + 'a' * 255,
|
channel_name='@' + 'a' * 255,
|
||||||
channel_id='529357c3422c6046d3fec76be2358001ba224b23',
|
channel_id='529357c3422c6046d3fec76be2358001ba224b23',
|
||||||
comment='this is a valid username'
|
comment='this is a valid username',
|
||||||
|
signature='1'*128,
|
||||||
|
signing_ts='123'
|
||||||
)
|
)
|
||||||
self.assertIsNotNone(valid_username)
|
self.assertIsNotNone(valid_username)
|
||||||
self.assertRaises(AssertionError,
|
|
||||||
callable=create_comment_or_error,
|
|
||||||
conn=self.conn,
|
|
||||||
claim_id=self.claimId,
|
|
||||||
channel_name='@' + 'a' * 256,
|
|
||||||
channel_id='529357c3422c6046d3fec76be2358001ba224b23',
|
|
||||||
comment='this username is too long'
|
|
||||||
)
|
|
||||||
|
|
||||||
self.assertRaises(
|
self.assertRaises(
|
||||||
AssertionError,
|
ValueError,
|
||||||
callable=create_comment_or_error,
|
create_comment,
|
||||||
conn=self.conn,
|
claim_id=self.claimId,
|
||||||
|
channel_name='@' + 'a' * 256,
|
||||||
|
channel_id='529357c3422c6046d3fec76be2358001ba224b23',
|
||||||
|
comment='this username is too long',
|
||||||
|
signature='2' * 128,
|
||||||
|
signing_ts='123'
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertRaises(
|
||||||
|
ValueError,
|
||||||
|
create_comment,
|
||||||
claim_id=self.claimId,
|
claim_id=self.claimId,
|
||||||
channel_name='',
|
channel_name='',
|
||||||
channel_id='529357c3422c6046d3fec76be2358001ba224b23',
|
channel_id='529357c3422c6046d3fec76be2358001ba224b23',
|
||||||
comment='this username should not default to ANONYMOUS'
|
comment='this username should not default to ANONYMOUS',
|
||||||
|
signature='3' * 128,
|
||||||
|
signing_ts='123'
|
||||||
)
|
)
|
||||||
|
|
||||||
self.assertRaises(
|
self.assertRaises(
|
||||||
AssertionError,
|
ValueError,
|
||||||
callable=create_comment_or_error,
|
create_comment,
|
||||||
conn=self.conn,
|
|
||||||
claim_id=self.claimId,
|
claim_id=self.claimId,
|
||||||
channel_name='@',
|
channel_name='@',
|
||||||
channel_id='529357c3422c6046d3fec76be2358001ba224b23',
|
channel_id='529357c3422c6046d3fec76be2358001ba224b23',
|
||||||
comment='this username is too short'
|
comment='this username is too short',
|
||||||
|
signature='3' * 128,
|
||||||
|
signing_ts='123'
|
||||||
)
|
)
|
||||||
|
|
||||||
def test05InsertRandomComments(self):
|
def test05HideComments(self):
|
||||||
# TODO: Fix this test into something practical
|
comm = create_comment(
|
||||||
self.skipTest('This is a bad test')
|
comment='Comment #1',
|
||||||
top_comments, claim_ids = generate_top_comments_random()
|
claim_id=self.claimId,
|
||||||
total = 0
|
channel_id='1'*40,
|
||||||
success = 0
|
channel_name='@Doge123',
|
||||||
for _, comments in top_comments.items():
|
signature='a'*128,
|
||||||
for i, comment in enumerate(comments):
|
signing_ts='123'
|
||||||
with self.subTest(comment=comment):
|
)
|
||||||
result = create_comment_or_error(self.conn, **comment)
|
comment = get_comment(comm['comment_id'])
|
||||||
if result:
|
|
||||||
success += 1
|
|
||||||
comments[i] = result
|
|
||||||
del comment
|
|
||||||
total += len(comments)
|
|
||||||
self.assertLessEqual(success, total)
|
|
||||||
self.assertGreater(success, 0)
|
|
||||||
success = 0
|
|
||||||
for reply in generate_replies_random(top_comments):
|
|
||||||
reply_id = create_comment_or_error(self.conn, **reply)
|
|
||||||
if reply_id:
|
|
||||||
success += 1
|
|
||||||
self.assertGreater(success, 0)
|
|
||||||
self.assertLess(success, total)
|
|
||||||
del top_comments
|
|
||||||
del claim_ids
|
|
||||||
|
|
||||||
def test06GenerateAndListComments(self):
|
|
||||||
# TODO: Make this test not suck
|
|
||||||
self.skipTest('this is a stupid test')
|
|
||||||
top_comments, claim_ids = generate_top_comments()
|
|
||||||
total, success = 0, 0
|
|
||||||
for _, comments in top_comments.items():
|
|
||||||
for i, comment in enumerate(comments):
|
|
||||||
result = create_comment_or_error(self.conn, **comment)
|
|
||||||
if result:
|
|
||||||
success += 1
|
|
||||||
comments[i] = result
|
|
||||||
del comment
|
|
||||||
total += len(comments)
|
|
||||||
self.assertEqual(total, success)
|
|
||||||
self.assertGreater(total, 0)
|
|
||||||
for reply in generate_replies(top_comments):
|
|
||||||
create_comment_or_error(self.conn, **reply)
|
|
||||||
for claim_id in claim_ids:
|
|
||||||
comments_ids = get_comment_ids(self.conn, claim_id)
|
|
||||||
with self.subTest(comments_ids=comments_ids):
|
|
||||||
self.assertIs(type(comments_ids), list)
|
|
||||||
self.assertGreaterEqual(len(comments_ids), 0)
|
|
||||||
self.assertLessEqual(len(comments_ids), 50)
|
|
||||||
replies = get_comments_by_id(self.conn, comments_ids)
|
|
||||||
self.assertLessEqual(len(replies), 50)
|
|
||||||
self.assertEqual(len(replies), len(comments_ids))
|
|
||||||
|
|
||||||
def test07HideComments(self):
|
|
||||||
comm = create_comment_or_error(self.conn, 'Comment #1', self.claimId, '1'*40, '@Doge123', 'a'*128, '123')
|
|
||||||
comment = get_comments_by_id(self.conn, [comm['comment_id']]).pop()
|
|
||||||
self.assertFalse(comment['is_hidden'])
|
self.assertFalse(comment['is_hidden'])
|
||||||
success = hide_comments_by_id(self.conn, [comm['comment_id']])
|
|
||||||
|
success = set_hidden_flag([comm['comment_id']])
|
||||||
self.assertTrue(success)
|
self.assertTrue(success)
|
||||||
comment = get_comments_by_id(self.conn, [comm['comment_id']]).pop()
|
|
||||||
self.assertTrue(comment['is_hidden'])
|
comment = get_comment(comm['comment_id'])
|
||||||
success = hide_comments_by_id(self.conn, [comm['comment_id']])
|
|
||||||
self.assertTrue(success)
|
|
||||||
comment = get_comments_by_id(self.conn, [comm['comment_id']]).pop()
|
|
||||||
self.assertTrue(comment['is_hidden'])
|
self.assertTrue(comment['is_hidden'])
|
||||||
|
|
||||||
def test08DeleteComments(self):
|
success = set_hidden_flag([comm['comment_id']])
|
||||||
comm = create_comment_or_error(self.conn, 'Comment #1', self.claimId, '1'*40, '@Doge123', 'a'*128, '123')
|
self.assertTrue(success)
|
||||||
comments = get_claim_comments(self.conn, self.claimId)
|
|
||||||
match = list(filter(lambda x: comm['comment_id'] == x['comment_id'], comments['items']))
|
comment = get_comment(comm['comment_id'])
|
||||||
self.assertTrue(match)
|
self.assertTrue(comment['is_hidden'])
|
||||||
deleted = delete_comment_by_id(self.conn, comm['comment_id'])
|
|
||||||
|
def test06DeleteComments(self):
|
||||||
|
# make sure that the comment was created
|
||||||
|
comm = create_comment(
|
||||||
|
comment='Comment #1',
|
||||||
|
claim_id=self.claimId,
|
||||||
|
channel_id='1'*40,
|
||||||
|
channel_name='@Doge123',
|
||||||
|
signature='a'*128,
|
||||||
|
signing_ts='123'
|
||||||
|
)
|
||||||
|
comments = comment_list(self.claimId)
|
||||||
|
match = [x for x in comments['items'] if x['comment_id'] == comm['comment_id']]
|
||||||
|
self.assertTrue(len(match) > 0)
|
||||||
|
|
||||||
|
deleted = delete_comment(comm['comment_id'])
|
||||||
self.assertTrue(deleted)
|
self.assertTrue(deleted)
|
||||||
comments = get_claim_comments(self.conn, self.claimId)
|
|
||||||
match = list(filter(lambda x: comm['comment_id'] == x['comment_id'], comments['items']))
|
# make sure that we can't find the comment here
|
||||||
|
comments = comment_list(self.claimId)
|
||||||
|
match = [x for x in comments['items'] if x['comment_id'] == comm['comment_id']]
|
||||||
self.assertFalse(match)
|
self.assertFalse(match)
|
||||||
deleted = delete_comment_by_id(self.conn, comm['comment_id'])
|
self.assertRaises(
|
||||||
self.assertFalse(deleted)
|
ValueError,
|
||||||
|
delete_comment,
|
||||||
|
comment_id=comm['comment_id'],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class ListDatabaseTest(DatabaseTestCase):
|
class ListDatabaseTest(DatabaseTestCase):
|
||||||
|
@ -231,61 +195,75 @@ class ListDatabaseTest(DatabaseTestCase):
|
||||||
def testLists(self):
|
def testLists(self):
|
||||||
for claim_id in self.claim_ids:
|
for claim_id in self.claim_ids:
|
||||||
with self.subTest(claim_id=claim_id):
|
with self.subTest(claim_id=claim_id):
|
||||||
comments = get_claim_comments(self.conn, claim_id)
|
comments = comment_list(claim_id)
|
||||||
self.assertIsNotNone(comments)
|
self.assertIsNotNone(comments)
|
||||||
self.assertGreater(comments['page_size'], 0)
|
self.assertGreater(comments['page_size'], 0)
|
||||||
self.assertIn('has_hidden_comments', comments)
|
self.assertIn('has_hidden_comments', comments)
|
||||||
self.assertFalse(comments['has_hidden_comments'])
|
self.assertFalse(comments['has_hidden_comments'])
|
||||||
top_comments = get_claim_comments(self.conn, claim_id, top_level=True, page=1, page_size=50)
|
top_comments = comment_list(claim_id, top_level=True, page=1, page_size=50)
|
||||||
self.assertIsNotNone(top_comments)
|
self.assertIsNotNone(top_comments)
|
||||||
self.assertEqual(top_comments['page_size'], 50)
|
self.assertEqual(top_comments['page_size'], 50)
|
||||||
self.assertEqual(top_comments['page'], 1)
|
self.assertEqual(top_comments['page'], 1)
|
||||||
self.assertGreaterEqual(top_comments['total_pages'], 0)
|
self.assertGreaterEqual(top_comments['total_pages'], 0)
|
||||||
self.assertGreaterEqual(top_comments['total_items'], 0)
|
self.assertGreaterEqual(top_comments['total_items'], 0)
|
||||||
comment_ids = get_comment_ids(self.conn, claim_id, page_size=50, page=1)
|
comment_ids = comment_list(claim_id, page_size=50, page=1)
|
||||||
with self.subTest(comment_ids=comment_ids):
|
with self.subTest(comment_ids=comment_ids):
|
||||||
self.assertIsNotNone(comment_ids)
|
self.assertIsNotNone(comment_ids)
|
||||||
self.assertLessEqual(len(comment_ids), 50)
|
self.assertLessEqual(len(comment_ids), 50)
|
||||||
matching_comments = get_comments_by_id(self.conn, comment_ids)
|
matching_comments = (comment_ids)
|
||||||
self.assertIsNotNone(matching_comments)
|
self.assertIsNotNone(matching_comments)
|
||||||
self.assertEqual(len(matching_comments), len(comment_ids))
|
self.assertEqual(len(matching_comments), len(comment_ids))
|
||||||
|
|
||||||
def testHiddenCommentLists(self):
|
def testHiddenCommentLists(self):
|
||||||
claim_id = 'a'*40
|
claim_id = 'a'*40
|
||||||
comm1 = create_comment_or_error(self.conn, 'Comment #1', claim_id, '1'*40, '@Doge123', 'a'*128, '123')
|
comm1 = create_comment(
|
||||||
comm2 = create_comment_or_error(self.conn, 'Comment #2', claim_id, '1'*40, '@Doge123', 'b'*128, '123')
|
'Comment #1',
|
||||||
comm3 = create_comment_or_error(self.conn, 'Comment #3', claim_id, '1'*40, '@Doge123', 'c'*128, '123')
|
claim_id,
|
||||||
|
channel_id='1'*40,
|
||||||
|
channel_name='@Doge123',
|
||||||
|
signature='a'*128,
|
||||||
|
signing_ts='123'
|
||||||
|
)
|
||||||
|
comm2 = create_comment(
|
||||||
|
'Comment #2', claim_id,
|
||||||
|
channel_id='1'*40,
|
||||||
|
channel_name='@Doge123',
|
||||||
|
signature='b'*128,
|
||||||
|
signing_ts='123'
|
||||||
|
)
|
||||||
|
comm3 = create_comment(
|
||||||
|
'Comment #3', claim_id,
|
||||||
|
channel_id='1'*40,
|
||||||
|
channel_name='@Doge123',
|
||||||
|
signature='c'*128,
|
||||||
|
signing_ts='123'
|
||||||
|
)
|
||||||
comments = [comm1, comm2, comm3]
|
comments = [comm1, comm2, comm3]
|
||||||
|
|
||||||
comment_list = get_claim_comments(self.conn, claim_id)
|
listed_comments = comment_list(claim_id)
|
||||||
self.assertIn('items', comment_list)
|
self.assertEqual(len(comments), listed_comments['total_items'])
|
||||||
self.assertIn('has_hidden_comments', comment_list)
|
self.assertFalse(listed_comments['has_hidden_comments'])
|
||||||
self.assertEqual(len(comments), comment_list['total_items'])
|
|
||||||
self.assertIn('has_hidden_comments', comment_list)
|
|
||||||
self.assertFalse(comment_list['has_hidden_comments'])
|
|
||||||
hide_comments_by_id(self.conn, [comm2['comment_id']])
|
|
||||||
|
|
||||||
default_comments = get_claim_hidden_comments(self.conn, claim_id)
|
set_hidden_flag([comm2['comment_id']])
|
||||||
self.assertIn('has_hidden_comments', default_comments)
|
hidden = comment_list(claim_id, exclude_mode='hidden')
|
||||||
|
|
||||||
hidden_comments = get_claim_hidden_comments(self.conn, claim_id, hidden=True)
|
self.assertTrue(hidden['has_hidden_comments'])
|
||||||
self.assertIn('has_hidden_comments', hidden_comments)
|
self.assertGreater(len(hidden['items']), 0)
|
||||||
self.assertEqual(default_comments, hidden_comments)
|
|
||||||
|
|
||||||
hidden_comment = hidden_comments['items'][0]
|
visible = comment_list(claim_id, exclude_mode='visible')
|
||||||
|
self.assertFalse(visible['has_hidden_comments'])
|
||||||
|
self.assertNotEqual(listed_comments['items'], visible['items'])
|
||||||
|
|
||||||
|
# make sure the hidden comment is the one we marked as hidden
|
||||||
|
hidden_comment = hidden['items'][0]
|
||||||
self.assertEqual(hidden_comment['comment_id'], comm2['comment_id'])
|
self.assertEqual(hidden_comment['comment_id'], comm2['comment_id'])
|
||||||
|
|
||||||
visible_comments = get_claim_hidden_comments(self.conn, claim_id, hidden=False)
|
hidden_ids = [c['comment_id'] for c in hidden['items']]
|
||||||
self.assertIn('has_hidden_comments', visible_comments)
|
visible_ids = [c['comment_id'] for c in visible['items']]
|
||||||
self.assertNotIn(hidden_comment, visible_comments['items'])
|
|
||||||
|
|
||||||
hidden_ids = [c['comment_id'] for c in hidden_comments['items']]
|
|
||||||
visible_ids = [c['comment_id'] for c in visible_comments['items']]
|
|
||||||
composite_ids = hidden_ids + visible_ids
|
composite_ids = hidden_ids + visible_ids
|
||||||
|
listed_comments = comment_list(claim_id)
|
||||||
|
all_ids = [c['comment_id'] for c in listed_comments['items']]
|
||||||
composite_ids.sort()
|
composite_ids.sort()
|
||||||
|
|
||||||
comment_list = get_claim_comments(self.conn, claim_id)
|
|
||||||
all_ids = [c['comment_id'] for c in comment_list['items']]
|
|
||||||
all_ids.sort()
|
all_ids.sort()
|
||||||
self.assertEqual(composite_ids, all_ids)
|
self.assertEqual(composite_ids, all_ids)
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
import os
|
import os
|
||||||
|
import random
|
||||||
|
|
||||||
import aiohttp
|
import aiohttp
|
||||||
import re
|
|
||||||
from itertools import *
|
from itertools import *
|
||||||
|
|
||||||
import faker
|
import faker
|
||||||
|
@ -8,21 +9,44 @@ from faker.providers import internet
|
||||||
from faker.providers import lorem
|
from faker.providers import lorem
|
||||||
from faker.providers import misc
|
from faker.providers import misc
|
||||||
|
|
||||||
from src.settings import config
|
from src.main import get_config, CONFIG_FILE
|
||||||
from src.server import app
|
from src.server import app
|
||||||
|
from src.server.validation import is_valid_base_comment
|
||||||
|
|
||||||
from test.testcase import AsyncioTestCase
|
from test.testcase import AsyncioTestCase
|
||||||
|
|
||||||
|
|
||||||
|
config = get_config(CONFIG_FILE)
|
||||||
|
config['mode'] = 'testing'
|
||||||
|
config['testing']['file'] = ':memory:'
|
||||||
|
|
||||||
|
|
||||||
|
if 'slack_webhook' in config:
|
||||||
|
config.pop('slack_webhook')
|
||||||
|
|
||||||
|
|
||||||
fake = faker.Faker()
|
fake = faker.Faker()
|
||||||
fake.add_provider(internet)
|
fake.add_provider(internet)
|
||||||
fake.add_provider(lorem)
|
fake.add_provider(lorem)
|
||||||
fake.add_provider(misc)
|
fake.add_provider(misc)
|
||||||
|
|
||||||
|
|
||||||
def fake_lbryusername():
|
def fake_lbryusername() -> str:
|
||||||
return '@' + fake.user_name()
|
return '@' + fake.user_name()
|
||||||
|
|
||||||
|
|
||||||
|
def nothing():
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def fake_signature() -> str:
|
||||||
|
return fake.sha256() + fake.sha256()
|
||||||
|
|
||||||
|
|
||||||
|
def fake_signing_ts() -> str:
|
||||||
|
return str(random.randint(1, 2**32 - 1))
|
||||||
|
|
||||||
|
|
||||||
async def jsonrpc_post(url, method, **params):
|
async def jsonrpc_post(url, method, **params):
|
||||||
json_body = {
|
json_body = {
|
||||||
'jsonrpc': '2.0',
|
'jsonrpc': '2.0',
|
||||||
|
@ -34,17 +58,14 @@ async def jsonrpc_post(url, method, **params):
|
||||||
return await request.json()
|
return await request.json()
|
||||||
|
|
||||||
|
|
||||||
def nothing():
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
replace = {
|
replace = {
|
||||||
'claim_id': fake.sha1,
|
'claim_id': fake.sha1,
|
||||||
'comment': fake.text,
|
'comment': fake.text,
|
||||||
'channel_id': fake.sha1,
|
'channel_id': fake.sha1,
|
||||||
'channel_name': fake_lbryusername,
|
'channel_name': fake_lbryusername,
|
||||||
'signature': fake.uuid4,
|
'signature': fake_signature,
|
||||||
'parent_id': fake.sha256
|
'signing_ts': fake_signing_ts,
|
||||||
|
'parent_id': fake.sha256,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -55,10 +76,10 @@ def create_test_comments(values: iter, **default):
|
||||||
|
|
||||||
|
|
||||||
class ServerTest(AsyncioTestCase):
|
class ServerTest(AsyncioTestCase):
|
||||||
db_file = 'test.db'
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
|
config['mode'] = 'testing'
|
||||||
|
config['testing']['file'] = ':memory:'
|
||||||
self.host = 'localhost'
|
self.host = 'localhost'
|
||||||
self.port = 5931
|
self.port = 5931
|
||||||
|
|
||||||
|
@ -69,48 +90,16 @@ class ServerTest(AsyncioTestCase):
|
||||||
@classmethod
|
@classmethod
|
||||||
def tearDownClass(cls) -> None:
|
def tearDownClass(cls) -> None:
|
||||||
print('exit reached')
|
print('exit reached')
|
||||||
os.remove(cls.db_file)
|
|
||||||
|
|
||||||
async def asyncSetUp(self):
|
async def asyncSetUp(self):
|
||||||
await super().asyncSetUp()
|
await super().asyncSetUp()
|
||||||
self.server = app.CommentDaemon(config, db_file=self.db_file)
|
self.server = app.CommentDaemon(config)
|
||||||
await self.server.start(host=self.host, port=self.port)
|
await self.server.start(host=self.host, port=self.port)
|
||||||
self.addCleanup(self.server.stop)
|
self.addCleanup(self.server.stop)
|
||||||
|
|
||||||
async def post_comment(self, **params):
|
async def post_comment(self, **params):
|
||||||
return await jsonrpc_post(self.url, 'create_comment', **params)
|
return await jsonrpc_post(self.url, 'create_comment', **params)
|
||||||
|
|
||||||
def is_valid_message(self, comment=None, claim_id=None, parent_id=None,
|
|
||||||
channel_name=None, channel_id=None, signature=None, signing_ts=None):
|
|
||||||
try:
|
|
||||||
assert comment is not None and claim_id is not None
|
|
||||||
assert re.fullmatch('([a-f0-9]|[A-F0-9]){40}', claim_id)
|
|
||||||
assert 0 < len(comment) <= 2000
|
|
||||||
if parent_id is not None:
|
|
||||||
assert re.fullmatch('([a-f0-9]){64}', parent_id)
|
|
||||||
|
|
||||||
if channel_name or channel_id or signature or signing_ts:
|
|
||||||
assert channel_id is not None and channel_name is not None
|
|
||||||
assert re.fullmatch('([a-f0-9]|[A-F0-9]){40}', channel_id)
|
|
||||||
assert self.valid_channel_name(channel_name)
|
|
||||||
assert (signature is None and signing_ts is None) or \
|
|
||||||
(signature is not None and signing_ts is not None)
|
|
||||||
if signature:
|
|
||||||
assert len(signature) == 128
|
|
||||||
if parent_id:
|
|
||||||
assert parent_id.isalnum()
|
|
||||||
except Exception:
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def valid_channel_name(channel_name):
|
|
||||||
return re.fullmatch(
|
|
||||||
'^@(?:(?![\x00-\x08\x0b\x0c\x0e-\x1f\x23-\x26'
|
|
||||||
'\x2f\x3a\x3d\x3f-\x40\uFFFE-\U0000FFFF]).){1,255}$',
|
|
||||||
channel_name
|
|
||||||
)
|
|
||||||
|
|
||||||
async def test01CreateCommentNoReply(self):
|
async def test01CreateCommentNoReply(self):
|
||||||
anonymous_test = create_test_comments(
|
anonymous_test = create_test_comments(
|
||||||
('claim_id', 'channel_id', 'channel_name', 'comment'),
|
('claim_id', 'channel_id', 'channel_name', 'comment'),
|
||||||
|
@ -120,13 +109,13 @@ class ServerTest(AsyncioTestCase):
|
||||||
claim_id=None
|
claim_id=None
|
||||||
)
|
)
|
||||||
for test in anonymous_test:
|
for test in anonymous_test:
|
||||||
with self.subTest(test=test):
|
with self.subTest(test='null fields: ' + ', '.join(k for k, v in test.items() if not v)):
|
||||||
message = await self.post_comment(**test)
|
message = await self.post_comment(**test)
|
||||||
self.assertTrue('result' in message or 'error' in message)
|
self.assertTrue('result' in message or 'error' in message)
|
||||||
if 'error' in message:
|
if 'error' in message:
|
||||||
self.assertFalse(self.is_valid_message(**test))
|
self.assertFalse(is_valid_base_comment(**test))
|
||||||
else:
|
else:
|
||||||
self.assertTrue(self.is_valid_message(**test))
|
self.assertTrue(is_valid_base_comment(**test))
|
||||||
|
|
||||||
async def test02CreateNamedCommentsNoReply(self):
|
async def test02CreateNamedCommentsNoReply(self):
|
||||||
named_test = create_test_comments(
|
named_test = create_test_comments(
|
||||||
|
@ -142,22 +131,24 @@ class ServerTest(AsyncioTestCase):
|
||||||
message = await self.post_comment(**test)
|
message = await self.post_comment(**test)
|
||||||
self.assertTrue('result' in message or 'error' in message)
|
self.assertTrue('result' in message or 'error' in message)
|
||||||
if 'error' in message:
|
if 'error' in message:
|
||||||
self.assertFalse(self.is_valid_message(**test))
|
self.assertFalse(is_valid_base_comment(**test))
|
||||||
else:
|
else:
|
||||||
self.assertTrue(self.is_valid_message(**test))
|
self.assertTrue(is_valid_base_comment(**test))
|
||||||
|
|
||||||
async def test03CreateAllTestComments(self):
|
async def test03CreateAllTestComments(self):
|
||||||
test_all = create_test_comments(replace.keys(), **{
|
test_all = create_test_comments(replace.keys(), **{
|
||||||
k: None for k in replace.keys()
|
k: None for k in replace.keys()
|
||||||
})
|
})
|
||||||
|
test_all.reverse()
|
||||||
for test in test_all:
|
for test in test_all:
|
||||||
with self.subTest(test=test):
|
nulls = 'null fields: ' + ', '.join(k for k, v in test.items() if not v)
|
||||||
|
with self.subTest(test=nulls):
|
||||||
message = await self.post_comment(**test)
|
message = await self.post_comment(**test)
|
||||||
self.assertTrue('result' in message or 'error' in message)
|
self.assertTrue('result' in message or 'error' in message)
|
||||||
if 'error' in message:
|
if 'error' in message:
|
||||||
self.assertFalse(self.is_valid_message(**test))
|
self.assertFalse(is_valid_base_comment(**test, strict=True))
|
||||||
else:
|
else:
|
||||||
self.assertTrue(self.is_valid_message(**test))
|
self.assertTrue(is_valid_base_comment(**test, strict=True))
|
||||||
|
|
||||||
async def test04CreateAllReplies(self):
|
async def test04CreateAllReplies(self):
|
||||||
claim_id = '1d8a5cc39ca02e55782d619e67131c0a20843be8'
|
claim_id = '1d8a5cc39ca02e55782d619e67131c0a20843be8'
|
||||||
|
@ -166,6 +157,8 @@ class ServerTest(AsyncioTestCase):
|
||||||
channel_id=fake.sha1(),
|
channel_id=fake.sha1(),
|
||||||
comment='Hello everybody and welcome back to my chan nel',
|
comment='Hello everybody and welcome back to my chan nel',
|
||||||
claim_id=claim_id,
|
claim_id=claim_id,
|
||||||
|
signing_ts='1234',
|
||||||
|
signature='_'*128
|
||||||
)
|
)
|
||||||
parent_id = parent_comment['result']['comment_id']
|
parent_id = parent_comment['result']['comment_id']
|
||||||
test_all = create_test_comments(
|
test_all = create_test_comments(
|
||||||
|
@ -185,9 +178,37 @@ class ServerTest(AsyncioTestCase):
|
||||||
message = await self.post_comment(**test)
|
message = await self.post_comment(**test)
|
||||||
self.assertTrue('result' in message or 'error' in message)
|
self.assertTrue('result' in message or 'error' in message)
|
||||||
if 'error' in message:
|
if 'error' in message:
|
||||||
self.assertFalse(self.is_valid_message(**test))
|
self.assertFalse(is_valid_base_comment(**test))
|
||||||
else:
|
else:
|
||||||
self.assertTrue(self.is_valid_message(**test))
|
self.assertTrue(is_valid_base_comment(**test))
|
||||||
|
|
||||||
|
async def testSlackWebhook(self):
|
||||||
|
claim_id = '1d8a5cc39ca02e55782d619e67131c0a20843be8'
|
||||||
|
channel_name = '@name'
|
||||||
|
channel_id = fake.sha1()
|
||||||
|
signature = '{}'*64
|
||||||
|
signing_ts = '1234'
|
||||||
|
|
||||||
|
base = await self.post_comment(
|
||||||
|
channel_name=channel_name,
|
||||||
|
channel_id=channel_id,
|
||||||
|
comment='duplicate',
|
||||||
|
claim_id=claim_id,
|
||||||
|
signing_ts=signing_ts,
|
||||||
|
signature=signature
|
||||||
|
)
|
||||||
|
|
||||||
|
comment_id = base['result']['comment_id']
|
||||||
|
|
||||||
|
with self.subTest(test=comment_id):
|
||||||
|
await self.post_comment(
|
||||||
|
channel_name=channel_name,
|
||||||
|
channel_id=channel_id,
|
||||||
|
comment='duplicate',
|
||||||
|
claim_id=claim_id,
|
||||||
|
signing_ts=signing_ts,
|
||||||
|
signature=signature
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class ListCommentsTest(AsyncioTestCase):
|
class ListCommentsTest(AsyncioTestCase):
|
||||||
|
@ -196,7 +217,8 @@ class ListCommentsTest(AsyncioTestCase):
|
||||||
'comment': fake.text,
|
'comment': fake.text,
|
||||||
'channel_id': fake.sha1,
|
'channel_id': fake.sha1,
|
||||||
'channel_name': fake_lbryusername,
|
'channel_name': fake_lbryusername,
|
||||||
'signature': nothing,
|
'signature': fake_signature,
|
||||||
|
'signing_ts': fake_signing_ts,
|
||||||
'parent_id': nothing
|
'parent_id': nothing
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -204,7 +226,8 @@ class ListCommentsTest(AsyncioTestCase):
|
||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
self.host = 'localhost'
|
self.host = 'localhost'
|
||||||
self.port = 5931
|
self.port = 5931
|
||||||
self.db_file = 'list_test.db'
|
config['mode'] = 'testing'
|
||||||
|
config['testing']['file'] = ':memory:'
|
||||||
self.claim_id = '1d8a5cc39ca02e55782d619e67131c0a20843be8'
|
self.claim_id = '1d8a5cc39ca02e55782d619e67131c0a20843be8'
|
||||||
self.comment_ids = None
|
self.comment_ids = None
|
||||||
|
|
||||||
|
@ -215,32 +238,34 @@ class ListCommentsTest(AsyncioTestCase):
|
||||||
async def post_comment(self, **params):
|
async def post_comment(self, **params):
|
||||||
return await jsonrpc_post(self.url, 'create_comment', **params)
|
return await jsonrpc_post(self.url, 'create_comment', **params)
|
||||||
|
|
||||||
def tearDown(self) -> None:
|
async def create_lots_of_comments(self, n=23):
|
||||||
print('exit reached')
|
self.comment_list = [{key: self.replace[key]() for key in self.replace.keys()} for _ in range(23)]
|
||||||
os.remove(self.db_file)
|
for comment in self.comment_list:
|
||||||
|
comment['claim_id'] = self.claim_id
|
||||||
|
self.comment_ids = [(await self.post_comment(**comm))['result']['comment_id']
|
||||||
|
for comm in self.comment_list]
|
||||||
|
|
||||||
async def asyncSetUp(self):
|
async def asyncSetUp(self):
|
||||||
await super().asyncSetUp()
|
await super().asyncSetUp()
|
||||||
self.server = app.CommentDaemon(config, db_file=self.db_file)
|
self.server = app.CommentDaemon(config)
|
||||||
await self.server.start(self.host, self.port)
|
await self.server.start(self.host, self.port)
|
||||||
self.addCleanup(self.server.stop)
|
self.addCleanup(self.server.stop)
|
||||||
if self.comment_ids is None:
|
|
||||||
self.comment_list = [{key: self.replace[key]() for key in self.replace.keys()} for _ in range(23)]
|
|
||||||
for comment in self.comment_list:
|
|
||||||
comment['claim_id'] = self.claim_id
|
|
||||||
self.comment_ids = [(await self.post_comment(**comm))['result']['comment_id']
|
|
||||||
for comm in self.comment_list]
|
|
||||||
|
|
||||||
async def testListComments(self):
|
async def testListComments(self):
|
||||||
|
await self.create_lots_of_comments()
|
||||||
response_one = await jsonrpc_post(
|
response_one = await jsonrpc_post(
|
||||||
self.url, 'get_claim_comments', page_size=20, page=1, top_level=1, claim_id=self.claim_id
|
self.url, 'get_claim_comments', page_size=20, page=1, top_level=1, claim_id=self.claim_id
|
||||||
)
|
)
|
||||||
self.assertIsNotNone(response_one)
|
self.assertIsNotNone(response_one)
|
||||||
self.assertIn('result', response_one)
|
self.assertIn('result', response_one)
|
||||||
response_one: dict = response_one['result']
|
response_one: dict = response_one['result']
|
||||||
self.assertIs(type(response_one), dict)
|
self.assertEqual(response_one['page_size'], len(response_one['items']))
|
||||||
self.assertEquals(response_one['page_size'], len(response_one['items']))
|
|
||||||
self.assertIn('items', response_one)
|
self.assertIn('items', response_one)
|
||||||
|
|
||||||
|
comments = response_one['items']
|
||||||
|
hidden = list(filter(lambda c: c['is_hidden'], comments))
|
||||||
|
self.assertEqual(hidden, [])
|
||||||
|
|
||||||
self.assertGreaterEqual(response_one['total_pages'], response_one['page'])
|
self.assertGreaterEqual(response_one['total_pages'], response_one['page'])
|
||||||
last_page = response_one['total_pages']
|
last_page = response_one['total_pages']
|
||||||
response = await jsonrpc_post(self.url, 'get_claim_comments', page_size=20,
|
response = await jsonrpc_post(self.url, 'get_claim_comments', page_size=20,
|
||||||
|
|
|
@ -1,12 +1,38 @@
|
||||||
import os
|
import os
|
||||||
import pathlib
|
import pathlib
|
||||||
import unittest
|
import unittest
|
||||||
from asyncio.runners import _cancel_all_tasks # type: ignore
|
|
||||||
from unittest.case import _Outcome
|
from unittest.case import _Outcome
|
||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
|
from asyncio.runners import _cancel_all_tasks # type: ignore
|
||||||
|
from peewee import *
|
||||||
|
|
||||||
from src.database.queries import obtain_connection, setup_database
|
from src.database.models import Channel, Comment
|
||||||
|
|
||||||
|
|
||||||
|
test_db = SqliteDatabase(':memory:')
|
||||||
|
|
||||||
|
|
||||||
|
MODELS = [Channel, Comment]
|
||||||
|
|
||||||
|
|
||||||
|
class DatabaseTestCase(unittest.TestCase):
|
||||||
|
def __init__(self, methodName='DatabaseTest'):
|
||||||
|
super().__init__(methodName)
|
||||||
|
|
||||||
|
def setUp(self) -> None:
|
||||||
|
super().setUp()
|
||||||
|
test_db.bind(MODELS, bind_refs=False, bind_backrefs=False)
|
||||||
|
|
||||||
|
test_db.connect()
|
||||||
|
test_db.create_tables(MODELS)
|
||||||
|
|
||||||
|
def tearDown(self) -> None:
|
||||||
|
# drop tables for next test
|
||||||
|
test_db.drop_tables(MODELS)
|
||||||
|
|
||||||
|
# close connection
|
||||||
|
test_db.close()
|
||||||
|
|
||||||
|
|
||||||
class AsyncioTestCase(unittest.TestCase):
|
class AsyncioTestCase(unittest.TestCase):
|
||||||
|
@ -117,21 +143,3 @@ class AsyncioTestCase(unittest.TestCase):
|
||||||
self.loop.run_until_complete(maybe_coroutine)
|
self.loop.run_until_complete(maybe_coroutine)
|
||||||
|
|
||||||
|
|
||||||
class DatabaseTestCase(unittest.TestCase):
|
|
||||||
db_file = 'test.db'
|
|
||||||
|
|
||||||
def __init__(self, methodName='DatabaseTest'):
|
|
||||||
super().__init__(methodName)
|
|
||||||
if pathlib.Path(self.db_file).exists():
|
|
||||||
os.remove(self.db_file)
|
|
||||||
|
|
||||||
def setUp(self) -> None:
|
|
||||||
super().setUp()
|
|
||||||
setup_database(self.db_file)
|
|
||||||
self.conn = obtain_connection(self.db_file)
|
|
||||||
self.addCleanup(self.conn.close)
|
|
||||||
self.addCleanup(os.remove, self.db_file)
|
|
||||||
|
|
||||||
def tearDown(self) -> None:
|
|
||||||
self.conn.close()
|
|
||||||
|
|
||||||
|
|
Loading…
Reference in a new issue