Patches comment server interface in the SDK

parent 26dd6d2ac7
commit 17164601a1
4 changed files with 224 additions and 274 deletions
@@ -534,8 +534,7 @@ class Config(CLIConfig):
('lbrynet4.lbry.com', 4444) # ASIA
])

# comment server uses a single string because requests are made to the /api resource
comment_server = String("Server to store user-generated metadata.", "http://comments.lbry.com:2903/api")
comment_server = String("Updated Comment Server version.", "comments.lbry.com/api")

# blockchain
blockchain_name = String("Blockchain name - lbrycrd_main, lbrycrd_regtest, or lbrycrd_testnet", 'lbrycrd_main')

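Whichever default applies, comment_server stays a single URL string, so pointing the daemon at a self-hosted comment server is a one-line override. A minimal sketch, assuming the Config class above allows direct attribute assignment for this setting; the localhost URL is illustrative, not an official endpoint:

import lbrynet.conf

config = lbrynet.conf.Config()
# Point the SDK at a locally running comment server instead of the default.
config.comment_server = 'http://localhost:2903/api'  # illustrative URL
print(config.comment_server)
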
@@ -33,13 +33,13 @@ from lbrynet.extras.daemon.Components import EXCHANGE_RATE_MANAGER_COMPONENT, UP
from lbrynet.extras.daemon.ComponentManager import RequiredCondition
from lbrynet.extras.daemon.ComponentManager import ComponentManager
from lbrynet.extras.daemon.json_response_encoder import JSONResponseEncoder
from lbrynet.extras.daemon.comment_client import jsonrpc_post, sign_comment
from lbrynet.extras.daemon.undecorated import undecorated
from lbrynet.wallet.transaction import Transaction, Output, Input
from lbrynet.wallet.account import Account as LBCAccount
from lbrynet.wallet.dewies import dewies_to_lbc, lbc_to_dewies
from lbrynet.schema.claim import Claim
from lbrynet.schema.url import URL
from lbrynet.extras.daemon.comment_client import jsonrpc_batch, jsonrpc_post, rpc_body


if typing.TYPE_CHECKING:

@@ -3328,179 +3328,108 @@ class Daemon(metaclass=JSONRPCServerType):
Create and list comments.
"""

@requires(WALLET_COMPONENT)
async def jsonrpc_comment_list(self, claim_id, parent_comment_id=None, flat=False,
page=1, page_size=None, max_replies_shown=5):
async def jsonrpc_comment_list(self, claim_id: str, parent_id: str = None,
page=1, page_size=50, include_replies=True):
"""
List comments associated with a claim.

Usage:
comment_list <claim_id> [--flat] [(--page=<page> --page_size=<page_size>)]
[--parent_comment_id=<parent_comment_id>]
[--max_replies_shown=<max_replies_shown>]
comment_list (<claim_id> | --claim_id=<claim_id>)
[(--page=<page> --page_size=<page_size>)]
[--parent_id=<parent_id>] [--include_replies]

Options:
--flat : (bool) Flag to indicate whether or not you want the
replies to be flattened along with the rest of
the comments attached to the claim. Off by default
--parent_comment_id=<parent_comment_id> : (int) The ID of an existing
comment to list replies from
--max_replies_shown=<max_replies_shown> : (int) For every comment that we pull replies from,
only retrieve up to this amount.
Note: This is not the same as page size.
--include_replies : (bool) Flag to indicate whether or not you want the
replies to be included with the response or not
--parent_id=<parent_id> : (str) The ID of an existing comment to list replies from

--page=<page> : (int) The page you'd like to see in the comment list.
The first page is 1, second page is 2, and so on.
--page_size=<page_size> : (int) The amount of comments that you'd like to
retrieve in one request

Returns:
(dict) Dict containing the following schema:
{
"page": (int) The page of comments as provided when limiting response to page_size.
"page_size": (int) Number of comments in the given page. -1 if page_size wasn't used
"comments": (list) Contains all the comments (as dicts) as provided by the specified parameters
}
(list) Containing comments stored as dictionary objects:
[
{
"comment": (str) The actual string as inputted by the user,
"comment_id": (str) The Comment's unique identifier,
"channel_name": (str) Name of the channel this was posted under, prepended with a '@',
"channel_id": (str) The Channel Claim ID that this comment was posted under,
"signature": (str) The signature of the comment,
"channel_uri": (str) Channel's URI in the ClaimTrie,
"parent_id": (str) Comment this is replying to, (None) if this is the root,
"timestamp": (int) The time at which the comment was entered into the server, in nanoseconds.
},
...
]
"""
# Should be like this:
# comment list [claim_id] [parent_comment_id] --flat --page=1 --page-size=10
url = self.conf.comment_server
# The server uses permanent URIs for keys; not claims.
# This is temporary until we can get that functionality removed
claim_info = (await self.jsonrpc_claim_search(claim_id=claim_id))
if 'error' in claim_info:
raise Exception(claim_info['error'])
if claim_info["page"] == 0:
return {'page': 1, 'page_size': 0, 'comments': []}
claim_uri = claim_info["items"][0].permanent_url
# These two cases need separation since getting replies requires a bit of magic
# to reduce request count from O(n^2) to O(1)
if parent_comment_id:
# Since we don't directly get all the comment data at once,
# we have to do a bit more work to get them
comment_ids = await jsonrpc_post(url, 'get_comment_replies',
comm_index=parent_comment_id, clean=False)
comment_ids = comment_ids['result']
if page_size is not None:
comment_ids = comment_ids[page_size * (page - 1): page_size * page]
# now we have to just batch request the reply comments
comments_batch = [
rpc_body('get_comment_data', index, comm_index=comment_id, better_keys=True)
for index, comment_id in enumerate(comment_ids)
]
del comment_ids
comments = await jsonrpc_batch(url, comments_batch, clean=True)
else:
# Get the content of the top level comments
comments = await jsonrpc_post(url, 'get_claim_comments', uri=claim_uri, better_keys=True)
if page_size is not None:
comments = comments[page_size * (page - 1): page_size * page]
# By now comments should be a list containing comment dicts that are supposed to be
# at the given height that was requested. The parent_id may or may not be present
# in the dicts, as they may or may not be replies to comments at a higher level
# However this is dependent purely on whether or not parent_comment_id is None or not
reply_lists = await jsonrpc_batch(url, [
rpc_body('get_comment_replies', index, comm_index=comment['comment_id'])
for index, comment in enumerate(comments)
])
response = {
'page': page,
'page_size': -1 if page_size is None else page_size,
'comments': []
}
if flat:
# If it's flat then we'll need to get the comments into an order such that
# If an element e in the list has a non-null parent id, the element before it
# is either also a reply with the same parent id, or has an id that equals e's parent id,
# in which case it's the comment that is being replied to.
# Otherwise, if it has a null parent id, then it is a top level comment.
return await jsonrpc_post(
self.conf.comment_server,
"get_claim_comments",
claim_id=claim_id,
parent_id=parent_id,
page=page,
page_size=page_size,
top_level=not include_replies
)

# To do this, we create a dict that maps the index of the comment in the array
# to a list containing the comment IDs of the replies
comment_replies = {resp['id']: resp['result'] for resp in reply_lists if 'result' in resp}

# Next, we create a batch request for the actual data of all of the replies
# the id in this batch request is going to be in the form 'X:Y'
# where X is the index of the parent comment in `comments`,
# and Y is index of the reply's ID within the list X maps to in `comment_replies`
full_replies_batch = [
rpc_body('get_comment_data', f'{parent_idx}:{idx}', comm_index=reply_id, better_keys=True)
for parent_idx, id_list in comment_replies.items()
for idx, reply_id in enumerate(id_list[0:max_replies_shown])
]
reply_dump = await jsonrpc_batch(url, full_replies_batch)
del full_replies_batch
# This neatly orders the response into a dict to aggregate the
# full comments by the parent comment they're replying to
#
# WARNING: The following block is going to be saving the comment dict
# objects TO `comment_replies`. This means that the lists
# stored in `comment_replies` may not hold just comments, but
# the ids of the comments who weren't requested due to the
# maximum reply limit. They need to be either cleaned out or stored
# somewhere else

for comment in reply_dump:
parent_index, reply_index = comment['id'].split(':')
parent_index, reply_index = int(parent_index), int(reply_index)
comment_replies[parent_index][reply_index] = comment['result']

for idx, parent_comment in enumerate(comments):
if 'parent_id' not in parent_comment:
parent_comment['parent_id'] = None
parent_comment['reply_count'] = len(comment_replies[idx])
parent_comment['omitted'] = 0
if len(comment_replies[idx]) > max_replies_shown:
parent_comment['omitted'] = len(comment_replies[idx]) - max_replies_shown

response['comments'].append(parent_comment)
response['comments'] += comment_replies[idx][0:max_replies_shown]
response['page_size'] = page_size if page_size is not None else -1
return response
else:
for id_list in reply_lists:
comments[id_list['id']]['reply_count'] = len(id_list['result'])
comments[id_list['id']]['omitted'] = len(id_list['result'])
response['comments'] = comments
del reply_lists
return response

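For reference, the rewritten comment_list forwards a single get_claim_comments call to the comment server. A minimal standalone sketch of that request using aiohttp; the endpoint URL and claim_id are purely illustrative, and the parameter names mirror the jsonrpc_post call above:

import asyncio
import aiohttp

async def list_comments(server_url: str, claim_id: str):
    # Same method name and parameters as the call in jsonrpc_comment_list above.
    body = {
        'jsonrpc': '2.0', 'id': 1,
        'method': 'get_claim_comments',
        'params': {'claim_id': claim_id, 'parent_id': None,
                   'page': 1, 'page_size': 50, 'top_level': False},
    }
    async with aiohttp.request('POST', server_url, json=body) as response:
        return (await response.json()).get('result')

# asyncio.run(list_comments('http://localhost:2903/api', 'deadbeef' * 5))  # illustrative values
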
@requires(WALLET_COMPONENT)
async def jsonrpc_comment_create(self, claim_id: str, channel_id: str,
message: str, parent_comment_id: int = None) -> dict:
async def jsonrpc_comment_create(self, claim_id: str, comment: str, parent_id: str = None,
channel_name: str = None, channel_id: str = None) -> dict:
"""
Create and associate a comment with a claim using your channel identity.

Usage:
comment_create <claim_id> <channel_id> <message> [--parent_comment_id=<parent_comment_id>]
comment_create (<comment> | --comment=<comment>)
(<claim_id> | --claim_id=<claim_id>)
[--channel_id=<channel_id>]
[--channel_name=<channel_name>]
[--parent_id=<parent_id>]

Options:
--parent_comment_id=<parent_comment_id> : (int) The ID of a comment to make a response to
--parent_id=<parent_id> : (str) The ID of a comment to make a response to
--channel_id=<channel_id> : (str) The ID of the channel you want to post under
--channel_name=<channel_name> : (str) The channel you want to post as, prepend with a '@'


Returns:
(dict) Comment object if successfully made
(dict) Comment object if successfully made, (None) otherwise
{
"comment": (str) The actual string as inputted by the user,
"comment_id": (str) The Comment's unique identifier,
"channel_name": (str) Name of the channel this was posted under, prepended with a '@',
"channel_id": (str) The Channel Claim ID that this comment was posted under,
"signature": (str) The signature of the comment,
"channel_uri": (str) Channel's URI in the ClaimTrie,
"parent_id": (str) Comment this is replying to, (None) if this is the root,
"timestamp": (int) The time at which the comment was entered into the server, in nanoseconds.
}
"""
if not 1 < len(message) <= 2000:
raise Exception(f'Message length ({len(message)}) needs to be between 2 and 2000 chars')
url = self.conf.comment_server
if parent_comment_id is not None:
comment_id = await jsonrpc_post(url, 'reply', parent_id=parent_comment_id,
poster=channel_id, message=message)
else:
claim_data = await self.jsonrpc_claim_search(claim_id=claim_id)
if 'error' not in claim_data and claim_data['total_pages'] == 1:
uri = claim_data['items'][0].permanent_url
comment_id = await jsonrpc_post(url, 'comment', uri=uri,
poster=channel_id, message=message)
else:
raise Exception(f"permanent_url is not in the claim_data {claim_data}\n"
f"The given claim_id ({claim_id}) may be invalid")
return await jsonrpc_post(url, 'get_comment_data', comm_index=comment_id, better_keys=True)

def valid_address_or_error(self, address):
try:
assert self.ledger.is_valid_address(address)
except:
raise Exception(f"'{address}' is not a valid address")
if bool(channel_name) ^ bool(channel_id):
skey, sval = ('claim_id', channel_id) if channel_id else ('normalized', channel_name.lower())
channel_list = await self.jsonrpc_channel_list()
try:
channel: dict = [chan for chan in channel_list if chan[skey] == sval].pop()
channel_name, channel_id = channel['normalized'], channel['claim_id']
except IndexError:
raise ValueError('You must enter a valid channel_%s' % ('id' if channel_id else 'name'))
signature = None
if channel_id and channel_name:
signature = sign_comment(
channel_name=channel_name,
channel_id=channel_id,
comment=comment,
salt=time.time_ns()
)
comment = {
'comment': comment,
'claim_id': claim_id,
'parent_id': parent_id,
'channel_id': channel_id,
'channel_name': channel_name,
'signature': signature
}
return await jsonrpc_post(self.conf.comment_server, 'create_comment', **comment)

@staticmethod
def valid_stream_name_or_error(name: str):

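The new comment_create ultimately POSTs a single create_comment call whose parameters mirror the comment dict assembled above. An illustrative sketch of that payload; the claim_id, channel_id, channel name and comment text are borrowed from the test constants and test cases further down, and the signature would come from sign_comment in comment_client:

# Illustrative 'create_comment' parameters; field names match the dict built
# in jsonrpc_comment_create above, values are taken from the tests below.
create_comment_params = {
    'comment': "It's 5 O'Clock Somewhere",
    'claim_id': 'f6068bdc8cb66fe7eb6c3cf4cf98da93a697df47',
    'parent_id': None,            # or an existing comment_id when replying
    'channel_id': '7b65a9886869a367371ec621abe5bac4e5dd27b9',
    'channel_name': '@JimmyBuffett',
    'signature': None,            # hex digest from sign_comment() when posting under a channel
}
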
@@ -1,31 +1,51 @@
import logging

import aiohttp
import hashlib
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives.asymmetric import utils
from cryptography.hazmat.primitives.asymmetric.rsa import generate_private_key

log = logging.getLogger(__name__)


def sign_comment(**kwargs):
private_key = generate_private_key(
public_exponent=65537,
key_size=4096,
backend=default_backend()
)
chosen_hash = hashes.SHA256()
hasher = hashes.Hash(chosen_hash, default_backend())
value_to_hash = b':'.join(bytes(v, 'utf-8') for v in kwargs.values() if type(v) is str)
hasher.update(value_to_hash)
digest = hasher.finalize()
signature = private_key.sign(
digest,
padding.PSS(
mgf=padding.MGF1(hashes.SHA256()),
salt_length=padding.PSS.MAX_LENGTH
),
utils.Prehashed(chosen_hash)
)
m = hashlib.sha3_256()
m.update(signature)
return m.hexdigest()


def rpc_body(method: str, rpc_id: any, **params) -> dict:
return {'jsonrpc': '2.0', 'id': rpc_id, 'method': method, 'params': {**params}}


async def jsonrpc_post(url: str, method: str, **params) -> any:
clean = params.pop('clean', True)
response = (await jsonrpc_batch(url, [rpc_body(method, 1, **params)]))[0]
if clean:
if 'error' in response:
return response['error']
return response['result']
else:
return response


async def jsonrpc_batch(url: str, calls: list, batch_size: int = 50, clean: bool = False) -> list:
json_body = {'jsonrpc': '2.0', 'id': None, 'method': method, 'params': params}
headers = {'Content-Type': 'application/json'}
complete = []
batch_size = max(batch_size, 50)
for i in range(0, len(calls), batch_size):
async with aiohttp.request('POST', url, headers=headers, json=calls[i:i+batch_size]) as response:
complete += await response.json()
if clean:
complete = [body['result'] if 'result' in body else None for body in complete]
return complete
async with aiohttp.request('POST', url, json=json_body, headers=headers) as response:
try:
result = await response.json()
return result['result'] if 'result' in result else result
except aiohttp.client.ContentTypeError as cte:
log.exception('Unable to decode response from server: %s', cte)
return await response.text()

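Assuming the batch-oriented helpers above (rpc_body, jsonrpc_post, jsonrpc_batch) are the ones that land in comment_client.py, a short usage sketch; the endpoint URL and the comm_index values are illustrative:

import asyncio
from lbrynet.extras.daemon.comment_client import rpc_body, jsonrpc_post, jsonrpc_batch

async def demo(url='http://localhost:2903/api'):  # illustrative endpoint
    # Single call: jsonrpc_post wraps the method in a one-element batch and,
    # by default, unwraps the 'result' (or 'error') member for the caller.
    replies = await jsonrpc_post(url, 'get_comment_replies', comm_index=42)
    # Batched calls: jsonrpc_batch issues one HTTP POST per slice of request bodies.
    calls = [rpc_body('get_comment_data', i, comm_index=i, better_keys=True) for i in range(3)]
    return replies, await jsonrpc_batch(url, calls, clean=True)

# asyncio.run(demo())
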
@@ -1,7 +1,6 @@
import logging
import time

import typing
import random
import asyncio
from aiohttp import web

@@ -10,13 +9,53 @@ from lbrynet.testcase import CommandTestCase
import lbrynet.schema
lbrynet.schema.BLOCKCHAIN_NAME = 'lbrycrd_regtest'

COMMENT_IDS = [
"b7de681c412e315bb1a9ada6f485a2e0399400db",
"0f7e1514f55c7fefba1e714386e05b3d705f6d29",
"8ae19f686c39f402c80dabf25df23cf72fe426af",
"a11ad59b54bb937ca1a88329f253b17196bd4dc3",
"7ee87b3249fa47b296c8347cd63bba679ef629eb",
"0100e3367f68284f4970736c9351ad90c37dade5",
"974a5bfcce6bc72605688ba6e2efd34aa934b1dc",
"97ea100a52aa46ae9f2a4356169307a2505e8d47",
"2b4d193371c8f0ed45c830cb1ba3188b90bf08f1",
"db335dc3183ca3552b6ef4a7bce36f26ed37b7eb"
]

CLAIM_IDS = [
"f6068bdc8cb66fe7eb6c3cf4cf98da93a697df47",
"44a8c10e36ed8b60da8d5fe590cba61544fb7179",
"a7d8a1fc90ab806c98743a7f9ca7480e2cebe2a0",
"81a8cc2fa41eea0ae9d65ab0f8a0440605a23f1b",
"49117e9a7bb2aab01356e1160871aad5edb09ed5",
"2b928261918b1f7c65973c8fee9e20d4a1f1b2a4",
"f9d6eb75d1592a967b1c405208593d30b46446c9",
"cc70bd497eb1305096fa4e28275645f47c5d809d",
"2e520f60bd8f79f309d68b291fe574531a7d6656",
"16b0248c103fb7b3497bd58543f6c5dd6d47d5f2"
]

CHANNEL_IDS = [
"7b65a9886869a367371ec621abe5bac4e5dd27b9",
"c3bbde23a8b31dc05490cede3a381080b024f878",
"c544579ca13ce5d97e9301789620547323da15eb",
"428e1c075b27bbce1380c16ecb5f0d228318315e",
"1558b39438f573a47a5e0fcd78ad24d0eb358be0",
"ac66521e1757d320568a52ab8b01029bd169b1a0",
"aa89729a08050694ffb62e725356bbaa26481193",
"23181733dc3b836e4d38e8cc21d79378b855cf36",
"60efc8ced56a6a02c2d5371310f0130c541a9ded",
"af1c95f2026d4a254512dd6e6a792a9d92b9fd21"
]


class FakedCommentServer:
ERRORS = {
'INVALID_URI': {'code': 1, 'message': 'Invalid claim URI'},
'INVALID_PARAMS': {'code': -32602, 'message': 'Invalid parameters'},
'INTERNAL': {'code': -32603, 'message': 'An internal error'},
'UNKNOWN': {'code': -1, 'message': 'An unknown or very miscellaneous error'},
'INVALID_METHOD': {'code': -32604, 'message': 'The Requested method does not exist'}

}

def __init__(self, port=2903):

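For reference, a failed call comes back with one of the ERRORS entries above under an 'error' key; the surrounding 'jsonrpc' and 'id' envelope fields shown here are assumed from the JSON-RPC 2.0 convention the client helpers use:

# Illustrative error response for an unknown method; the nested object is
# exactly ERRORS['INVALID_METHOD'] from the class above.
error_response = {
    'jsonrpc': '2.0',
    'id': 1,
    'error': {'code': -32604, 'message': 'The Requested method does not exist'},
}
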
@@ -26,44 +65,27 @@ class FakedCommentServer:
self.runner = None
self.server = None

def get_claim_comments(self, uri: str, better_keys: bool) -> typing.Union[dict, list, None]:
if not uri.startswith('lbry://'): # Very basic error case
return {'error': self.ERRORS['INVALID_URI']}
return [self.get_comment(i) for i in range(75)]

def get_comment(self, comment_id: int, parent_id: int = None) -> dict:
def get_comment(self, **kwargs) -> dict:
return {
'comment_id': comment_id,
'parent_id': parent_id,
'author': f'Person{comment_id}',
'message': f'comment {comment_id}',
'claim_id': random.randint(1, 2**16),
'time_posted': random.randint(2**16, 2**32 - 1),
'upvotes': random.randint(0, 9999), 'downvotes': random.randint(0, 9999)
'comment_id': 'asbdsdasd',
'parent_id': 'asdsfsfsf',
'comment': 'asdsdadsdas',
'timestamp': time.time_ns(),
'channel_id': 'asdsdsdasdad',
'channel_name': 'asdsasasfaf',
'channel_uri': 'asdsdasda',
'signature': 'aasdasdasda',
}

def comment(self, uri: str, poster: str, message: str) -> typing.Union[int, dict, None]:
if not uri.startswith('lbry://'):
return {'error': self.ERRORS['INVALID_URI']}
return random.randint(1, 9999)
def create_comment(self, comment, claim_id, **kwargs):
return self.get_comment(**kwargs)

def reply(self, parent_id: int, poster: str, message: str) -> dict:
if 2 <= len(message) <= 2000 and 2 <= len(poster) <= 127 and parent_id > 0:
return random.randint(parent_id + 1, 2**32 - 1)
return {'error': self.ERRORS['INVALID_PARAMS']}

def get_comment_data(self, comm_index: int, better_keys: bool = False) -> typing.Union[dict, None]:
return self.get_comment(comm_index)

def get_comment_replies(self, comm_index: int) -> typing.Union[list, None]:
return [random.randint(comm_index, comm_index+250) for _ in range(75)]
def get_claim_comments(self, page=1, page_size=50, **kwargs):
return [self.get_comment(**kwargs) for i in range(page_size)]

methods = {
'get_claim_comments': get_claim_comments,
'get_comment_data': get_comment_data,
'get_comment_replies': get_comment_replies,
'comment': comment,
'reply': reply
'create_comment': create_comment,
}

def process_json(self, body) -> dict:

@@ -71,12 +93,9 @@ class FakedCommentServer:
if body['method'] in self.methods:
params = body.get('params', {})
result = self.methods[body['method']](self, **params)
if type(result) is dict and 'error' in result:
response['error'] = result['error']
else:
response['result'] = result
response['result'] = result
else:
response['error'] = self.ERRORS['UNKNOWN']
response['error'] = self.ERRORS['INVALID_METHOD']
return response

async def _start(self):

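A quick dispatch sketch for the fake server: process_json looks the method up in the methods table and calls it with the request's params, so a create_comment body round-trips to the canned comment dict from get_comment. The request values are illustrative, and the exact envelope keys of the returned response depend on code outside this hunk:

server = FakedCommentServer()
request = {
    'jsonrpc': '2.0', 'id': 1,
    'method': 'create_comment',
    'params': {'comment': 'hi there', 'claim_id': CLAIM_IDS[0]},
}
# Dispatches to FakedCommentServer.create_comment; errors would land
# under an 'error' key instead of 'result'.
print(server.process_json(request))
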
@@ -106,7 +125,7 @@ class FakedCommentServer:
response = self.process_json(body)
return web.json_response(response)
else:
return web.json_response({'error': self.ERRORS['UNKNOWN']})
raise TypeError('invalid type passed')


class CommentCommands(CommandTestCase):

@@ -126,78 +145,61 @@ class CommentCommands(CommandTestCase):
await self.server_task

async def test_comment_create(self):
claim = await self.stream_create(name='doge', bid='0.001', data=b'loool')
self.assertIn('outputs', claim)
comment = await self.daemon.jsonrpc_comment_create(
claim_id=claim['outputs'][0]['claim_id'],
channel_id='Jimmy Buffett',
message="It's 5 O'Clock Somewhere"
claim_id=CLAIM_IDS[0],
channel_name='@JimmyBuffett',
channel_id=CHANNEL_IDS[0],
comment="It's 5 O'Clock Somewhere"
)
self.assertIs(type(comment), dict, msg=f"Response type ({type(comment)}) is not dict: {comment}")
self.assertIn('message', comment, msg=f"Response {comment} doesn't contain message")
self.assertIn('author', comment)
self.assertIsNotNone(comment)
self.assertNotIn('error', comment)
self.assertIn('comment', comment, msg=f"Response {comment} doesn't contain message")
self.assertIn('channel_name', comment)

async def test_comment_create_reply(self):
claim = await self.stream_create(name='doge', bid='0.001')
self.assertIn('outputs', claim)
reply = await self.daemon.jsonrpc_comment_create(
claim_id=claim['outputs'][0]['claim_id'],
channel_id='Jimmy Buffett',
message='Let\'s all go to Margaritaville',
parent_comment_id=42
claim_id=CLAIM_IDS[0],
channel_name='@JimmyBuffett',
channel_id=CHANNEL_IDS[0],
comment='Let\'s all go to Margaritaville',
parent_id=COMMENT_IDS[0]
)
self.assertIs(type(reply), dict, msg=f'Response {type(reply)} is not dict\nResponse: {reply}')
self.assertIn('author', reply)
self.assertIsNotNone(reply)
self.assertNotIn('error', reply)
self.assertIn('comment_id', reply)
self.assertIsNotNone(reply['parent_id'])

async def test_comment_list_root_level(self):
claim = await self.stream_create(name='doge', bid='0.001')
self.assertIn('outputs', claim)
claim_id = claim['outputs'][0]['claim_id']
comments = await self.daemon.jsonrpc_comment_list(claim_id)
self.assertIsNotNone(type(comments))
self.assertIs(type(comments), dict)
self.assertIn('comments', comments, f"'comments' field was not found in returned dict: {comments}")
self.assertIs(type(comments['comments']), list, msg=f'comment_list: {comments}')
comments = await self.daemon.jsonrpc_comment_list(claim_id, page_size=50)
comments = await self.daemon.jsonrpc_comment_list(CLAIM_IDS[0])
self.assertIsNotNone(comments)
self.assertIs(type(comments), dict)
self.assertIn('comments', comments, f"'comments' field was not found in returned dict: {comments}")
comment_list = comments['comments']
self.assertEqual(len(comment_list), 50, msg=f'comment_list incorrect size {len(comment_list)}: {comment_list}')
comments = await self.daemon.jsonrpc_comment_list(claim_id, page_size=50, page=2)
self.assertEqual(len(comments['comments']), 25, msg=f'comment list page 2: {comments["comments"]}')
comments = await self.daemon.jsonrpc_comment_list(claim_id, page_size=50, page=3)
self.assertEqual(len(comments['comments']), 0, msg=f'comment list is non-zero: {comments["comments"]}')
self.assertIs(type(comments), list)
comments = await self.daemon.jsonrpc_comment_list(CLAIM_IDS[1], page_size=50)
self.assertIsNotNone(comments)
self.assertLessEqual(len(comments), 50)
self.assertGreaterEqual(len(comments), 0)

async def test_comment_list_replies(self):
claim = await self.stream_create(name='doge', bid='0.001')
self.assertIn('outputs', claim)
claim_id = claim['outputs'][0]['claim_id']
replies = await self.daemon.jsonrpc_comment_list(claim_id, parent_comment_id=23)
self.assertIsInstance(replies['comments'], list, msg=f'Invalid type: {replies["comments"]} should be list')
self.assertGreater(len(replies['comments']), 0, msg='Returned replies are empty')
replies = (await self.daemon.jsonrpc_comment_list(claim_id, parent_comment_id=25, page_size=50))['comments']
self.assertEqual(len(replies), 50, f'Replies invalid length ({len(replies)})')
replies = (await self.daemon.jsonrpc_comment_list(claim_id, parent_comment_id=67,
page_size=23, page=5))['comments']
self.assertEqual(len(replies), 0, f'replies {replies} not 23: {len(replies)}')
replies = (await self.daemon.jsonrpc_comment_list(claim_id, parent_comment_id=79,
page_size=60, page=2))['comments']
self.assertEqual(len(replies), 15, f'Size of replies is incorrect, should be 15: {replies}')
replies = await self.daemon.jsonrpc_comment_list(CLAIM_IDS[0], parent_id=23)
self.assertIsInstance(replies, list)
self.assertGreater(len(replies), 0)
replies = await self.daemon.jsonrpc_comment_list(CLAIM_IDS[2], parent_id=COMMENT_IDS[3], page_size=50)
self.assertEqual(len(replies), 50)
replies = await self.daemon.jsonrpc_comment_list(CLAIM_IDS[3], parent_id=COMMENT_IDS[5],
page_size=23, page=5)
self.assertEqual(len(replies), 23)
replies = await self.daemon.jsonrpc_comment_list(CLAIM_IDS[5], parent_id=COMMENT_IDS[1],
page_size=60, page=2)
self.assertEqual(len(replies), 60)

async def test_comment_list_flatness_flatness_LA(self):
claim = await self.stream_create(name='doge', bid='0.001')
self.assertIn('outputs', claim)
claim_id = claim['outputs'][0]['claim_id']
replies = await self.daemon.jsonrpc_comment_list(claim_id, parent_comment_id=23, flat=True)
self.assertIsInstance(replies['comments'], list, msg=f'Invalid type: {replies["comments"]} should be list')
self.assertGreater(len(replies['comments']), 0, msg='Returned replies are empty')
replies = (await self.daemon.jsonrpc_comment_list(claim_id, parent_comment_id=25, flat=True,
max_replies_shown=0, page_size=50))['comments']
self.assertEqual(len(replies), 50, f'Replies invalid length ({len(replies)})')
replies = (await self.daemon.jsonrpc_comment_list(claim_id, parent_comment_id=67,
flat=True, page_size=23, page=5))['comments']
self.assertEqual(len(replies), 0, f'replies {replies} not 23: {len(replies)}')
replies = (await self.daemon.jsonrpc_comment_list(claim_id, parent_comment_id=79,
page_size=60, page=2))['comments']
self.assertGreaterEqual(len(replies), 15, f'Size of replies is incorrect, should be 15: {replies}')
replies = await self.daemon.jsonrpc_comment_list(CLAIM_IDS[2], parent_id=23, include_replies=True)
self.assertGreater(len(replies), 0)
replies = await self.daemon.jsonrpc_comment_list(CLAIM_IDS[6], parent_id=25,
page_size=50, include_replies=True)
self.assertGreaterEqual(len(replies), 0)
self.assertLessEqual(len(replies), 50)
replies = await self.daemon.jsonrpc_comment_list(CLAIM_IDS[7], parent_id=67, page_size=23, page=5)
self.assertGreaterEqual(len(replies), 0)
self.assertLessEqual(len(replies), 23)
replies = await self.daemon.jsonrpc_comment_list(CLAIM_IDS[9], parent_id=79, page=2, include_replies=True)
self.assertGreaterEqual(len(replies), 15)