import json
import logging

from twisted.internet import defer

log = logging.getLogger(__name__)


class ServerRequestHandler(object):
    """This class handles requests from clients. It can upload blobs and
    respond to requests for information about blobs that are associated
    with streams. A usage sketch appears at the bottom of this module.
    """
    # implements(interfaces.IPushProducer, interfaces.IConsumer, IRequestHandler)

    def __init__(self, consumer):
        self.consumer = consumer
        self.production_paused = False
        self.request_buff = ''
        self.response_buff = ''
        self.producer = None
        self.request_received = False
        self.CHUNK_SIZE = 2**14
        self.query_handlers = {}  # {IQueryHandler: [query_identifiers]}
        self.blob_sender = None
        # Register with the consumer as a streaming (push) producer so the
        # consumer can pause and resume us as its write buffer fills and drains.
        self.consumer.registerProducer(self, True)

    # IPushProducer stuff
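    # This object registers itself with the consumer as a push producer (see
    # __init__), so the consumer calls pauseProducing()/resumeProducing() as
    # its own write buffer fills and drains.  Outgoing data accumulates in
    # response_buff and is written out in CHUNK_SIZE pieces by
    # _produce_more(), which reschedules itself with reactor.callLater(0, ...)
    # until the buffer is empty or production is paused.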

    def pauseProducing(self):
        self.production_paused = True

    def stopProducing(self):
        if self.producer is not None:
            self.producer.stopProducing()
            self.producer = None
        self.production_paused = True
        self.consumer.unregisterProducer()

    def resumeProducing(self):
        from twisted.internet import reactor

        self.production_paused = False
        self._produce_more()
        if self.producer is not None:
            reactor.callLater(0, self.producer.resumeProducing)

    def _produce_more(self):
        from twisted.internet import reactor

        if self.production_paused:
            return
        chunk = self.response_buff[:self.CHUNK_SIZE]
        self.response_buff = self.response_buff[self.CHUNK_SIZE:]
        if chunk == '':
            return
        # NOTE: log.trace assumes a custom TRACE level has been added to the
        # logger elsewhere in the application; the stdlib logging module does
        # not provide one.
        log.trace("writing %s bytes to the client", len(chunk))
        self.consumer.write(chunk)
        reactor.callLater(0, self._produce_more)

    # IConsumer stuff

    def registerProducer(self, producer, streaming):
        self.producer = producer
        # Only pull (non-streaming) producers are supported here; ask for the
        # first chunk of data right away.
        assert streaming is False
        producer.resumeProducing()

    def unregisterProducer(self):
        self.producer = None

    def write(self, data):
        from twisted.internet import reactor

        self.response_buff = self.response_buff + data
        self._produce_more()

        def get_more_data():
            if self.producer is not None:
                log.trace("Requesting more data from the producer")
                self.producer.resumeProducing()

        reactor.callLater(0, get_more_data)

    # From Protocol
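    # Request lifecycle: bytes arrive in data_received() and accumulate in
    # request_buff until they parse as a complete JSON message, at which point
    # _process_msg() runs the message through handle_request().  Once the
    # response has been queued (and any requested blob sent by the registered
    # blob sender), finished_response() clears request_received and flushes
    # whatever is left in response_buff.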

    def data_received(self, data):
        log.debug("Received data")
        log.debug("%s", str(data))
        if self.request_received is False:
            return self._parse_data_and_maybe_send_blob(data)
        else:
            log.warning(
                "The client sent data when we were uploading a file. This should not happen")

    def _parse_data_and_maybe_send_blob(self, data):
        self.request_buff = self.request_buff + data
        msg = self.try_to_parse_request(self.request_buff)
        if msg:
            self.request_buff = ''
            self._process_msg(msg)
        else:
            log.debug("Request buff not a valid json message")
            log.debug("Request buff: %s", self.request_buff)

    def _process_msg(self, msg):
        d = self.handle_request(msg)
        if self.blob_sender:
            d.addCallback(lambda _: self.blob_sender.send_blob_if_requested(self))
        d.addCallbacks(lambda _: self.finished_response(), self.request_failure_handler)

    ######### IRequestHandler #########

    def register_query_handler(self, query_handler, query_identifiers):
        self.query_handlers[query_handler] = query_identifiers

    def register_blob_sender(self, blob_sender):
        self.blob_sender = blob_sender

    # response handling

    def request_failure_handler(self, err):
        log.warning("An error occurred handling a request. Error: %s", err.getErrorMessage())
        self.stopProducing()
        return err

    def finished_response(self):
        self.request_received = False
        self._produce_more()

    def send_response(self, msg):
        m = json.dumps(msg)
        log.debug("Sending a response of length %s", str(len(m)))
        log.debug("Response: %s", str(m))
        self.response_buff = self.response_buff + m
        self._produce_more()
        return True

    def handle_request(self, msg):
        log.debug("Handling a request")
        log.debug(str(msg))

        def create_response_message(results):
            response = {}
            for success, result in results:
                if success is True:
                    response.update(result)
                else:
                    # result is a Failure
                    return result
            log.debug("Finished making the response message. Response: %s", str(response))
            return response

        def log_errors(err):
            log.warning(
                "An error occurred handling a client request. Error message: %s",
                err.getErrorMessage())
            return err

        def send_response(response):
            self.send_response(response)
            return True

        # Fan the request out to every registered query handler, passing each
        # one only the keys it registered for, then merge the individual
        # results into a single response message.
        ds = []
        for query_handler, query_identifiers in self.query_handlers.items():
            queries = {q_i: msg[q_i] for q_i in query_identifiers if q_i in msg}
            d = query_handler.handle_queries(queries)
            d.addErrback(log_errors)
            ds.append(d)

        dl = defer.DeferredList(ds)
        dl.addCallback(create_response_message)
        dl.addCallback(send_response)
        return dl

    def try_to_parse_request(self, request_buff):
        try:
            msg = json.loads(request_buff)
            return msg
        except ValueError:
            return None
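

# --- Usage sketch (illustrative only) ---------------------------------------
# A minimal sketch of how ServerRequestHandler might be wired into a Twisted
# protocol.  ExamplePingQueryHandler and ExampleServerProtocol are hypothetical
# names written for this sketch; real query handlers and blob senders come
# from elsewhere in the application.  The only contract relied on here is that
# a query handler exposes handle_queries() returning a Deferred and is
# registered together with the request keys it understands.

from twisted.internet.protocol import Protocol


class ExamplePingQueryHandler(object):
    """Hypothetical query handler: answers a 'ping' query with 'pong'."""

    def handle_queries(self, queries):
        response = {}
        if 'ping' in queries:
            response['ping'] = 'pong'
        return defer.succeed(response)


class ExampleServerProtocol(Protocol):
    """Feeds bytes from the transport into a ServerRequestHandler."""

    def connectionMade(self):
        # The transport acts as the consumer; the handler registers itself
        # with it as a push producer in its __init__.
        self.request_handler = ServerRequestHandler(self.transport)
        self.request_handler.register_query_handler(
            ExamplePingQueryHandler(), ['ping'])

    def dataReceived(self, data):
        self.request_handler.data_received(data)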