forked from bton/matekasse

tests attempt 2

This commit is contained in:
2000-Trek 2023-07-28 23:30:45 +02:00
parent fdf385fe06
commit c88f7df83a
2363 changed files with 408191 additions and 0 deletions


@@ -0,0 +1,23 @@
import sys
from .client import Client
from .middleware import WSGIApp, Middleware
from .server import Server
if sys.version_info >= (3, 5): # pragma: no cover
from .asyncio_server import AsyncServer
from .asyncio_client import AsyncClient
from .async_drivers.asgi import ASGIApp
try:
from .async_drivers.tornado import get_tornado_handler
except ImportError:
get_tornado_handler = None
else: # pragma: no cover
AsyncServer = None
AsyncClient = None
get_tornado_handler = None
ASGIApp = None
__all__ = ['Server', 'WSGIApp', 'Middleware', 'Client']
if AsyncServer is not None: # pragma: no cover
__all__ += ['AsyncServer', 'ASGIApp', 'get_tornado_handler',
'AsyncClient']
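
The conditional exports above mean the asyncio classes can be None on interpreters without asyncio support. A minimal sketch of how an application can account for that, assuming the package is importable as engineio (all names below are illustrative):

import engineio

# The synchronous server and WSGI middleware are always exported.
eio = engineio.Server()
app = engineio.WSGIApp(eio)

# The asyncio variants are only exported when supported; otherwise the
# package's __init__ sets these names to None.
if engineio.AsyncServer is not None:
    async_server = engineio.AsyncServer()
    asgi_app = engineio.ASGIApp(async_server)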


@@ -0,0 +1,127 @@
import asyncio
import sys
from urllib.parse import urlsplit
from aiohttp.web import Response, WebSocketResponse
def create_route(app, engineio_server, engineio_endpoint):
"""This function sets up the engine.io endpoint as a route for the
application.
Note that both GET and POST requests must be hooked up on the engine.io
endpoint.
"""
app.router.add_get(engineio_endpoint, engineio_server.handle_request)
app.router.add_post(engineio_endpoint, engineio_server.handle_request)
app.router.add_route('OPTIONS', engineio_endpoint,
engineio_server.handle_request)
def translate_request(request):
"""This function takes the arguments passed to the request handler and
uses them to generate a WSGI compatible environ dictionary.
"""
message = request._message
payload = request._payload
uri_parts = urlsplit(message.path)
environ = {
'wsgi.input': payload,
'wsgi.errors': sys.stderr,
'wsgi.version': (1, 0),
'wsgi.async': True,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False,
'SERVER_SOFTWARE': 'aiohttp',
'REQUEST_METHOD': message.method,
'QUERY_STRING': uri_parts.query or '',
'RAW_URI': message.path,
'SERVER_PROTOCOL': 'HTTP/%s.%s' % message.version,
'REMOTE_ADDR': '127.0.0.1',
'REMOTE_PORT': '0',
'SERVER_NAME': 'aiohttp',
'SERVER_PORT': '0',
'aiohttp.request': request
}
for hdr_name, hdr_value in message.headers.items():
hdr_name = hdr_name.upper()
if hdr_name == 'CONTENT-TYPE':
environ['CONTENT_TYPE'] = hdr_value
continue
elif hdr_name == 'CONTENT-LENGTH':
environ['CONTENT_LENGTH'] = hdr_value
continue
key = 'HTTP_%s' % hdr_name.replace('-', '_')
if key in environ:
hdr_value = '%s,%s' % (environ[key], hdr_value)
environ[key] = hdr_value
environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http')
path_info = uri_parts.path
environ['PATH_INFO'] = path_info
environ['SCRIPT_NAME'] = ''
return environ
def make_response(status, headers, payload, environ):
"""This function generates an appropriate response object for this async
mode.
"""
return Response(body=payload, status=int(status.split()[0]),
headers=headers)
class WebSocket(object): # pragma: no cover
"""
This wrapper class provides an aiohttp WebSocket interface that is
somewhat compatible with eventlet's implementation.
"""
def __init__(self, handler, server):
self.handler = handler
self._sock = None
async def __call__(self, environ):
request = environ['aiohttp.request']
self._sock = WebSocketResponse(max_msg_size=0)
await self._sock.prepare(request)
self.environ = environ
await self.handler(self)
return self._sock
async def close(self):
await self._sock.close()
async def send(self, message):
if isinstance(message, bytes):
f = self._sock.send_bytes
else:
f = self._sock.send_str
if asyncio.iscoroutinefunction(f):
await f(message)
else:
f(message)
async def wait(self):
msg = await self._sock.receive()
if not isinstance(msg.data, bytes) and \
not isinstance(msg.data, str):
raise IOError()
return msg.data
_async = {
'asyncio': True,
'create_route': create_route,
'translate_request': translate_request,
'make_response': make_response,
'websocket': WebSocket,
}
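
A minimal sketch of wiring this driver up through the public API, assuming engineio and aiohttp are installed; the port and the echo handler are illustrative:

import engineio
from aiohttp import web

eio = engineio.AsyncServer(async_mode='aiohttp')
app = web.Application()
eio.attach(app)  # registers GET/POST/OPTIONS handlers on /engine.io/ via create_route()

@eio.on('message')
async def message(sid, data):
    # echo the payload back to the sender
    await eio.send(sid, data)

if __name__ == '__main__':
    web.run_app(app, port=5000)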


@@ -0,0 +1,263 @@
import os
import sys
import asyncio
from engineio.static_files import get_static_file
class ASGIApp:
"""ASGI application middleware for Engine.IO.
This middleware dispatches traffic to an Engine.IO application. It can
also serve a list of static files to the client, or forward unrelated
HTTP traffic to another ASGI application.
:param engineio_server: The Engine.IO server. Must be an instance of the
``engineio.AsyncServer`` class.
:param static_files: A dictionary with static file mapping rules. See the
documentation for details on this argument.
:param other_asgi_app: A separate ASGI app that receives all other traffic.
:param engineio_path: The endpoint where the Engine.IO application should
be installed. The default value is appropriate for
most cases.
:param on_startup: function to be called on application startup; can be
coroutine
:param on_shutdown: function to be called on application shutdown; can be
coroutine
Example usage::
import engineio
import uvicorn
eio = engineio.AsyncServer()
app = engineio.ASGIApp(eio, static_files={
'/': {'content_type': 'text/html', 'filename': 'index.html'},
'/index.html': {'content_type': 'text/html',
'filename': 'index.html'},
})
uvicorn.run(app, host='127.0.0.1', port=5000)
"""
def __init__(self, engineio_server, other_asgi_app=None,
static_files=None, engineio_path='engine.io',
on_startup=None, on_shutdown=None):
self.engineio_server = engineio_server
self.other_asgi_app = other_asgi_app
self.engineio_path = engineio_path
if not self.engineio_path.startswith('/'):
self.engineio_path = '/' + self.engineio_path
if not self.engineio_path.endswith('/'):
self.engineio_path += '/'
self.static_files = static_files or {}
self.on_startup = on_startup
self.on_shutdown = on_shutdown
async def __call__(self, scope, receive, send):
if scope['type'] in ['http', 'websocket'] and \
scope['path'].startswith(self.engineio_path):
await self.engineio_server.handle_request(scope, receive, send)
else:
static_file = get_static_file(scope['path'], self.static_files) \
if scope['type'] == 'http' and self.static_files else None
if scope['type'] == 'lifespan':
await self.lifespan(scope, receive, send)
elif static_file and os.path.exists(static_file['filename']):
await self.serve_static_file(static_file, receive, send)
elif self.other_asgi_app is not None:
await self.other_asgi_app(scope, receive, send)
else:
await self.not_found(receive, send)
async def serve_static_file(self, static_file, receive,
send): # pragma: no cover
event = await receive()
if event['type'] == 'http.request':
with open(static_file['filename'], 'rb') as f:
payload = f.read()
await send({'type': 'http.response.start',
'status': 200,
'headers': [(b'Content-Type', static_file[
'content_type'].encode('utf-8'))]})
await send({'type': 'http.response.body',
'body': payload})
async def lifespan(self, scope, receive, send):
if self.other_asgi_app is not None and self.on_startup is None and \
self.on_shutdown is None:
# let the other ASGI app handle lifespan events
await self.other_asgi_app(scope, receive, send)
return
while True:
event = await receive()
if event['type'] == 'lifespan.startup':
if self.on_startup:
try:
await self.on_startup() \
if asyncio.iscoroutinefunction(self.on_startup) \
else self.on_startup()
except:
await send({'type': 'lifespan.startup.failed'})
return
await send({'type': 'lifespan.startup.complete'})
elif event['type'] == 'lifespan.shutdown':
if self.on_shutdown:
try:
await self.on_shutdown() \
if asyncio.iscoroutinefunction(self.on_shutdown) \
else self.on_shutdown()
except:
await send({'type': 'lifespan.shutdown.failed'})
return
await send({'type': 'lifespan.shutdown.complete'})
return
async def not_found(self, receive, send):
"""Return a 404 Not Found error to the client."""
await send({'type': 'http.response.start',
'status': 404,
'headers': [(b'Content-Type', b'text/plain')]})
await send({'type': 'http.response.body',
'body': b'Not Found'})
async def translate_request(scope, receive, send):
class AwaitablePayload(object): # pragma: no cover
def __init__(self, payload):
self.payload = payload or b''
async def read(self, length=None):
if length is None:
r = self.payload
self.payload = b''
else:
r = self.payload[:length]
self.payload = self.payload[length:]
return r
event = await receive()
payload = b''
if event['type'] == 'http.request':
payload += event.get('body') or b''
while event.get('more_body'):
event = await receive()
if event['type'] == 'http.request':
payload += event.get('body') or b''
elif event['type'] == 'websocket.connect':
pass
else:
return {}
raw_uri = scope['path'].encode('utf-8')
if 'query_string' in scope and scope['query_string']:
raw_uri += b'?' + scope['query_string']
environ = {
'wsgi.input': AwaitablePayload(payload),
'wsgi.errors': sys.stderr,
'wsgi.version': (1, 0),
'wsgi.async': True,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False,
'SERVER_SOFTWARE': 'asgi',
'REQUEST_METHOD': scope.get('method', 'GET'),
'PATH_INFO': scope['path'],
'QUERY_STRING': scope.get('query_string', b'').decode('utf-8'),
'RAW_URI': raw_uri.decode('utf-8'),
'SCRIPT_NAME': '',
'SERVER_PROTOCOL': 'HTTP/1.1',
'REMOTE_ADDR': '127.0.0.1',
'REMOTE_PORT': '0',
'SERVER_NAME': 'asgi',
'SERVER_PORT': '0',
'asgi.receive': receive,
'asgi.send': send,
'asgi.scope': scope,
}
for hdr_name, hdr_value in scope['headers']:
hdr_name = hdr_name.upper().decode('utf-8')
hdr_value = hdr_value.decode('utf-8')
if hdr_name == 'CONTENT-TYPE':
environ['CONTENT_TYPE'] = hdr_value
continue
elif hdr_name == 'CONTENT-LENGTH':
environ['CONTENT_LENGTH'] = hdr_value
continue
key = 'HTTP_%s' % hdr_name.replace('-', '_')
if key in environ:
hdr_value = '%s,%s' % (environ[key], hdr_value)
environ[key] = hdr_value
environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http')
return environ
async def make_response(status, headers, payload, environ):
headers = [(h[0].encode('utf-8'), h[1].encode('utf-8')) for h in headers]
if environ['asgi.scope']['type'] == 'websocket':
if status.startswith('200 '):
await environ['asgi.send']({'type': 'websocket.accept',
'headers': headers})
else:
if payload:
reason = payload.decode('utf-8') \
if isinstance(payload, bytes) else str(payload)
await environ['asgi.send']({'type': 'websocket.close',
'reason': reason})
else:
await environ['asgi.send']({'type': 'websocket.close'})
return
await environ['asgi.send']({'type': 'http.response.start',
'status': int(status.split(' ')[0]),
'headers': headers})
await environ['asgi.send']({'type': 'http.response.body',
'body': payload})
class WebSocket(object): # pragma: no cover
"""
This wrapper class provides an asgi WebSocket interface that is
somewhat compatible with eventlet's implementation.
"""
def __init__(self, handler, server):
self.handler = handler
self.asgi_receive = None
self.asgi_send = None
async def __call__(self, environ):
self.asgi_receive = environ['asgi.receive']
self.asgi_send = environ['asgi.send']
await self.asgi_send({'type': 'websocket.accept'})
await self.handler(self)
async def close(self):
await self.asgi_send({'type': 'websocket.close'})
async def send(self, message):
msg_bytes = None
msg_text = None
if isinstance(message, bytes):
msg_bytes = message
else:
msg_text = message
await self.asgi_send({'type': 'websocket.send',
'bytes': msg_bytes,
'text': msg_text})
async def wait(self):
event = await self.asgi_receive()
if event['type'] != 'websocket.receive':
raise IOError()
return event.get('bytes') or event.get('text')
_async = {
'asyncio': True,
'translate_request': translate_request,
'make_response': make_response,
'websocket': WebSocket,
}
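
A minimal sketch of the lifespan hooks handled above, assuming engineio and uvicorn are installed; the hook bodies and the port are illustrative:

import engineio
import uvicorn

eio = engineio.AsyncServer(async_mode='asgi')

async def on_startup():        # coroutines are awaited by lifespan()
    print('starting up')

def on_shutdown():             # plain callables are invoked directly
    print('shutting down')

app = engineio.ASGIApp(eio, on_startup=on_startup, on_shutdown=on_shutdown)

if __name__ == '__main__':
    uvicorn.run(app, host='127.0.0.1', port=5000)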


@@ -0,0 +1,35 @@
from __future__ import absolute_import
from eventlet.green.threading import Thread, Event
from eventlet import queue
from eventlet import sleep
from eventlet.websocket import WebSocketWSGI as _WebSocketWSGI
class WebSocketWSGI(_WebSocketWSGI):
def __init__(self, handler, server):
try:
super().__init__(
handler, max_frame_length=int(server.max_http_buffer_size))
except TypeError: # pragma: no cover
# older versions of eventlet do not support a max frame size
super().__init__(handler)
self._sock = None
def __call__(self, environ, start_response):
if 'eventlet.input' not in environ:
raise RuntimeError('You need to use the eventlet server. '
'See the Deployment section of the '
'documentation for more information.')
self._sock = environ['eventlet.input'].get_socket()
return super().__call__(environ, start_response)
_async = {
'thread': Thread,
'queue': queue.Queue,
'queue_empty': queue.Empty,
'event': Event,
'websocket': WebSocketWSGI,
'sleep': sleep,
}
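
A minimal sketch of serving this async mode with eventlet's WSGI server, which provides the green thread primitives registered above; eventlet is assumed installed and the port is illustrative:

import eventlet
from eventlet import wsgi
import engineio

eio = engineio.Server(async_mode='eventlet')
app = engineio.WSGIApp(eio)

if __name__ == '__main__':
    # eventlet's WSGI server exposes the 'eventlet.input' key that the
    # WebSocketWSGI wrapper above relies on
    wsgi.server(eventlet.listen(('', 5000)), app)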


@@ -0,0 +1,63 @@
from __future__ import absolute_import
import gevent
from gevent import queue
from gevent.event import Event
try:
import geventwebsocket # noqa
_websocket_available = True
except ImportError:
_websocket_available = False
class Thread(gevent.Greenlet): # pragma: no cover
"""
This wrapper class provides a gevent Greenlet interface that is compatible
with the standard library's Thread class.
"""
def __init__(self, target, args=[], kwargs={}):
super().__init__(target, *args, **kwargs)
def _run(self):
return self.run()
class WebSocketWSGI(object): # pragma: no cover
"""
This wrapper class provides a gevent WebSocket interface that is
compatible with eventlet's implementation.
"""
def __init__(self, handler, server):
self.app = handler
def __call__(self, environ, start_response):
if 'wsgi.websocket' not in environ:
raise RuntimeError('You need to use the gevent-websocket server. '
'See the Deployment section of the '
'documentation for more information.')
self._sock = environ['wsgi.websocket']
self.environ = environ
self.version = self._sock.version
self.path = self._sock.path
self.origin = self._sock.origin
self.protocol = self._sock.protocol
return self.app(self)
def close(self):
return self._sock.close()
def send(self, message):
return self._sock.send(message)
def wait(self):
return self._sock.receive()
_async = {
'thread': Thread,
'queue': queue.JoinableQueue,
'queue_empty': queue.Empty,
'event': Event,
'websocket': WebSocketWSGI if _websocket_available else None,
'sleep': gevent.sleep,
}
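
A minimal sketch of serving this async mode with gevent's WSGI server and the optional gevent-websocket handler, both assumed installed; the port is illustrative:

import engineio
from gevent import pywsgi
from geventwebsocket.handler import WebSocketHandler  # optional, for WebSocket support

eio = engineio.Server(async_mode='gevent')
app = engineio.WSGIApp(eio)

if __name__ == '__main__':
    # WebSocketHandler populates environ['wsgi.websocket'], which the
    # WebSocketWSGI wrapper above requires
    server = pywsgi.WSGIServer(('', 5000), app, handler_class=WebSocketHandler)
    server.serve_forever()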


@@ -0,0 +1,167 @@
import gevent
from gevent import queue
from gevent.event import Event
from gevent import selectors
import uwsgi
_websocket_available = hasattr(uwsgi, 'websocket_handshake')
class Thread(gevent.Greenlet): # pragma: no cover
"""
This wrapper class provides a gevent Greenlet interface that is compatible
with the standard library's Thread class.
"""
def __init__(self, target, args=[], kwargs={}):
super().__init__(target, *args, **kwargs)
def _run(self):
return self.run()
class uWSGIWebSocket(object): # pragma: no cover
"""
This wrapper class provides a uWSGI WebSocket interface that is
compatible with eventlet's implementation.
"""
def __init__(self, handler, server):
self.app = handler
self._sock = None
self.received_messages = []
def __call__(self, environ, start_response):
self._sock = uwsgi.connection_fd()
self.environ = environ
uwsgi.websocket_handshake()
self._req_ctx = None
if hasattr(uwsgi, 'request_context'):
# uWSGI >= 2.1.x with support for API access across greenlets
self._req_ctx = uwsgi.request_context()
else:
# use event and queue for sending messages
self._event = Event()
self._send_queue = queue.Queue()
# spawn a select greenlet
def select_greenlet_runner(fd, event):
"""Sets event when data becomes available to read on fd."""
sel = selectors.DefaultSelector()
sel.register(fd, selectors.EVENT_READ)
try:
while True:
sel.select()
event.set()
except gevent.GreenletExit:
sel.unregister(fd)
self._select_greenlet = gevent.spawn(
select_greenlet_runner,
self._sock,
self._event)
self.app(self)
def close(self):
"""Disconnects uWSGI from the client."""
if self._req_ctx is None:
# better kill it here in case wait() is not called again
self._select_greenlet.kill()
self._event.set()
uwsgi.disconnect()
def _send(self, msg):
"""Transmits message either in binary or UTF-8 text mode,
depending on its type."""
if isinstance(msg, bytes):
method = uwsgi.websocket_send_binary
else:
method = uwsgi.websocket_send
if self._req_ctx is not None:
method(msg, request_context=self._req_ctx)
else:
method(msg)
def _decode_received(self, msg):
"""Returns either bytes or str, depending on message type."""
if not isinstance(msg, bytes):
# already decoded - do nothing
return msg
# only decode from utf-8 if message is not binary data
type = ord(msg[0:1])
if type >= 48: # no binary
return msg.decode('utf-8')
# binary message, don't try to decode
return msg
def send(self, msg):
"""Queues a message for sending. Real transmission is done in
wait method.
Sends directly if uWSGI version is new enough."""
if self._req_ctx is not None:
self._send(msg)
else:
self._send_queue.put(msg)
self._event.set()
def wait(self):
"""Waits and returns received messages.
If running in compatibility mode for older uWSGI versions,
it also sends messages that have been queued by send().
A return value of None means that connection was closed.
This must be called repeatedly. For uWSGI < 2.1.x it must
be called from the main greenlet."""
while True:
if self._req_ctx is not None:
try:
msg = uwsgi.websocket_recv(request_context=self._req_ctx)
except IOError: # connection closed
self.close()
return None
return self._decode_received(msg)
else:
if self.received_messages:
return self.received_messages.pop(0)
# we wake up at least every 3 seconds to let uWSGI
# do its ping/ponging
event_set = self._event.wait(timeout=3)
if event_set:
self._event.clear()
# maybe there is something to send
msgs = []
while True:
try:
msgs.append(self._send_queue.get(block=False))
except gevent.queue.Empty:
break
for msg in msgs:
try:
self._send(msg)
except IOError:
self.close()
return None
# maybe there is something to receive, if not, at least
# ensure uWSGI does its ping/ponging
while True:
try:
msg = uwsgi.websocket_recv_nb()
except IOError: # connection closed
self.close()
return None
if msg: # message available
self.received_messages.append(
self._decode_received(msg))
else:
break
if self.received_messages:
return self.received_messages.pop(0)
_async = {
'thread': Thread,
'queue': queue.JoinableQueue,
'queue_empty': queue.Empty,
'event': Event,
'websocket': uWSGIWebSocket if _websocket_available else None,
'sleep': gevent.sleep,
}
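
A minimal sketch of an application module for this async mode; uWSGI itself supplies the websocket API, so the process is assumed to be started with uWSGI's gevent loop and websocket support enabled (for example, something along the lines of "uwsgi --http :5000 --gevent 1000 --http-websockets --wsgi-file app.py --callable app", per the uWSGI documentation). The echo handler is illustrative:

# app.py -- served by uWSGI, which injects the 'uwsgi' module at runtime
import engineio

eio = engineio.Server(async_mode='gevent_uwsgi')
app = engineio.WSGIApp(eio)

@eio.on('message')
def message(sid, data):
    eio.send(sid, data)  # simple echo, illustrative only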


@@ -0,0 +1,147 @@
import sys
from urllib.parse import urlsplit
try: # pragma: no cover
from sanic.response import HTTPResponse
try:
from sanic.server.protocols.websocket_protocol import WebSocketProtocol
except ImportError:
from sanic.websocket import WebSocketProtocol
except ImportError:
HTTPResponse = None
WebSocketProtocol = None
def create_route(app, engineio_server, engineio_endpoint): # pragma: no cover
"""This function sets up the engine.io endpoint as a route for the
application.
Note that both GET and POST requests must be hooked up on the engine.io
endpoint.
"""
app.add_route(engineio_server.handle_request, engineio_endpoint,
methods=['GET', 'POST', 'OPTIONS'])
try:
app.enable_websocket()
except AttributeError:
# ignore, this version does not support websocket
pass
def translate_request(request): # pragma: no cover
"""This function takes the arguments passed to the request handler and
uses them to generate a WSGI compatible environ dictionary.
"""
class AwaitablePayload(object):
def __init__(self, payload):
self.payload = payload or b''
async def read(self, length=None):
if length is None:
r = self.payload
self.payload = b''
else:
r = self.payload[:length]
self.payload = self.payload[length:]
return r
uri_parts = urlsplit(request.url)
environ = {
'wsgi.input': AwaitablePayload(request.body),
'wsgi.errors': sys.stderr,
'wsgi.version': (1, 0),
'wsgi.async': True,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False,
'SERVER_SOFTWARE': 'sanic',
'REQUEST_METHOD': request.method,
'QUERY_STRING': uri_parts.query or '',
'RAW_URI': request.url,
'SERVER_PROTOCOL': 'HTTP/' + request.version,
'REMOTE_ADDR': '127.0.0.1',
'REMOTE_PORT': '0',
'SERVER_NAME': 'sanic',
'SERVER_PORT': '0',
'sanic.request': request
}
for hdr_name, hdr_value in request.headers.items():
hdr_name = hdr_name.upper()
if hdr_name == 'CONTENT-TYPE':
environ['CONTENT_TYPE'] = hdr_value
continue
elif hdr_name == 'CONTENT-LENGTH':
environ['CONTENT_LENGTH'] = hdr_value
continue
key = 'HTTP_%s' % hdr_name.replace('-', '_')
if key in environ:
hdr_value = '%s,%s' % (environ[key], hdr_value)
environ[key] = hdr_value
environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http')
path_info = uri_parts.path
environ['PATH_INFO'] = path_info
environ['SCRIPT_NAME'] = ''
return environ
def make_response(status, headers, payload, environ): # pragma: no cover
"""This function generates an appropriate response object for this async
mode.
"""
headers_dict = {}
content_type = None
for h in headers:
if h[0].lower() == 'content-type':
content_type = h[1]
else:
headers_dict[h[0]] = h[1]
return HTTPResponse(body=payload, content_type=content_type,
status=int(status.split()[0]), headers=headers_dict)
class WebSocket(object): # pragma: no cover
"""
This wrapper class provides a sanic WebSocket interface that is
somewhat compatible with eventlet's implementation.
"""
def __init__(self, handler, server):
self.handler = handler
self._sock = None
async def __call__(self, environ):
request = environ['sanic.request']
protocol = request.transport.get_protocol()
self._sock = await protocol.websocket_handshake(request)
self.environ = environ
await self.handler(self)
async def close(self):
await self._sock.close()
async def send(self, message):
await self._sock.send(message)
async def wait(self):
data = await self._sock.recv()
if not isinstance(data, bytes) and \
not isinstance(data, str):
raise IOError()
return data
_async = {
'asyncio': True,
'create_route': create_route,
'translate_request': translate_request,
'make_response': make_response,
'websocket': WebSocket if WebSocketProtocol else None,
}
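
A minimal sketch of attaching the server to a Sanic application, assuming engineio and sanic are installed; the application name, port and handler are illustrative:

import engineio
from sanic import Sanic

eio = engineio.AsyncServer(async_mode='sanic')
app = Sanic('engineio_example')
eio.attach(app)  # adds GET/POST/OPTIONS on /engine.io/ and enables websockets

@eio.on('message')
async def message(sid, data):
    await eio.send(sid, data)

if __name__ == '__main__':
    app.run(host='127.0.0.1', port=5000)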


@@ -0,0 +1,48 @@
from __future__ import absolute_import
import queue
import threading
import time
try:
from simple_websocket import Server, ConnectionClosed
_websocket_available = True
except ImportError: # pragma: no cover
_websocket_available = False
class WebSocketWSGI(object): # pragma: no cover
"""
This wrapper class provides a threading WebSocket interface that is
compatible with eventlet's implementation.
"""
def __init__(self, handler, server):
self.app = handler
def __call__(self, environ, start_response):
self.ws = Server(environ)
return self.app(self)
def close(self):
return self.ws.close()
def send(self, message):
try:
return self.ws.send(message)
except ConnectionClosed:
raise IOError()
def wait(self):
try:
return self.ws.receive()
except ConnectionClosed:
return None
_async = {
'thread': threading.Thread,
'queue': queue.Queue,
'queue_empty': queue.Empty,
'event': threading.Event,
'websocket': WebSocketWSGI if _websocket_available else None,
'sleep': time.sleep,
}
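
A minimal sketch of the threading async mode served by a standard threaded WSGI server; werkzeug's development server is used here purely as an example and is assumed installed, and the port is illustrative:

import engineio
from werkzeug.serving import run_simple  # any threaded WSGI server works

eio = engineio.Server(async_mode='threading')
app = engineio.WSGIApp(eio)

@eio.on('message')
def message(sid, data):
    eio.send(sid, data)

if __name__ == '__main__':
    # simple-websocket (imported above) handles the WebSocket upgrade
    run_simple('127.0.0.1', 5000, app, threaded=True)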


@@ -0,0 +1,182 @@
import asyncio
import sys
from urllib.parse import urlsplit
from .. import exceptions
import tornado.web
import tornado.websocket
def get_tornado_handler(engineio_server):
class Handler(tornado.websocket.WebSocketHandler): # pragma: no cover
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if isinstance(engineio_server.cors_allowed_origins, str):
if engineio_server.cors_allowed_origins == '*':
self.allowed_origins = None
else:
self.allowed_origins = [
engineio_server.cors_allowed_origins]
else:
self.allowed_origins = engineio_server.cors_allowed_origins
self.receive_queue = asyncio.Queue()
async def get(self, *args, **kwargs):
if self.request.headers.get('Upgrade', '').lower() == 'websocket':
ret = super().get(*args, **kwargs)
if asyncio.iscoroutine(ret):
await ret
else:
await engineio_server.handle_request(self)
async def open(self, *args, **kwargs):
# this is the handler for the websocket request
asyncio.ensure_future(engineio_server.handle_request(self))
async def post(self, *args, **kwargs):
await engineio_server.handle_request(self)
async def options(self, *args, **kwargs):
await engineio_server.handle_request(self)
async def on_message(self, message):
await self.receive_queue.put(message)
async def get_next_message(self):
return await self.receive_queue.get()
def on_close(self):
self.receive_queue.put_nowait(None)
def check_origin(self, origin):
if self.allowed_origins is None or origin in self.allowed_origins:
return True
return super().check_origin(origin)
def get_compression_options(self):
# enable compression
return {}
return Handler
def translate_request(handler):
"""This function takes the arguments passed to the request handler and
uses them to generate a WSGI compatible environ dictionary.
"""
class AwaitablePayload(object):
def __init__(self, payload):
self.payload = payload or b''
async def read(self, length=None):
if length is None:
r = self.payload
self.payload = b''
else:
r = self.payload[:length]
self.payload = self.payload[length:]
return r
payload = handler.request.body
uri_parts = urlsplit(handler.request.path)
full_uri = handler.request.path
if handler.request.query: # pragma: no cover
full_uri += '?' + handler.request.query
environ = {
'wsgi.input': AwaitablePayload(payload),
'wsgi.errors': sys.stderr,
'wsgi.version': (1, 0),
'wsgi.async': True,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False,
'SERVER_SOFTWARE': 'aiohttp',
'REQUEST_METHOD': handler.request.method,
'QUERY_STRING': handler.request.query or '',
'RAW_URI': full_uri,
'SERVER_PROTOCOL': 'HTTP/%s' % handler.request.version,
'REMOTE_ADDR': '127.0.0.1',
'REMOTE_PORT': '0',
'SERVER_NAME': 'aiohttp',
'SERVER_PORT': '0',
'tornado.handler': handler
}
for hdr_name, hdr_value in handler.request.headers.items():
hdr_name = hdr_name.upper()
if hdr_name == 'CONTENT-TYPE':
environ['CONTENT_TYPE'] = hdr_value
continue
elif hdr_name == 'CONTENT-LENGTH':
environ['CONTENT_LENGTH'] = hdr_value
continue
key = 'HTTP_%s' % hdr_name.replace('-', '_')
environ[key] = hdr_value
environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http')
path_info = uri_parts.path
environ['PATH_INFO'] = path_info
environ['SCRIPT_NAME'] = ''
return environ
def make_response(status, headers, payload, environ):
"""This function generates an appropriate response object for this async
mode.
"""
tornado_handler = environ['tornado.handler']
try:
tornado_handler.set_status(int(status.split()[0]))
except RuntimeError: # pragma: no cover
# for websocket connections Tornado does not accept a response, since
# it already emitted the 101 status code
return
for header, value in headers:
tornado_handler.set_header(header, value)
tornado_handler.write(payload)
tornado_handler.finish()
class WebSocket(object): # pragma: no cover
"""
This wrapper class provides a tornado WebSocket interface that is
somewhat compatible with eventlet's implementation.
"""
def __init__(self, handler, server):
self.handler = handler
self.tornado_handler = None
async def __call__(self, environ):
self.tornado_handler = environ['tornado.handler']
self.environ = environ
await self.handler(self)
async def close(self):
self.tornado_handler.close()
async def send(self, message):
try:
self.tornado_handler.write_message(
message, binary=isinstance(message, bytes))
except tornado.websocket.WebSocketClosedError:
raise exceptions.EngineIOError()
async def wait(self):
msg = await self.tornado_handler.get_next_message()
if not isinstance(msg, bytes) and \
not isinstance(msg, str):
raise IOError()
return msg
_async = {
'asyncio': True,
'translate_request': translate_request,
'make_response': make_response,
'websocket': WebSocket,
}
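
A minimal sketch of mounting the handler class returned by get_tornado_handler() in a Tornado application, assuming tornado is installed; the URL pattern matches the default engine.io endpoint and the port is illustrative:

import engineio
import tornado.ioloop
import tornado.web

eio = engineio.AsyncServer(async_mode='tornado')

app = tornado.web.Application([
    # the handler class wraps both the polling and the websocket transports
    (r'/engine.io/', engineio.get_tornado_handler(eio)),
])

if __name__ == '__main__':
    app.listen(5000)
    tornado.ioloop.IOLoop.current().start()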


@@ -0,0 +1,640 @@
import asyncio
import signal
import ssl
import threading
try:
import aiohttp
except ImportError: # pragma: no cover
aiohttp = None
from . import client
from . import exceptions
from . import packet
from . import payload
async_signal_handler_set = False
def async_signal_handler():
"""SIGINT handler.
Disconnect all active async clients.
"""
async def _handler(): # pragma: no cover
for c in client.connected_clients[:]:
if c.is_asyncio_based():
await c.disconnect()
# cancel all running tasks
tasks = [task for task in asyncio.all_tasks() if task is not
asyncio.current_task()]
for task in tasks:
task.cancel()
await asyncio.gather(*tasks, return_exceptions=True)
asyncio.get_event_loop().stop()
asyncio.ensure_future(_handler())
class AsyncClient(client.Client):
"""An Engine.IO client for asyncio.
This class implements a fully compliant Engine.IO web client with support
for websocket and long-polling transports, compatible with the asyncio
framework on Python 3.5 or newer.
:param logger: To enable logging set to ``True`` or pass a logger object to
use. To disable logging set to ``False``. The default is
``False``. Note that fatal errors are logged even when
``logger`` is ``False``.
:param json: An alternative json module to use for encoding and decoding
packets. Custom json modules must have ``dumps`` and ``loads``
functions that are compatible with the standard library
versions.
:param request_timeout: A timeout in seconds for requests. The default is
5 seconds.
:param http_session: an initialized ``aiohttp.ClientSession`` object to be
used when sending requests to the server. Use it if
you need to add special client options such as proxy
servers, SSL certificates, etc.
:param ssl_verify: ``True`` to verify SSL certificates, or ``False`` to
skip SSL certificate verification, allowing
connections to servers with self signed certificates.
The default is ``True``.
:param handle_sigint: Set to ``True`` to automatically handle disconnection
when the process is interrupted, or to ``False`` to
leave interrupt handling to the calling application.
Interrupt handling can only be enabled when the
client instance is created in the main thread.
:param websocket_extra_options: Dictionary containing additional keyword
arguments passed to
``aiohttp.ws_connect()``.
"""
def is_asyncio_based(self):
return True
async def connect(self, url, headers=None, transports=None,
engineio_path='engine.io'):
"""Connect to an Engine.IO server.
:param url: The URL of the Engine.IO server. It can include custom
query string parameters if required by the server.
:param headers: A dictionary with custom headers to send with the
connection request.
:param transports: The list of allowed transports. Valid transports
are ``'polling'`` and ``'websocket'``. If not
given, the polling transport is connected first,
then an upgrade to websocket is attempted.
:param engineio_path: The endpoint where the Engine.IO server is
installed. The default value is appropriate for
most cases.
Note: this method is a coroutine.
Example usage::
eio = engineio.AsyncClient()
await eio.connect('http://localhost:5000')
"""
global async_signal_handler_set
if self.handle_sigint and not async_signal_handler_set and \
threading.current_thread() == threading.main_thread():
try:
asyncio.get_event_loop().add_signal_handler(
signal.SIGINT, async_signal_handler)
except NotImplementedError: # pragma: no cover
self.logger.warning('Signal handler is unsupported')
async_signal_handler_set = True
if self.state != 'disconnected':
raise ValueError('Client is not in a disconnected state')
valid_transports = ['polling', 'websocket']
if transports is not None:
if isinstance(transports, str):
transports = [transports]
transports = [transport for transport in transports
if transport in valid_transports]
if not transports:
raise ValueError('No valid transports provided')
self.transports = transports or valid_transports
self.queue = self.create_queue()
return await getattr(self, '_connect_' + self.transports[0])(
url, headers or {}, engineio_path)
async def wait(self):
"""Wait until the connection with the server ends.
Client applications can use this function to block the main thread
during the life of the connection.
Note: this method is a coroutine.
"""
if self.read_loop_task:
await self.read_loop_task
async def send(self, data):
"""Send a message to the server.
:param data: The data to send to the server. Data can be of type
``str``, ``bytes``, ``list`` or ``dict``. If a ``list``
or ``dict``, the data will be serialized as JSON.
Note: this method is a coroutine.
"""
await self._send_packet(packet.Packet(packet.MESSAGE, data=data))
async def disconnect(self, abort=False):
"""Disconnect from the server.
:param abort: If set to ``True``, do not wait for background tasks
associated with the connection to end.
Note: this method is a coroutine.
"""
if self.state == 'connected':
await self._send_packet(packet.Packet(packet.CLOSE))
await self.queue.put(None)
self.state = 'disconnecting'
await self._trigger_event('disconnect', run_async=False)
if self.current_transport == 'websocket':
await self.ws.close()
if not abort:
await self.read_loop_task
self.state = 'disconnected'
try:
client.connected_clients.remove(self)
except ValueError: # pragma: no cover
pass
await self._reset()
def start_background_task(self, target, *args, **kwargs):
"""Start a background task.
This is a utility function that applications can use to start a
background task.
:param target: the target function to execute.
:param args: arguments to pass to the function.
:param kwargs: keyword arguments to pass to the function.
The return value is a ``asyncio.Task`` object.
"""
return asyncio.ensure_future(target(*args, **kwargs))
async def sleep(self, seconds=0):
"""Sleep for the requested amount of time.
Note: this method is a coroutine.
"""
return await asyncio.sleep(seconds)
def create_queue(self):
"""Create a queue object."""
q = asyncio.Queue()
q.Empty = asyncio.QueueEmpty
return q
def create_event(self):
"""Create an event object."""
return asyncio.Event()
async def _reset(self):
super()._reset()
if not self.external_http: # pragma: no cover
if self.http and not self.http.closed:
await self.http.close()
def __del__(self): # pragma: no cover
# try to close the aiohttp session if it is still open
if self.http and not self.http.closed:
try:
loop = asyncio.get_event_loop()
if loop.is_running():
loop.ensure_future(self.http.close())
else:
loop.run_until_complete(self.http.close())
except:
pass
async def _connect_polling(self, url, headers, engineio_path):
"""Establish a long-polling connection to the Engine.IO server."""
if aiohttp is None: # pragma: no cover
self.logger.error('aiohttp not installed -- cannot make HTTP '
'requests!')
return
self.base_url = self._get_engineio_url(url, engineio_path, 'polling')
self.logger.info('Attempting polling connection to ' + self.base_url)
r = await self._send_request(
'GET', self.base_url + self._get_url_timestamp(), headers=headers,
timeout=self.request_timeout)
if r is None or isinstance(r, str):
await self._reset()
raise exceptions.ConnectionError(
r or 'Connection refused by the server')
if r.status < 200 or r.status >= 300:
await self._reset()
try:
arg = await r.json()
except aiohttp.ClientError:
arg = None
raise exceptions.ConnectionError(
'Unexpected status code {} in server response'.format(
r.status), arg)
try:
p = payload.Payload(encoded_payload=(await r.read()).decode(
'utf-8'))
except ValueError:
raise exceptions.ConnectionError(
'Unexpected response from server') from None
open_packet = p.packets[0]
if open_packet.packet_type != packet.OPEN:
raise exceptions.ConnectionError(
'OPEN packet not returned by server')
self.logger.info(
'Polling connection accepted with ' + str(open_packet.data))
self.sid = open_packet.data['sid']
self.upgrades = open_packet.data['upgrades']
self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0
self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0
self.current_transport = 'polling'
self.base_url += '&sid=' + self.sid
self.state = 'connected'
client.connected_clients.append(self)
await self._trigger_event('connect', run_async=False)
for pkt in p.packets[1:]:
await self._receive_packet(pkt)
if 'websocket' in self.upgrades and 'websocket' in self.transports:
# attempt to upgrade to websocket
if await self._connect_websocket(url, headers, engineio_path):
# upgrade to websocket succeeded, we're done here
return
self.write_loop_task = self.start_background_task(self._write_loop)
self.read_loop_task = self.start_background_task(
self._read_loop_polling)
async def _connect_websocket(self, url, headers, engineio_path):
"""Establish or upgrade to a WebSocket connection with the server."""
if aiohttp is None: # pragma: no cover
self.logger.error('aiohttp package not installed')
return False
websocket_url = self._get_engineio_url(url, engineio_path,
'websocket')
if self.sid:
self.logger.info(
'Attempting WebSocket upgrade to ' + websocket_url)
upgrade = True
websocket_url += '&sid=' + self.sid
else:
upgrade = False
self.base_url = websocket_url
self.logger.info(
'Attempting WebSocket connection to ' + websocket_url)
if self.http is None or self.http.closed: # pragma: no cover
self.http = aiohttp.ClientSession()
# extract any new cookies passed in a header so that they can also be
# sent to the WebSocket route
cookies = {}
for header, value in headers.items():
if header.lower() == 'cookie':
cookies = dict(
[cookie.split('=', 1) for cookie in value.split('; ')])
del headers[header]
break
self.http.cookie_jar.update_cookies(cookies)
extra_options = {'timeout': self.request_timeout}
if not self.ssl_verify:
ssl_context = ssl.create_default_context()
ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_NONE
extra_options['ssl'] = ssl_context
# combine internally generated options with the ones supplied by the
# caller. The caller's options take precedence.
headers.update(self.websocket_extra_options.pop('headers', {}))
extra_options['headers'] = headers
extra_options.update(self.websocket_extra_options)
try:
ws = await self.http.ws_connect(
websocket_url + self._get_url_timestamp(), **extra_options)
except (aiohttp.client_exceptions.WSServerHandshakeError,
aiohttp.client_exceptions.ServerConnectionError,
aiohttp.client_exceptions.ClientConnectionError):
if upgrade:
self.logger.warning(
'WebSocket upgrade failed: connection error')
return False
else:
raise exceptions.ConnectionError('Connection error')
if upgrade:
p = packet.Packet(packet.PING, data='probe').encode()
try:
await ws.send_str(p)
except Exception as e: # pragma: no cover
self.logger.warning(
'WebSocket upgrade failed: unexpected send exception: %s',
str(e))
return False
try:
p = (await ws.receive()).data
except Exception as e: # pragma: no cover
self.logger.warning(
'WebSocket upgrade failed: unexpected recv exception: %s',
str(e))
return False
pkt = packet.Packet(encoded_packet=p)
if pkt.packet_type != packet.PONG or pkt.data != 'probe':
self.logger.warning(
'WebSocket upgrade failed: no PONG packet')
return False
p = packet.Packet(packet.UPGRADE).encode()
try:
await ws.send_str(p)
except Exception as e: # pragma: no cover
self.logger.warning(
'WebSocket upgrade failed: unexpected send exception: %s',
str(e))
return False
self.current_transport = 'websocket'
self.logger.info('WebSocket upgrade was successful')
else:
try:
p = (await ws.receive()).data
except Exception as e: # pragma: no cover
raise exceptions.ConnectionError(
'Unexpected recv exception: ' + str(e))
open_packet = packet.Packet(encoded_packet=p)
if open_packet.packet_type != packet.OPEN:
raise exceptions.ConnectionError('no OPEN packet')
self.logger.info(
'WebSocket connection accepted with ' + str(open_packet.data))
self.sid = open_packet.data['sid']
self.upgrades = open_packet.data['upgrades']
self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0
self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0
self.current_transport = 'websocket'
self.state = 'connected'
client.connected_clients.append(self)
await self._trigger_event('connect', run_async=False)
self.ws = ws
self.write_loop_task = self.start_background_task(self._write_loop)
self.read_loop_task = self.start_background_task(
self._read_loop_websocket)
return True
async def _receive_packet(self, pkt):
"""Handle incoming packets from the server."""
packet_name = packet.packet_names[pkt.packet_type] \
if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN'
self.logger.info(
'Received packet %s data %s', packet_name,
pkt.data if not isinstance(pkt.data, bytes) else '<binary>')
if pkt.packet_type == packet.MESSAGE:
await self._trigger_event('message', pkt.data, run_async=True)
elif pkt.packet_type == packet.PING:
await self._send_packet(packet.Packet(packet.PONG, pkt.data))
elif pkt.packet_type == packet.CLOSE:
await self.disconnect(abort=True)
elif pkt.packet_type == packet.NOOP:
pass
else:
self.logger.error('Received unexpected packet of type %s',
pkt.packet_type)
async def _send_packet(self, pkt):
"""Queue a packet to be sent to the server."""
if self.state != 'connected':
return
await self.queue.put(pkt)
self.logger.info(
'Sending packet %s data %s',
packet.packet_names[pkt.packet_type],
pkt.data if not isinstance(pkt.data, bytes) else '<binary>')
async def _send_request(
self, method, url, headers=None, body=None,
timeout=None): # pragma: no cover
if self.http is None or self.http.closed:
self.http = aiohttp.ClientSession()
http_method = getattr(self.http, method.lower())
try:
if not self.ssl_verify:
return await http_method(
url, headers=headers, data=body,
timeout=aiohttp.ClientTimeout(total=timeout), ssl=False)
else:
return await http_method(
url, headers=headers, data=body,
timeout=aiohttp.ClientTimeout(total=timeout))
except (aiohttp.ClientError, asyncio.TimeoutError) as exc:
self.logger.info('HTTP %s request to %s failed with error %s.',
method, url, exc)
return str(exc)
async def _trigger_event(self, event, *args, **kwargs):
"""Invoke an event handler."""
run_async = kwargs.pop('run_async', False)
ret = None
if event in self.handlers:
if asyncio.iscoroutinefunction(self.handlers[event]) is True:
if run_async:
return self.start_background_task(self.handlers[event],
*args)
else:
try:
ret = await self.handlers[event](*args)
except asyncio.CancelledError: # pragma: no cover
pass
except:
self.logger.exception(event + ' async handler error')
if event == 'connect':
# if connect handler raised error we reject the
# connection
return False
else:
if run_async:
async def async_handler():
return self.handlers[event](*args)
return self.start_background_task(async_handler)
else:
try:
ret = self.handlers[event](*args)
except:
self.logger.exception(event + ' handler error')
if event == 'connect':
# if connect handler raised error we reject the
# connection
return False
return ret
async def _read_loop_polling(self):
"""Read packets by polling the Engine.IO server."""
while self.state == 'connected':
self.logger.info(
'Sending polling GET request to ' + self.base_url)
r = await self._send_request(
'GET', self.base_url + self._get_url_timestamp(),
timeout=max(self.ping_interval, self.ping_timeout) + 5)
if r is None or isinstance(r, str):
self.logger.warning(
r or 'Connection refused by the server, aborting')
await self.queue.put(None)
break
if r.status < 200 or r.status >= 300:
self.logger.warning('Unexpected status code %s in server '
'response, aborting', r.status)
await self.queue.put(None)
break
try:
p = payload.Payload(encoded_payload=(await r.read()).decode(
'utf-8'))
except ValueError:
self.logger.warning(
'Unexpected packet from server, aborting')
await self.queue.put(None)
break
for pkt in p.packets:
await self._receive_packet(pkt)
self.logger.info('Waiting for write loop task to end')
await self.write_loop_task
if self.state == 'connected':
await self._trigger_event('disconnect', run_async=False)
try:
client.connected_clients.remove(self)
except ValueError: # pragma: no cover
pass
await self._reset()
self.logger.info('Exiting read loop task')
async def _read_loop_websocket(self):
"""Read packets from the Engine.IO WebSocket connection."""
while self.state == 'connected':
p = None
try:
p = await asyncio.wait_for(
self.ws.receive(),
timeout=self.ping_interval + self.ping_timeout)
if not isinstance(p.data, (str, bytes)): # pragma: no cover
self.logger.warning(
'Server sent unexpected packet %s data %s, aborting',
str(p.type), str(p.data))
await self.queue.put(None)
break # the connection is broken
p = p.data
except asyncio.TimeoutError:
self.logger.warning(
'Server has stopped communicating, aborting')
await self.queue.put(None)
break
except aiohttp.client_exceptions.ServerDisconnectedError:
self.logger.info(
'Read loop: WebSocket connection was closed, aborting')
await self.queue.put(None)
break
except Exception as e:
self.logger.info(
'Unexpected error receiving packet: "%s", aborting',
str(e))
await self.queue.put(None)
break
try:
pkt = packet.Packet(encoded_packet=p)
except Exception as e: # pragma: no cover
self.logger.info(
'Unexpected error decoding packet: "%s", aborting', str(e))
await self.queue.put(None)
break
await self._receive_packet(pkt)
self.logger.info('Waiting for write loop task to end')
await self.write_loop_task
if self.state == 'connected':
await self._trigger_event('disconnect', run_async=False)
try:
client.connected_clients.remove(self)
except ValueError: # pragma: no cover
pass
await self._reset()
self.logger.info('Exiting read loop task')
async def _write_loop(self):
"""This background task sends packages to the server as they are
pushed to the send queue.
"""
while self.state == 'connected':
# to simplify the timeout handling, use the maximum of the
# ping interval and ping timeout as timeout, with an extra 5
# seconds grace period
timeout = max(self.ping_interval, self.ping_timeout) + 5
packets = None
try:
packets = [await asyncio.wait_for(self.queue.get(), timeout)]
except (self.queue.Empty, asyncio.TimeoutError):
self.logger.error('packet queue is empty, aborting')
break
except asyncio.CancelledError: # pragma: no cover
break
if packets == [None]:
self.queue.task_done()
packets = []
else:
while True:
try:
packets.append(self.queue.get_nowait())
except self.queue.Empty:
break
if packets[-1] is None:
packets = packets[:-1]
self.queue.task_done()
break
if not packets:
# empty packet list returned -> connection closed
break
if self.current_transport == 'polling':
p = payload.Payload(packets=packets)
r = await self._send_request(
'POST', self.base_url, body=p.encode(),
headers={'Content-Type': 'text/plain'},
timeout=self.request_timeout)
for pkt in packets:
self.queue.task_done()
if r is None or isinstance(r, str):
self.logger.warning(
r or 'Connection refused by the server, aborting')
break
if r.status < 200 or r.status >= 300:
self.logger.warning('Unexpected status code %s in server '
'response, aborting', r.status)
await self._reset()
break
else:
# websocket
try:
for pkt in packets:
if pkt.binary:
await self.ws.send_bytes(pkt.encode())
else:
await self.ws.send_str(pkt.encode())
self.queue.task_done()
except (aiohttp.client_exceptions.ServerDisconnectedError,
BrokenPipeError, OSError):
self.logger.info(
'Write loop: WebSocket connection was closed, '
'aborting')
break
self.logger.info('Exiting write loop task')
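
A minimal client sketch exercising the connect/send/wait API above, assuming a server is listening on the illustrative URL; handler names are arbitrary:

import asyncio
import engineio

eio = engineio.AsyncClient()

@eio.on('connect')
async def on_connect():
    print('connected, sid =', eio.sid)
    await eio.send('hello')

@eio.on('message')
async def on_message(data):
    print('received:', data)

async def main():
    await eio.connect('http://localhost:5000')  # polling first, then websocket upgrade
    await eio.wait()                            # block until the connection ends

asyncio.run(main())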


@@ -0,0 +1,513 @@
import asyncio
import urllib
from . import exceptions
from . import packet
from . import server
from . import asyncio_socket
class AsyncServer(server.Server):
"""An Engine.IO server for asyncio.
This class implements a fully compliant Engine.IO web server with support
for websocket and long-polling transports, compatible with the asyncio
framework on Python 3.5 or newer.
:param async_mode: The asynchronous model to use. See the Deployment
section in the documentation for a description of the
available options. Valid async modes are "aiohttp",
"sanic", "tornado" and "asgi". If this argument is not
given, "aiohttp" is tried first, followed by "sanic",
"tornado", and finally "asgi". The first async mode that
has all its dependencies installed is the one that is
chosen.
:param ping_interval: The interval in seconds at which the server pings
the client. The default is 25 seconds. For advanced
control, a two element tuple can be given, where
the first number is the ping interval and the second
is a grace period added by the server.
:param ping_timeout: The time in seconds that the client waits for the
server to respond before disconnecting. The default
is 20 seconds.
:param max_http_buffer_size: The maximum size of a message. The default
is 1,000,000 bytes.
:param allow_upgrades: Whether to allow transport upgrades or not.
:param http_compression: Whether to compress packets when using the
polling transport.
:param compression_threshold: Only compress messages when their byte size
is greater than this value.
:param cookie: If set to a string, it is the name of the HTTP cookie the
server sends back to the client containing the client
session id. If set to a dictionary, the ``'name'`` key
contains the cookie name and other keys define cookie
attributes, where the value of each attribute can be a
string, a callable with no arguments, or a boolean. If set
to ``None`` (the default), a cookie is not sent to the
client.
:param cors_allowed_origins: Origin or list of origins that are allowed to
connect to this server. Only the same origin
is allowed by default. Set this argument to
``'*'`` to allow all origins, or to ``[]`` to
disable CORS handling.
:param cors_credentials: Whether credentials (cookies, authentication) are
allowed in requests to this server.
:param logger: To enable logging set to ``True`` or pass a logger object to
use. To disable logging set to ``False``. Note that fatal
errors are logged even when ``logger`` is ``False``.
:param json: An alternative json module to use for encoding and decoding
packets. Custom json modules must have ``dumps`` and ``loads``
functions that are compatible with the standard library
versions.
:param async_handlers: If set to ``True``, run message event handlers in
non-blocking threads. To run handlers synchronously,
set to ``False``. The default is ``True``.
:param transports: The list of allowed transports. Valid transports
are ``'polling'`` and ``'websocket'``. Defaults to
``['polling', 'websocket']``.
:param kwargs: Reserved for future extensions, any additional parameters
given as keyword arguments will be silently ignored.
"""
def is_asyncio_based(self):
return True
def async_modes(self):
return ['aiohttp', 'sanic', 'tornado', 'asgi']
def attach(self, app, engineio_path='engine.io'):
"""Attach the Engine.IO server to an application."""
engineio_path = engineio_path.strip('/')
self._async['create_route'](app, self, '/{}/'.format(engineio_path))
async def send(self, sid, data):
"""Send a message to a client.
:param sid: The session id of the recipient client.
:param data: The data to send to the client. Data can be of type
``str``, ``bytes``, ``list`` or ``dict``. If a ``list``
or ``dict``, the data will be serialized as JSON.
Note: this method is a coroutine.
"""
try:
socket = self._get_socket(sid)
except KeyError:
# the socket is not available
self.logger.warning('Cannot send to sid %s', sid)
return
await socket.send(packet.Packet(packet.MESSAGE, data=data))
async def get_session(self, sid):
"""Return the user session for a client.
:param sid: The session id of the client.
The return value is a dictionary. Modifications made to this
dictionary are not guaranteed to be preserved. If you want to modify
the user session, use the ``session`` context manager instead.
"""
socket = self._get_socket(sid)
return socket.session
async def save_session(self, sid, session):
"""Store the user session for a client.
:param sid: The session id of the client.
:param session: The session dictionary.
"""
socket = self._get_socket(sid)
socket.session = session
def session(self, sid):
"""Return the user session for a client with context manager syntax.
:param sid: The session id of the client.
This is a context manager that returns the user session dictionary for
the client. Any changes that are made to this dictionary inside the
context manager block are saved back to the session. Example usage::
@eio.on('connect')
def on_connect(sid, environ):
username = authenticate_user(environ)
if not username:
return False
with eio.session(sid) as session:
session['username'] = username
@eio.on('message')
def on_message(sid, msg):
async with eio.session(sid) as session:
print('received message from ', session['username'])
"""
class _session_context_manager(object):
def __init__(self, server, sid):
self.server = server
self.sid = sid
self.session = None
async def __aenter__(self):
self.session = await self.server.get_session(sid)
return self.session
async def __aexit__(self, *args):
await self.server.save_session(sid, self.session)
return _session_context_manager(self, sid)
async def disconnect(self, sid=None):
"""Disconnect a client.
:param sid: The session id of the client to close. If this parameter
is not given, then all clients are closed.
Note: this method is a coroutine.
"""
if sid is not None:
try:
socket = self._get_socket(sid)
except KeyError: # pragma: no cover
# the socket was already closed or gone
pass
else:
await socket.close()
if sid in self.sockets: # pragma: no cover
del self.sockets[sid]
else:
await asyncio.wait([asyncio.create_task(client.close())
for client in self.sockets.values()])
self.sockets = {}
async def handle_request(self, *args, **kwargs):
"""Handle an HTTP request from the client.
This is the entry point of the Engine.IO application. This function
returns the HTTP response to deliver to the client.
Note: this method is a coroutine.
"""
translate_request = self._async['translate_request']
if asyncio.iscoroutinefunction(translate_request):
environ = await translate_request(*args, **kwargs)
else:
environ = translate_request(*args, **kwargs)
if self.cors_allowed_origins != []:
# Validate the origin header if present
# This is important for WebSocket more than for HTTP, since
# browsers only apply CORS controls to HTTP.
origin = environ.get('HTTP_ORIGIN')
if origin:
allowed_origins = self._cors_allowed_origins(environ)
if allowed_origins is not None and origin not in \
allowed_origins:
self._log_error_once(
origin + ' is not an accepted origin.', 'bad-origin')
return await self._make_response(
self._bad_request(
origin + ' is not an accepted origin.'),
environ)
method = environ['REQUEST_METHOD']
query = urllib.parse.parse_qs(environ.get('QUERY_STRING', ''))
sid = query['sid'][0] if 'sid' in query else None
jsonp = False
jsonp_index = None
# make sure the client uses an allowed transport
transport = query.get('transport', ['polling'])[0]
if transport not in self.transports:
self._log_error_once('Invalid transport', 'bad-transport')
return await self._make_response(
self._bad_request('Invalid transport'), environ)
# make sure the client speaks a compatible Engine.IO version
sid = query['sid'][0] if 'sid' in query else None
if sid is None and query.get('EIO') != ['4']:
self._log_error_once(
'The client is using an unsupported version of the Socket.IO '
'or Engine.IO protocols', 'bad-version'
)
return await self._make_response(self._bad_request(
'The client is using an unsupported version of the Socket.IO '
'or Engine.IO protocols'
), environ)
if 'j' in query:
jsonp = True
try:
jsonp_index = int(query['j'][0])
except (ValueError, KeyError, IndexError):
# Invalid JSONP index number
pass
if jsonp and jsonp_index is None:
self._log_error_once('Invalid JSONP index number',
'bad-jsonp-index')
r = self._bad_request('Invalid JSONP index number')
elif method == 'GET':
if sid is None:
# transport must be one of 'polling' or 'websocket'.
# if 'websocket', the HTTP_UPGRADE header must match.
upgrade_header = environ.get('HTTP_UPGRADE').lower() \
if 'HTTP_UPGRADE' in environ else None
if transport == 'polling' \
or transport == upgrade_header == 'websocket':
r = await self._handle_connect(environ, transport,
jsonp_index)
else:
self._log_error_once('Invalid websocket upgrade',
'bad-upgrade')
r = self._bad_request('Invalid websocket upgrade')
else:
if sid not in self.sockets:
self._log_error_once('Invalid session ' + sid, 'bad-sid')
r = self._bad_request('Invalid session ' + sid)
else:
socket = self._get_socket(sid)
try:
packets = await socket.handle_get_request(environ)
if isinstance(packets, list):
r = self._ok(packets, jsonp_index=jsonp_index)
else:
r = packets
except exceptions.EngineIOError:
if sid in self.sockets: # pragma: no cover
await self.disconnect(sid)
r = self._bad_request()
if sid in self.sockets and self.sockets[sid].closed:
del self.sockets[sid]
elif method == 'POST':
if sid is None or sid not in self.sockets:
self._log_error_once('Invalid session ' + sid, 'bad-sid')
r = self._bad_request('Invalid session ' + sid)
else:
socket = self._get_socket(sid)
try:
await socket.handle_post_request(environ)
r = self._ok(jsonp_index=jsonp_index)
except exceptions.EngineIOError:
if sid in self.sockets: # pragma: no cover
await self.disconnect(sid)
r = self._bad_request()
except: # pragma: no cover
# for any other unexpected errors, we log the error
# and keep going
self.logger.exception('post request handler error')
r = self._ok(jsonp_index=jsonp_index)
elif method == 'OPTIONS':
r = self._ok()
else:
self.logger.warning('Method %s not supported', method)
r = self._method_not_found()
if not isinstance(r, dict):
return r
if self.http_compression and \
len(r['response']) >= self.compression_threshold:
encodings = [e.split(';')[0].strip() for e in
environ.get('HTTP_ACCEPT_ENCODING', '').split(',')]
for encoding in encodings:
if encoding in self.compression_methods:
r['response'] = \
getattr(self, '_' + encoding)(r['response'])
r['headers'] += [('Content-Encoding', encoding)]
break
return await self._make_response(r, environ)
def start_background_task(self, target, *args, **kwargs):
"""Start a background task using the appropriate async model.
This is a utility function that applications can use to start a
background task using the method that is compatible with the
selected async mode.
:param target: the target function to execute.
:param args: arguments to pass to the function.
:param kwargs: keyword arguments to pass to the function.
The return value is an ``asyncio.Task`` object.
"""
return asyncio.ensure_future(target(*args, **kwargs))
async def sleep(self, seconds=0):
"""Sleep for the requested amount of time using the appropriate async
model.
This is a utility function that applications can use to put a task to
sleep without having to worry about using the correct call for the
selected async mode.
Note: this method is a coroutine.
"""
return await asyncio.sleep(seconds)
def create_queue(self, *args, **kwargs):
"""Create a queue object using the appropriate async model.
This is a utility function that applications can use to create a queue
without having to worry about using the correct call for the selected
async mode. For asyncio based async modes, this returns an instance of
``asyncio.Queue``.
"""
return asyncio.Queue(*args, **kwargs)
def get_queue_empty_exception(self):
"""Return the queue empty exception for the appropriate async model.
This is a utility function that applications can use to work with a
queue without having to worry about using the correct call for the
selected async mode. For asyncio based async modes, this returns the
``asyncio.QueueEmpty`` exception class.
"""
return asyncio.QueueEmpty
def create_event(self, *args, **kwargs):
"""Create an event object using the appropriate async model.
This is a utility function that applications can use to create an
event without having to worry about using the correct call for the
selected async mode. For asyncio based async modes, this returns
an instance of ``asyncio.Event``.
"""
return asyncio.Event(*args, **kwargs)
async def _make_response(self, response_dict, environ):
cors_headers = self._cors_headers(environ)
make_response = self._async['make_response']
if asyncio.iscoroutinefunction(make_response):
response = await make_response(
response_dict['status'],
response_dict['headers'] + cors_headers,
response_dict['response'], environ)
else:
response = make_response(
response_dict['status'],
response_dict['headers'] + cors_headers,
response_dict['response'], environ)
return response
async def _handle_connect(self, environ, transport, jsonp_index=None):
"""Handle a client connection request."""
if self.start_service_task:
# start the service task to monitor connected clients
self.start_service_task = False
self.start_background_task(self._service_task)
sid = self.generate_id()
s = asyncio_socket.AsyncSocket(self, sid)
self.sockets[sid] = s
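# the OPEN packet advertises the session id, the transports the client may
# upgrade to, and the ping interval/timeout values in milliseconds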
pkt = packet.Packet(
packet.OPEN, {'sid': sid,
'upgrades': self._upgrades(sid, transport),
'pingTimeout': int(self.ping_timeout * 1000),
'pingInterval': int(self.ping_interval * 1000)})
await s.send(pkt)
s.schedule_ping()
ret = await self._trigger_event('connect', sid, environ,
run_async=False)
if ret is not None and ret is not True:
del self.sockets[sid]
self.logger.warning('Application rejected connection')
return self._unauthorized(ret or None)
if transport == 'websocket':
ret = await s.handle_get_request(environ)
if s.closed and sid in self.sockets:
# websocket connection ended, so we are done
del self.sockets[sid]
return ret
else:
s.connected = True
headers = None
if self.cookie:
if isinstance(self.cookie, dict):
headers = [(
'Set-Cookie',
self._generate_sid_cookie(sid, self.cookie)
)]
else:
headers = [(
'Set-Cookie',
self._generate_sid_cookie(sid, {
'name': self.cookie, 'path': '/', 'SameSite': 'Lax'
})
)]
try:
return self._ok(await s.poll(), headers=headers,
jsonp_index=jsonp_index)
except exceptions.QueueEmpty:
return self._bad_request()
async def _trigger_event(self, event, *args, **kwargs):
"""Invoke an event handler."""
run_async = kwargs.pop('run_async', False)
ret = None
if event in self.handlers:
if asyncio.iscoroutinefunction(self.handlers[event]) is True:
if run_async:
return self.start_background_task(self.handlers[event],
*args)
else:
try:
ret = await self.handlers[event](*args)
except asyncio.CancelledError: # pragma: no cover
pass
except:
self.logger.exception(event + ' async handler error')
if event == 'connect':
# if connect handler raised error we reject the
# connection
return False
else:
if run_async:
async def async_handler():
return self.handlers[event](*args)
return self.start_background_task(async_handler)
else:
try:
ret = self.handlers[event](*args)
except:
self.logger.exception(event + ' handler error')
if event == 'connect':
# if connect handler raised error we reject the
# connection
return False
return ret
async def _service_task(self): # pragma: no cover
"""Monitor connected clients and clean up those that time out."""
while True:
if len(self.sockets) == 0:
# nothing to do
await self.sleep(self.ping_timeout)
continue
# go through the entire client list in a ping interval cycle
sleep_interval = self.ping_timeout / len(self.sockets)
try:
# iterate over the current clients
for socket in self.sockets.copy().values():
if not socket.closing and not socket.closed:
await socket.check_ping_timeout()
await self.sleep(sleep_interval)
except (
SystemExit,
KeyboardInterrupt,
asyncio.CancelledError,
GeneratorExit,
):
self.logger.info('service task canceled')
break
except:
if asyncio.get_event_loop().is_closed():
self.logger.info('event loop is closed, exiting service '
'task')
break
# an unexpected exception has occurred, log it and continue
self.logger.exception('service task exception')
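# A minimal usage sketch for the asyncio server above, run through the ASGI
# bridge. uvicorn is only an assumed example; any ASGI server works.
if __name__ == '__main__':  # pragma: no cover
    import engineio
    import uvicorn

    eio = engineio.AsyncServer(async_mode='asgi')
    app = engineio.ASGIApp(eio)

    @eio.on('message')
    async def message(sid, data):
        # echo the payload back to the sender
        await eio.send(sid, data)

    uvicorn.run(app, host='127.0.0.1', port=5000)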

View file

@ -0,0 +1,252 @@
import asyncio
import sys
import time
from . import exceptions
from . import packet
from . import payload
from . import socket
class AsyncSocket(socket.Socket):
async def poll(self):
"""Wait for packets to send to the client."""
try:
packets = [await asyncio.wait_for(
self.queue.get(),
self.server.ping_interval + self.server.ping_timeout)]
self.queue.task_done()
except (asyncio.TimeoutError, asyncio.CancelledError):
raise exceptions.QueueEmpty()
if packets == [None]:
return []
while True:
try:
pkt = self.queue.get_nowait()
self.queue.task_done()
if pkt is None:
self.queue.put_nowait(None)
break
packets.append(pkt)
except asyncio.QueueEmpty:
break
return packets
async def receive(self, pkt):
"""Receive packet from the client."""
self.server.logger.info('%s: Received packet %s data %s',
self.sid, packet.packet_names[pkt.packet_type],
pkt.data if not isinstance(pkt.data, bytes)
else '<binary>')
if pkt.packet_type == packet.PONG:
self.schedule_ping()
elif pkt.packet_type == packet.MESSAGE:
await self.server._trigger_event(
'message', self.sid, pkt.data,
run_async=self.server.async_handlers)
elif pkt.packet_type == packet.UPGRADE:
await self.send(packet.Packet(packet.NOOP))
elif pkt.packet_type == packet.CLOSE:
await self.close(wait=False, abort=True)
else:
raise exceptions.UnknownPacketError()
async def check_ping_timeout(self):
"""Make sure the client is still sending pings."""
if self.closed:
raise exceptions.SocketIsClosedError()
if self.last_ping and \
time.time() - self.last_ping > self.server.ping_timeout:
self.server.logger.info('%s: Client is gone, closing socket',
self.sid)
# Passing abort=False here will cause close() to write a
# CLOSE packet. This has the effect of updating half-open sockets
# to their correct disconnected state
await self.close(wait=False, abort=False)
return False
return True
async def send(self, pkt):
"""Send a packet to the client."""
if not await self.check_ping_timeout():
return
else:
await self.queue.put(pkt)
self.server.logger.info('%s: Sending packet %s data %s',
self.sid, packet.packet_names[pkt.packet_type],
pkt.data if not isinstance(pkt.data, bytes)
else '<binary>')
async def handle_get_request(self, environ):
"""Handle a long-polling GET request from the client."""
connections = [
s.strip()
for s in environ.get('HTTP_CONNECTION', '').lower().split(',')]
transport = environ.get('HTTP_UPGRADE', '').lower()
if 'upgrade' in connections and transport in self.upgrade_protocols:
self.server.logger.info('%s: Received request to upgrade to %s',
self.sid, transport)
return await getattr(self, '_upgrade_' + transport)(environ)
if self.upgrading or self.upgraded:
# we are upgrading to WebSocket, do not return any more packets
# through the polling endpoint
return [packet.Packet(packet.NOOP)]
try:
packets = await self.poll()
except exceptions.QueueEmpty:
exc = sys.exc_info()
await self.close(wait=False)
raise exc[1].with_traceback(exc[2])
return packets
async def handle_post_request(self, environ):
"""Handle a long-polling POST request from the client."""
length = int(environ.get('CONTENT_LENGTH', '0'))
if length > self.server.max_http_buffer_size:
raise exceptions.ContentTooLongError()
else:
body = (await environ['wsgi.input'].read(length)).decode('utf-8')
p = payload.Payload(encoded_payload=body)
for pkt in p.packets:
await self.receive(pkt)
async def close(self, wait=True, abort=False):
"""Close the socket connection."""
if not self.closed and not self.closing:
self.closing = True
await self.server._trigger_event('disconnect', self.sid)
if not abort:
await self.send(packet.Packet(packet.CLOSE))
self.closed = True
if wait:
await self.queue.join()
def schedule_ping(self):
async def send_ping():
self.last_ping = None
await asyncio.sleep(self.server.ping_interval)
if not self.closing and not self.closed:
self.last_ping = time.time()
await self.send(packet.Packet(packet.PING))
self.server.start_background_task(send_ping)
async def _upgrade_websocket(self, environ):
"""Upgrade the connection from polling to websocket."""
if self.upgraded:
raise IOError('Socket has been upgraded already')
if self.server._async['websocket'] is None:
# the selected async mode does not support websocket
return self.server._bad_request()
ws = self.server._async['websocket'](
self._websocket_handler, self.server)
return await ws(environ)
async def _websocket_handler(self, ws):
"""Engine.IO handler for websocket transport."""
async def websocket_wait():
data = await ws.wait()
if data and len(data) > self.server.max_http_buffer_size:
raise ValueError('packet is too large')
return data
if self.connected:
# the socket was already connected, so this is an upgrade
self.upgrading = True # hold packet sends during the upgrade
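# upgrade handshake: the client sends PING "probe" over the new websocket,
# the server answers PONG "probe", and the client then confirms the switch
# with an UPGRADE packet before polling stops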
try:
pkt = await websocket_wait()
except IOError: # pragma: no cover
return
decoded_pkt = packet.Packet(encoded_packet=pkt)
if decoded_pkt.packet_type != packet.PING or \
decoded_pkt.data != 'probe':
self.server.logger.info(
'%s: Failed websocket upgrade, no PING packet', self.sid)
self.upgrading = False
return
await ws.send(packet.Packet(packet.PONG, data='probe').encode())
await self.queue.put(packet.Packet(packet.NOOP)) # end poll
try:
pkt = await websocket_wait()
except IOError: # pragma: no cover
self.upgrading = False
return
decoded_pkt = packet.Packet(encoded_packet=pkt)
if decoded_pkt.packet_type != packet.UPGRADE:
self.upgraded = False
self.server.logger.info(
('%s: Failed websocket upgrade, expected UPGRADE packet, '
'received %s instead.'),
self.sid, pkt)
self.upgrading = False
return
self.upgraded = True
self.upgrading = False
else:
self.connected = True
self.upgraded = True
# start separate writer thread
async def writer():
while True:
packets = None
try:
packets = await self.poll()
except exceptions.QueueEmpty:
break
if not packets:
# empty packet list returned -> connection closed
break
try:
for pkt in packets:
await ws.send(pkt.encode())
except:
break
writer_task = asyncio.ensure_future(writer())
self.server.logger.info(
'%s: Upgrade to websocket successful', self.sid)
while True:
p = None
wait_task = asyncio.ensure_future(websocket_wait())
try:
p = await asyncio.wait_for(
wait_task,
self.server.ping_interval + self.server.ping_timeout)
except asyncio.CancelledError: # pragma: no cover
# there is a bug (https://bugs.python.org/issue30508) in
# asyncio that causes a "Task exception never retrieved" error
# to appear when wait_task raises an exception before it gets
# cancelled. Calling wait_task.exception() prevents the error
# from being issued in Python 3.6, but causes other errors in
# other versions, so we run it with all errors suppressed and
# hope for the best.
try:
wait_task.exception()
except:
pass
break
except:
break
if p is None:
# connection closed by client
break
pkt = packet.Packet(encoded_packet=p)
try:
await self.receive(pkt)
except exceptions.UnknownPacketError: # pragma: no cover
pass
except exceptions.SocketIsClosedError: # pragma: no cover
self.server.logger.info('Receive error -- socket is closed')
break
except: # pragma: no cover
# if we get an unexpected exception we log the error and exit
# the connection properly
self.server.logger.exception('Unknown receive error')
await self.queue.put(None) # unlock the writer task so it can exit
await asyncio.wait_for(writer_task, timeout=None)
await self.close(wait=False, abort=True)

View file

@ -0,0 +1,727 @@
from base64 import b64encode
from engineio.json import JSONDecodeError
import logging
import queue
import signal
import ssl
import threading
import time
import urllib
try:
import requests
except ImportError: # pragma: no cover
requests = None
try:
import websocket
except ImportError: # pragma: no cover
websocket = None
from . import exceptions
from . import packet
from . import payload
default_logger = logging.getLogger('engineio.client')
connected_clients = []
def signal_handler(sig, frame):
"""SIGINT handler.
Disconnect all active clients and then invoke the original signal handler.
"""
for client in connected_clients[:]:
if not client.is_asyncio_based():
client.disconnect()
if callable(original_signal_handler):
return original_signal_handler(sig, frame)
else: # pragma: no cover
# Handle case where no original SIGINT handler was present.
return signal.default_int_handler(sig, frame)
original_signal_handler = None
class Client(object):
"""An Engine.IO client.
This class implements a fully compliant Engine.IO web client with support
for websocket and long-polling transports.
:param logger: To enable logging set to ``True`` or pass a logger object to
use. To disable logging set to ``False``. The default is
``False``. Note that fatal errors are logged even when
``logger`` is ``False``.
:param json: An alternative json module to use for encoding and decoding
packets. Custom json modules must have ``dumps`` and ``loads``
functions that are compatible with the standard library
versions.
:param request_timeout: A timeout in seconds for requests. The default is
5 seconds.
:param http_session: an initialized ``requests.Session`` object to be used
when sending requests to the server. Use it if you
need to add special client options such as proxy
servers, SSL certificates, custom CA bundle, etc.
:param ssl_verify: ``True`` to verify SSL certificates, or ``False`` to
skip SSL certificate verification, allowing
connections to servers with self signed certificates.
The default is ``True``.
:param handle_sigint: Set to ``True`` to automatically handle disconnection
when the process is interrupted, or to ``False`` to
leave interrupt handling to the calling application.
Interrupt handling can only be enabled when the
client instance is created in the main thread.
:param websocket_extra_options: Dictionary containing additional keyword
arguments passed to
``websocket.create_connection()``.
"""
event_names = ['connect', 'disconnect', 'message']
def __init__(self, logger=False, json=None, request_timeout=5,
http_session=None, ssl_verify=True, handle_sigint=True,
websocket_extra_options=None):
global original_signal_handler
if handle_sigint and original_signal_handler is None and \
threading.current_thread() == threading.main_thread():
original_signal_handler = signal.signal(signal.SIGINT,
signal_handler)
self.handlers = {}
self.base_url = None
self.transports = None
self.current_transport = None
self.sid = None
self.upgrades = None
self.ping_interval = None
self.ping_timeout = None
self.http = http_session
self.external_http = http_session is not None
self.handle_sigint = handle_sigint
self.ws = None
self.read_loop_task = None
self.write_loop_task = None
self.queue = None
self.state = 'disconnected'
self.ssl_verify = ssl_verify
self.websocket_extra_options = websocket_extra_options or {}
if json is not None:
packet.Packet.json = json
if not isinstance(logger, bool):
self.logger = logger
else:
self.logger = default_logger
if self.logger.level == logging.NOTSET:
if logger:
self.logger.setLevel(logging.INFO)
else:
self.logger.setLevel(logging.ERROR)
self.logger.addHandler(logging.StreamHandler())
self.request_timeout = request_timeout
def is_asyncio_based(self):
return False
def on(self, event, handler=None):
"""Register an event handler.
:param event: The event name. Can be ``'connect'``, ``'message'`` or
``'disconnect'``.
:param handler: The function that should be invoked to handle the
event. When this parameter is not given, the method
acts as a decorator for the handler function.
Example usage::
# as a decorator:
@eio.on('connect')
def connect_handler():
print('Connection request')
# as a method:
def message_handler(msg):
print('Received message: ', msg)
eio.send('response')
eio.on('message', message_handler)
"""
if event not in self.event_names:
raise ValueError('Invalid event')
def set_handler(handler):
self.handlers[event] = handler
return handler
if handler is None:
return set_handler
set_handler(handler)
def connect(self, url, headers=None, transports=None,
engineio_path='engine.io'):
"""Connect to an Engine.IO server.
:param url: The URL of the Engine.IO server. It can include custom
query string parameters if required by the server.
:param headers: A dictionary with custom headers to send with the
connection request.
:param transports: The list of allowed transports. Valid transports
are ``'polling'`` and ``'websocket'``. If not
given, the polling transport is connected first,
then an upgrade to websocket is attempted.
:param engineio_path: The endpoint where the Engine.IO server is
installed. The default value is appropriate for
most cases.
Example usage::
eio = engineio.Client()
eio.connect('http://localhost:5000')
"""
if self.state != 'disconnected':
raise ValueError('Client is not in a disconnected state')
valid_transports = ['polling', 'websocket']
if transports is not None:
if isinstance(transports, str):
transports = [transports]
transports = [transport for transport in transports
if transport in valid_transports]
if not transports:
raise ValueError('No valid transports provided')
self.transports = transports or valid_transports
self.queue = self.create_queue()
return getattr(self, '_connect_' + self.transports[0])(
url, headers or {}, engineio_path)
def wait(self):
"""Wait until the connection with the server ends.
Client applications can use this function to block the main thread
during the life of the connection.
"""
if self.read_loop_task:
self.read_loop_task.join()
def send(self, data):
"""Send a message to the server.
:param data: The data to send to the server. Data can be of type
``str``, ``bytes``, ``list`` or ``dict``. If a ``list``
or ``dict``, the data will be serialized as JSON.
"""
self._send_packet(packet.Packet(packet.MESSAGE, data=data))
def disconnect(self, abort=False):
"""Disconnect from the server.
:param abort: If set to ``True``, do not wait for background tasks
associated with the connection to end.
"""
if self.state == 'connected':
self._send_packet(packet.Packet(packet.CLOSE))
self.queue.put(None)
self.state = 'disconnecting'
self._trigger_event('disconnect', run_async=False)
if self.current_transport == 'websocket':
self.ws.close()
if not abort:
self.read_loop_task.join()
self.state = 'disconnected'
try:
connected_clients.remove(self)
except ValueError: # pragma: no cover
pass
self._reset()
def transport(self):
"""Return the name of the transport currently in use.
The possible values returned by this function are ``'polling'`` and
``'websocket'``.
"""
return self.current_transport
def start_background_task(self, target, *args, **kwargs):
"""Start a background task.
This is a utility function that applications can use to start a
background task.
:param target: the target function to execute.
:param args: arguments to pass to the function.
:param kwargs: keyword arguments to pass to the function.
This function returns an object that represents the background task,
on which the ``join()`` method can be invoked to wait for the task to
complete.
"""
th = threading.Thread(target=target, args=args, kwargs=kwargs)
th.start()
return th
def sleep(self, seconds=0):
"""Sleep for the requested amount of time."""
return time.sleep(seconds)
def create_queue(self, *args, **kwargs):
"""Create a queue object."""
q = queue.Queue(*args, **kwargs)
q.Empty = queue.Empty
return q
def create_event(self, *args, **kwargs):
"""Create an event object."""
return threading.Event(*args, **kwargs)
def _reset(self):
self.state = 'disconnected'
self.sid = None
def _connect_polling(self, url, headers, engineio_path):
"""Establish a long-polling connection to the Engine.IO server."""
if requests is None: # pragma: no cover
# not installed
self.logger.error('requests package is not installed -- cannot '
'send HTTP requests!')
return
self.base_url = self._get_engineio_url(url, engineio_path, 'polling')
self.logger.info('Attempting polling connection to ' + self.base_url)
r = self._send_request(
'GET', self.base_url + self._get_url_timestamp(), headers=headers,
timeout=self.request_timeout)
if r is None or isinstance(r, str):
self._reset()
raise exceptions.ConnectionError(
r or 'Connection refused by the server')
if r.status_code < 200 or r.status_code >= 300:
self._reset()
try:
arg = r.json()
except JSONDecodeError:
arg = None
raise exceptions.ConnectionError(
'Unexpected status code {} in server response'.format(
r.status_code), arg)
try:
p = payload.Payload(encoded_payload=r.content.decode('utf-8'))
except ValueError:
raise exceptions.ConnectionError(
'Unexpected response from server') from None
open_packet = p.packets[0]
if open_packet.packet_type != packet.OPEN:
raise exceptions.ConnectionError(
'OPEN packet not returned by server')
self.logger.info(
'Polling connection accepted with ' + str(open_packet.data))
self.sid = open_packet.data['sid']
self.upgrades = open_packet.data['upgrades']
self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0
self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0
self.current_transport = 'polling'
self.base_url += '&sid=' + self.sid
self.state = 'connected'
connected_clients.append(self)
self._trigger_event('connect', run_async=False)
for pkt in p.packets[1:]:
self._receive_packet(pkt)
if 'websocket' in self.upgrades and 'websocket' in self.transports:
# attempt to upgrade to websocket
if self._connect_websocket(url, headers, engineio_path):
# upgrade to websocket succeeded, we're done here
return
# start background tasks associated with this client
self.write_loop_task = self.start_background_task(self._write_loop)
self.read_loop_task = self.start_background_task(
self._read_loop_polling)
def _connect_websocket(self, url, headers, engineio_path):
"""Establish or upgrade to a WebSocket connection with the server."""
if websocket is None: # pragma: no cover
# not installed
self.logger.error('websocket-client package not installed, only '
'polling transport is available')
return False
websocket_url = self._get_engineio_url(url, engineio_path, 'websocket')
if self.sid:
self.logger.info(
'Attempting WebSocket upgrade to ' + websocket_url)
upgrade = True
websocket_url += '&sid=' + self.sid
else:
upgrade = False
self.base_url = websocket_url
self.logger.info(
'Attempting WebSocket connection to ' + websocket_url)
# get cookies and other settings from the long-polling connection
# so that they are preserved when connecting to the WebSocket route
cookies = None
extra_options = {}
if self.http:
# cookies
cookies = '; '.join(["{}={}".format(cookie.name, cookie.value)
for cookie in self.http.cookies])
for header, value in headers.items():
if header.lower() == 'cookie':
if cookies:
cookies += '; '
cookies += value
del headers[header]
break
# auth
if 'Authorization' not in headers and self.http.auth is not None:
if not isinstance(self.http.auth, tuple): # pragma: no cover
raise ValueError('Only basic authentication is supported')
basic_auth = '{}:{}'.format(
self.http.auth[0], self.http.auth[1]).encode('utf-8')
basic_auth = b64encode(basic_auth).decode('utf-8')
headers['Authorization'] = 'Basic ' + basic_auth
# cert
# this can be given as ('certfile', 'keyfile') or just 'certfile'
if isinstance(self.http.cert, tuple):
extra_options['sslopt'] = {
'certfile': self.http.cert[0],
'keyfile': self.http.cert[1]}
elif self.http.cert:
extra_options['sslopt'] = {'certfile': self.http.cert}
# proxies
if self.http.proxies:
proxy_url = None
if websocket_url.startswith('ws://'):
proxy_url = self.http.proxies.get(
'ws', self.http.proxies.get('http'))
else: # wss://
proxy_url = self.http.proxies.get(
'wss', self.http.proxies.get('https'))
if proxy_url:
parsed_url = urllib.parse.urlparse(
proxy_url if '://' in proxy_url
else 'scheme://' + proxy_url)
extra_options['http_proxy_host'] = parsed_url.hostname
extra_options['http_proxy_port'] = parsed_url.port
extra_options['http_proxy_auth'] = (
(parsed_url.username, parsed_url.password)
if parsed_url.username or parsed_url.password
else None)
# verify
if isinstance(self.http.verify, str):
if 'sslopt' in extra_options:
extra_options['sslopt']['ca_certs'] = self.http.verify
else:
extra_options['sslopt'] = {'ca_certs': self.http.verify}
elif not self.http.verify:
self.ssl_verify = False
if not self.ssl_verify:
extra_options['sslopt'] = {"cert_reqs": ssl.CERT_NONE}
# combine internally generated options with the ones supplied by the
# caller. The caller's options take precedence.
headers.update(self.websocket_extra_options.pop('header', {}))
extra_options['header'] = headers
extra_options['cookie'] = cookies
extra_options['enable_multithread'] = True
extra_options['timeout'] = self.request_timeout
extra_options.update(self.websocket_extra_options)
try:
ws = websocket.create_connection(
websocket_url + self._get_url_timestamp(), **extra_options)
except (ConnectionError, IOError, websocket.WebSocketException):
if upgrade:
self.logger.warning(
'WebSocket upgrade failed: connection error')
return False
else:
raise exceptions.ConnectionError('Connection error')
if upgrade:
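# probe the new transport: send PING "probe", expect PONG "probe" back,
# then confirm the switch with an UPGRADE packet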
p = packet.Packet(packet.PING, data='probe').encode()
try:
ws.send(p)
except Exception as e: # pragma: no cover
self.logger.warning(
'WebSocket upgrade failed: unexpected send exception: %s',
str(e))
return False
try:
p = ws.recv()
except Exception as e: # pragma: no cover
self.logger.warning(
'WebSocket upgrade failed: unexpected recv exception: %s',
str(e))
return False
pkt = packet.Packet(encoded_packet=p)
if pkt.packet_type != packet.PONG or pkt.data != 'probe':
self.logger.warning(
'WebSocket upgrade failed: no PONG packet')
return False
p = packet.Packet(packet.UPGRADE).encode()
try:
ws.send(p)
except Exception as e: # pragma: no cover
self.logger.warning(
'WebSocket upgrade failed: unexpected send exception: %s',
str(e))
return False
self.current_transport = 'websocket'
self.logger.info('WebSocket upgrade was successful')
else:
try:
p = ws.recv()
except Exception as e: # pragma: no cover
raise exceptions.ConnectionError(
'Unexpected recv exception: ' + str(e))
open_packet = packet.Packet(encoded_packet=p)
if open_packet.packet_type != packet.OPEN:
raise exceptions.ConnectionError('no OPEN packet')
self.logger.info(
'WebSocket connection accepted with ' + str(open_packet.data))
self.sid = open_packet.data['sid']
self.upgrades = open_packet.data['upgrades']
self.ping_interval = int(open_packet.data['pingInterval']) / 1000.0
self.ping_timeout = int(open_packet.data['pingTimeout']) / 1000.0
self.current_transport = 'websocket'
self.state = 'connected'
connected_clients.append(self)
self._trigger_event('connect', run_async=False)
self.ws = ws
self.ws.settimeout(self.ping_interval + self.ping_timeout)
# start background tasks associated with this client
self.write_loop_task = self.start_background_task(self._write_loop)
self.read_loop_task = self.start_background_task(
self._read_loop_websocket)
return True
def _receive_packet(self, pkt):
"""Handle incoming packets from the server."""
packet_name = packet.packet_names[pkt.packet_type] \
if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN'
self.logger.info(
'Received packet %s data %s', packet_name,
pkt.data if not isinstance(pkt.data, bytes) else '<binary>')
if pkt.packet_type == packet.MESSAGE:
self._trigger_event('message', pkt.data, run_async=True)
elif pkt.packet_type == packet.PING:
self._send_packet(packet.Packet(packet.PONG, pkt.data))
elif pkt.packet_type == packet.CLOSE:
self.disconnect(abort=True)
elif pkt.packet_type == packet.NOOP:
pass
else:
self.logger.error('Received unexpected packet of type %s',
pkt.packet_type)
def _send_packet(self, pkt):
"""Queue a packet to be sent to the server."""
if self.state != 'connected':
return
self.queue.put(pkt)
self.logger.info(
'Sending packet %s data %s',
packet.packet_names[pkt.packet_type],
pkt.data if not isinstance(pkt.data, bytes) else '<binary>')
def _send_request(
self, method, url, headers=None, body=None,
timeout=None): # pragma: no cover
if self.http is None:
self.http = requests.Session()
if not self.ssl_verify:
self.http.verify = False
try:
return self.http.request(method, url, headers=headers, data=body,
timeout=timeout)
except requests.exceptions.RequestException as exc:
self.logger.info('HTTP %s request to %s failed with error %s.',
method, url, exc)
return str(exc)
def _trigger_event(self, event, *args, **kwargs):
"""Invoke an event handler."""
run_async = kwargs.pop('run_async', False)
if event in self.handlers:
if run_async:
return self.start_background_task(self.handlers[event], *args)
else:
try:
return self.handlers[event](*args)
except:
self.logger.exception(event + ' handler error')
def _get_engineio_url(self, url, engineio_path, transport):
"""Generate the Engine.IO connection URL."""
engineio_path = engineio_path.strip('/')
parsed_url = urllib.parse.urlparse(url)
if transport == 'polling':
scheme = 'http'
elif transport == 'websocket':
scheme = 'ws'
else: # pragma: no cover
raise ValueError('invalid transport')
if parsed_url.scheme in ['https', 'wss']:
scheme += 's'
return ('{scheme}://{netloc}/{path}/?{query}'
'{sep}transport={transport}&EIO=4').format(
scheme=scheme, netloc=parsed_url.netloc,
path=engineio_path, query=parsed_url.query,
sep='&' if parsed_url.query else '',
transport=transport)
def _get_url_timestamp(self):
"""Generate the Engine.IO query string timestamp."""
return '&t=' + str(time.time())
def _read_loop_polling(self):
"""Read packets by polling the Engine.IO server."""
while self.state == 'connected':
self.logger.info(
'Sending polling GET request to ' + self.base_url)
r = self._send_request(
'GET', self.base_url + self._get_url_timestamp(),
timeout=max(self.ping_interval, self.ping_timeout) + 5)
if r is None or isinstance(r, str):
self.logger.warning(
r or 'Connection refused by the server, aborting')
self.queue.put(None)
break
if r.status_code < 200 or r.status_code >= 300:
self.logger.warning('Unexpected status code %s in server '
'response, aborting', r.status_code)
self.queue.put(None)
break
try:
p = payload.Payload(encoded_payload=r.content.decode('utf-8'))
except ValueError:
self.logger.warning(
'Unexpected packet from server, aborting')
self.queue.put(None)
break
for pkt in p.packets:
self._receive_packet(pkt)
self.logger.info('Waiting for write loop task to end')
self.write_loop_task.join()
if self.state == 'connected':
self._trigger_event('disconnect', run_async=False)
try:
connected_clients.remove(self)
except ValueError: # pragma: no cover
pass
self._reset()
self.logger.info('Exiting read loop task')
def _read_loop_websocket(self):
"""Read packets from the Engine.IO WebSocket connection."""
while self.state == 'connected':
p = None
try:
p = self.ws.recv()
except websocket.WebSocketTimeoutException:
self.logger.warning(
'Server has stopped communicating, aborting')
self.queue.put(None)
break
except websocket.WebSocketConnectionClosedException:
self.logger.warning(
'WebSocket connection was closed, aborting')
self.queue.put(None)
break
except Exception as e:
self.logger.info(
'Unexpected error receiving packet: "%s", aborting',
str(e))
self.queue.put(None)
break
try:
pkt = packet.Packet(encoded_packet=p)
except Exception as e: # pragma: no cover
self.logger.info(
'Unexpected error decoding packet: "%s", aborting', str(e))
self.queue.put(None)
break
self._receive_packet(pkt)
self.logger.info('Waiting for write loop task to end')
self.write_loop_task.join()
if self.state == 'connected':
self._trigger_event('disconnect', run_async=False)
try:
connected_clients.remove(self)
except ValueError: # pragma: no cover
pass
self._reset()
self.logger.info('Exiting read loop task')
def _write_loop(self):
"""This background task sends packages to the server as they are
pushed to the send queue.
"""
while self.state == 'connected':
# to simplify the timeout handling, use the maximum of the
# ping interval and ping timeout as timeout, with an extra 5
# seconds grace period
timeout = max(self.ping_interval, self.ping_timeout) + 5
packets = None
try:
packets = [self.queue.get(timeout=timeout)]
except self.queue.Empty:
self.logger.error('packet queue is empty, aborting')
break
if packets == [None]:
self.queue.task_done()
packets = []
else:
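# drain any other packets already queued so they are sent to the server
# in a single payload or websocket batch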
while True:
try:
packets.append(self.queue.get(block=False))
except self.queue.Empty:
break
if packets[-1] is None:
packets = packets[:-1]
self.queue.task_done()
break
if not packets:
# empty packet list returned -> connection closed
break
if self.current_transport == 'polling':
p = payload.Payload(packets=packets)
r = self._send_request(
'POST', self.base_url, body=p.encode(),
headers={'Content-Type': 'text/plain'},
timeout=self.request_timeout)
for pkt in packets:
self.queue.task_done()
if r is None or isinstance(r, str):
self.logger.warning(
r or 'Connection refused by the server, aborting')
break
if r.status_code < 200 or r.status_code >= 300:
self.logger.warning('Unexpected status code %s in server '
'response, aborting', r.status_code)
self._reset()
break
else:
# websocket
try:
for pkt in packets:
encoded_packet = pkt.encode()
if pkt.binary:
self.ws.send_binary(encoded_packet)
else:
self.ws.send(encoded_packet)
self.queue.task_done()
except (websocket.WebSocketConnectionClosedException,
BrokenPipeError, OSError):
self.logger.warning(
'WebSocket connection was closed, aborting')
break
self.logger.info('Exiting write loop task')
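# A minimal usage sketch for the client above; the URL is a placeholder for a
# locally running Engine.IO server.
if __name__ == '__main__':  # pragma: no cover
    eio = Client()

    @eio.on('connect')
    def on_connect():
        print('connected, sid =', eio.sid)
        eio.send('hello')

    @eio.on('message')
    def on_message(data):
        print('received:', data)

    eio.connect('http://localhost:5000')
    eio.wait()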

View file

@ -0,0 +1,22 @@
class EngineIOError(Exception):
pass
class ContentTooLongError(EngineIOError):
pass
class UnknownPacketError(EngineIOError):
pass
class QueueEmpty(EngineIOError):
pass
class SocketIsClosedError(EngineIOError):
pass
class ConnectionError(EngineIOError):
pass

View file

@ -0,0 +1,16 @@
"""JSON-compatible module with sane defaults."""
from json import * # noqa: F401, F403
from json import loads as original_loads
def _safe_int(s):
if len(s) > 100:
raise ValueError('Integer is too large')
return int(s)
def loads(*args, **kwargs):
if 'parse_int' not in kwargs: # pragma: no cover
kwargs['parse_int'] = _safe_int
return original_loads(*args, **kwargs)
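# A short sketch of the guard above: oversized integer literals are rejected,
# while ordinary JSON decodes as usual.
if __name__ == '__main__':  # pragma: no cover
    print(loads('{"value": 3}'))   # {'value': 3}
    try:
        loads('1' * 200)
    except ValueError as exc:
        print(exc)                 # Integer is too large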

View file

@ -0,0 +1,86 @@
import os
from engineio.static_files import get_static_file
class WSGIApp(object):
"""WSGI application middleware for Engine.IO.
This middleware dispatches traffic to an Engine.IO application. It can
also serve a list of static files to the client, or forward unrelated
HTTP traffic to another WSGI application.
:param engineio_app: The Engine.IO server. Must be an instance of the
``engineio.Server`` class.
:param wsgi_app: The WSGI app that receives all other traffic.
:param static_files: A dictionary with static file mapping rules. See the
documentation for details on this argument.
:param engineio_path: The endpoint where the Engine.IO application should
be installed. The default value is appropriate for
most cases.
Example usage::
import engineio
import eventlet
eio = engineio.Server()
app = engineio.WSGIApp(eio, static_files={
'/': {'content_type': 'text/html', 'filename': 'index.html'},
'/index.html': {'content_type': 'text/html',
'filename': 'index.html'},
})
eventlet.wsgi.server(eventlet.listen(('', 8000)), app)
"""
def __init__(self, engineio_app, wsgi_app=None, static_files=None,
engineio_path='engine.io'):
self.engineio_app = engineio_app
self.wsgi_app = wsgi_app
self.engineio_path = engineio_path
if not self.engineio_path.startswith('/'):
self.engineio_path = '/' + self.engineio_path
if not self.engineio_path.endswith('/'):
self.engineio_path += '/'
self.static_files = static_files or {}
def __call__(self, environ, start_response):
if 'gunicorn.socket' in environ:
# gunicorn saves the socket under environ['gunicorn.socket'], while
# eventlet saves it under environ['eventlet.input']. Eventlet also
# stores the socket inside a wrapper class, while gunicorn writes it
# directly into the environment. To give eventlet's WebSocket
# module access to this socket when running under gunicorn, here we
# copy the socket to the eventlet format.
class Input(object):
def __init__(self, socket):
self.socket = socket
def get_socket(self):
return self.socket
environ['eventlet.input'] = Input(environ['gunicorn.socket'])
path = environ['PATH_INFO']
if path is not None and path.startswith(self.engineio_path):
return self.engineio_app.handle_request(environ, start_response)
else:
static_file = get_static_file(path, self.static_files) \
if self.static_files else None
if static_file and os.path.exists(static_file['filename']):
start_response(
'200 OK',
[('Content-Type', static_file['content_type'])])
with open(static_file['filename'], 'rb') as f:
return [f.read()]
elif self.wsgi_app is not None:
return self.wsgi_app(environ, start_response)
return self.not_found(start_response)
def not_found(self, start_response):
start_response("404 Not Found", [('Content-Type', 'text/plain')])
return [b'Not Found']
class Middleware(WSGIApp):
"""This class has been renamed to ``WSGIApp`` and is now deprecated."""
def __init__(self, engineio_app, wsgi_app=None,
engineio_path='engine.io'):
super().__init__(engineio_app, wsgi_app, engineio_path=engineio_path)
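# A minimal sketch combining the middleware above with another WSGI
# application. Flask is only an assumed example of a WSGI callable.
if __name__ == '__main__':  # pragma: no cover
    import engineio
    from flask import Flask

    flask_app = Flask(__name__)
    eio = engineio.Server(async_mode='threading')
    # requests under /engine.io/ go to eio, everything else to flask_app
    app = engineio.WSGIApp(eio, flask_app)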

View file

@ -0,0 +1,74 @@
import base64
from engineio import json as _json
(OPEN, CLOSE, PING, PONG, MESSAGE, UPGRADE, NOOP) = (0, 1, 2, 3, 4, 5, 6)
packet_names = ['OPEN', 'CLOSE', 'PING', 'PONG', 'MESSAGE', 'UPGRADE', 'NOOP']
binary_types = (bytes, bytearray)
class Packet(object):
"""Engine.IO packet."""
json = _json
def __init__(self, packet_type=NOOP, data=None, encoded_packet=None):
self.packet_type = packet_type
self.data = data
if isinstance(data, str):
self.binary = False
elif isinstance(data, binary_types):
self.binary = True
else:
self.binary = False
if self.binary and self.packet_type != MESSAGE:
raise ValueError('Binary packets can only be of type MESSAGE')
if encoded_packet is not None:
self.decode(encoded_packet)
def encode(self, b64=False):
"""Encode the packet for transmission."""
if self.binary:
if b64:
encoded_packet = 'b' + base64.b64encode(self.data).decode(
'utf-8')
else:
encoded_packet = self.data
else:
encoded_packet = str(self.packet_type)
if isinstance(self.data, str):
encoded_packet += self.data
elif isinstance(self.data, dict) or isinstance(self.data, list):
encoded_packet += self.json.dumps(self.data,
separators=(',', ':'))
elif self.data is not None:
encoded_packet += str(self.data)
return encoded_packet
def decode(self, encoded_packet):
"""Decode a transmitted package."""
self.binary = isinstance(encoded_packet, binary_types)
if not self.binary and len(encoded_packet) == 0:
raise ValueError('Invalid empty packet received')
b64 = not self.binary and encoded_packet[0] == 'b'
if b64:
self.binary = True
self.packet_type = MESSAGE
self.data = base64.b64decode(encoded_packet[1:])
else:
if self.binary and not isinstance(encoded_packet, bytes):
encoded_packet = bytes(encoded_packet)
if self.binary:
self.packet_type = MESSAGE
self.data = encoded_packet
else:
self.packet_type = int(encoded_packet[0])
try:
self.data = self.json.loads(encoded_packet[1:])
if isinstance(self.data, int):
# do not allow integer payloads, see
# github.com/miguelgrinberg/python-engineio/issues/75
# for background on this decision
raise ValueError
except ValueError:
self.data = encoded_packet[1:]
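# A small sketch of the wire format implemented above: a one-digit packet
# type prefix followed by the payload, JSON-encoded for dicts and lists.
# Binary payloads are sent raw, or base64-encoded with a 'b' prefix.
if __name__ == '__main__':  # pragma: no cover
    msg = Packet(MESSAGE, data={'hello': 'world'})
    print(msg.encode())                 # 4{"hello":"world"}
    echo = Packet(encoded_packet='4{"hello":"world"}')
    print(echo.packet_type == MESSAGE)  # True
    print(echo.data)                    # {'hello': 'world'}
    blob = Packet(MESSAGE, data=b'\x01\x02')
    print(blob.encode(b64=True))        # bAQI=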

View file

@ -0,0 +1,46 @@
import urllib
from . import packet
class Payload(object):
"""Engine.IO payload."""
max_decode_packets = 16
def __init__(self, packets=None, encoded_payload=None):
self.packets = packets or []
if encoded_payload is not None:
self.decode(encoded_payload)
def encode(self, jsonp_index=None):
"""Encode the payload for transmission."""
encoded_payload = ''
for pkt in self.packets:
if encoded_payload:
encoded_payload += '\x1e'
encoded_payload += pkt.encode(b64=True)
if jsonp_index is not None:
encoded_payload = '___eio[' + \
str(jsonp_index) + \
']("' + \
encoded_payload.replace('"', '\\"') + \
'");'
return encoded_payload
def decode(self, encoded_payload):
"""Decode a transmitted payload."""
self.packets = []
if len(encoded_payload) == 0:
return
# JSONP POST payload starts with 'd='
if encoded_payload.startswith('d='):
encoded_payload = urllib.parse.parse_qs(
encoded_payload)['d'][0]
encoded_packets = encoded_payload.split('\x1e')
if len(encoded_packets) > self.max_decode_packets:
raise ValueError('Too many packets in payload')
self.packets = [packet.Packet(encoded_packet=encoded_packet)
for encoded_packet in encoded_packets]
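# A small sketch of the payload framing used above: individual packets are
# joined with the 0x1e record separator character.
if __name__ == '__main__':  # pragma: no cover
    p = Payload(packets=[packet.Packet(packet.MESSAGE, data='hi'),
                         packet.Packet(packet.MESSAGE, data='there')])
    encoded = p.encode()
    print(repr(encoded))            # '4hi\x1e4there'
    decoded = Payload(encoded_payload=encoded)
    print(len(decoded.packets))     # 2
    print(decoded.packets[0].data)  # hi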

View file

@ -0,0 +1,770 @@
import base64
import gzip
import importlib
import io
import logging
import secrets
import urllib
import zlib
from . import exceptions
from . import packet
from . import payload
from . import socket
default_logger = logging.getLogger('engineio.server')
class Server(object):
"""An Engine.IO server.
This class implements a fully compliant Engine.IO web server with support
for websocket and long-polling transports.
:param async_mode: The asynchronous model to use. See the Deployment
section in the documentation for a description of the
available options. Valid async modes are "threading",
"eventlet", "gevent" and "gevent_uwsgi". If this
argument is not given, "eventlet" is tried first, then
"gevent_uwsgi", then "gevent", and finally "threading".
The first async mode that has all its dependencies
installed is the one that is chosen.
:param ping_interval: The interval in seconds at which the server pings
the client. The default is 25 seconds. For advanced
control, a two element tuple can be given, where
the first number is the ping interval and the second
is a grace period added by the server.
:param ping_timeout: The time in seconds that the client waits for the
server to respond before disconnecting. The default
is 20 seconds.
:param max_http_buffer_size: The maximum size of a message. The default
is 1,000,000 bytes.
:param allow_upgrades: Whether to allow transport upgrades or not. The
default is ``True``.
:param http_compression: Whether to compress packets when using the
polling transport. The default is ``True``.
:param compression_threshold: Only compress messages when their byte size
is greater than this value. The default is
1024 bytes.
:param cookie: If set to a string, it is the name of the HTTP cookie the
server sends back to the client containing the client
session id. If set to a dictionary, the ``'name'`` key
contains the cookie name and other keys define cookie
attributes, where the value of each attribute can be a
string, a callable with no arguments, or a boolean. If set
to ``None`` (the default), a cookie is not sent to the
client.
:param cors_allowed_origins: Origin or list of origins that are allowed to
connect to this server. Only the same origin
is allowed by default. Set this argument to
``'*'`` to allow all origins, or to ``[]`` to
disable CORS handling.
:param cors_credentials: Whether credentials (cookies, authentication) are
allowed in requests to this server. The default
is ``True``.
:param logger: To enable logging set to ``True`` or pass a logger object to
use. To disable logging set to ``False``. The default is
``False``. Note that fatal errors are logged even when
``logger`` is ``False``.
:param json: An alternative json module to use for encoding and decoding
packets. Custom json modules must have ``dumps`` and ``loads``
functions that are compatible with the standard library
versions.
:param async_handlers: If set to ``True``, run message event handlers in
non-blocking threads. To run handlers synchronously,
set to ``False``. The default is ``True``.
:param monitor_clients: If set to ``True``, a background task will ensure
inactive clients are closed. Set to ``False`` to
disable the monitoring task (not recommended). The
default is ``True``.
:param transports: The list of allowed transports. Valid transports
are ``'polling'`` and ``'websocket'``. Defaults to
``['polling', 'websocket']``.
:param kwargs: Reserved for future extensions, any additional parameters
given as keyword arguments will be silently ignored.
"""
compression_methods = ['gzip', 'deflate']
event_names = ['connect', 'disconnect', 'message']
valid_transports = ['polling', 'websocket']
_default_monitor_clients = True
sequence_number = 0
def __init__(self, async_mode=None, ping_interval=25, ping_timeout=20,
max_http_buffer_size=1000000, allow_upgrades=True,
http_compression=True, compression_threshold=1024,
cookie=None, cors_allowed_origins=None,
cors_credentials=True, logger=False, json=None,
async_handlers=True, monitor_clients=None, transports=None,
**kwargs):
self.ping_timeout = ping_timeout
if isinstance(ping_interval, tuple):
self.ping_interval = ping_interval[0]
self.ping_interval_grace_period = ping_interval[1]
else:
self.ping_interval = ping_interval
self.ping_interval_grace_period = 0
self.max_http_buffer_size = max_http_buffer_size
self.allow_upgrades = allow_upgrades
self.http_compression = http_compression
self.compression_threshold = compression_threshold
self.cookie = cookie
self.cors_allowed_origins = cors_allowed_origins
self.cors_credentials = cors_credentials
self.async_handlers = async_handlers
self.sockets = {}
self.handlers = {}
self.log_message_keys = set()
self.start_service_task = monitor_clients \
if monitor_clients is not None else self._default_monitor_clients
if json is not None:
packet.Packet.json = json
if not isinstance(logger, bool):
self.logger = logger
else:
self.logger = default_logger
if self.logger.level == logging.NOTSET:
if logger:
self.logger.setLevel(logging.INFO)
else:
self.logger.setLevel(logging.ERROR)
self.logger.addHandler(logging.StreamHandler())
modes = self.async_modes()
if async_mode is not None:
modes = [async_mode] if async_mode in modes else []
self._async = None
self.async_mode = None
for mode in modes:
try:
self._async = importlib.import_module(
'engineio.async_drivers.' + mode)._async
asyncio_based = self._async['asyncio'] \
if 'asyncio' in self._async else False
if asyncio_based != self.is_asyncio_based():
continue # pragma: no cover
self.async_mode = mode
break
except ImportError:
pass
if self.async_mode is None:
raise ValueError('Invalid async_mode specified')
if self.is_asyncio_based() and \
('asyncio' not in self._async or not
self._async['asyncio']): # pragma: no cover
raise ValueError('The selected async_mode is not asyncio '
'compatible')
if not self.is_asyncio_based() and 'asyncio' in self._async and \
self._async['asyncio']: # pragma: no cover
raise ValueError('The selected async_mode requires asyncio and '
'must use the AsyncServer class')
if transports is not None:
if isinstance(transports, str):
transports = [transports]
transports = [transport for transport in transports
if transport in self.valid_transports]
if not transports:
raise ValueError('No valid transports provided')
self.transports = transports or self.valid_transports
self.logger.info('Server initialized for %s.', self.async_mode)
def is_asyncio_based(self):
return False
def async_modes(self):
return ['eventlet', 'gevent_uwsgi', 'gevent', 'threading']
def on(self, event, handler=None):
"""Register an event handler.
:param event: The event name. Can be ``'connect'``, ``'message'`` or
``'disconnect'``.
:param handler: The function that should be invoked to handle the
event. When this parameter is not given, the method
acts as a decorator for the handler function.
Example usage::
# as a decorator:
@eio.on('connect')
def connect_handler(sid, environ):
print('Connection request')
if environ['REMOTE_ADDR'] in blacklisted:
return False # reject
# as a method:
def message_handler(sid, msg):
print('Received message: ', msg)
eio.send(sid, 'response')
eio.on('message', message_handler)
The handler function receives the ``sid`` (session ID) for the
client as first argument. The ``'connect'`` event handler receives the
WSGI environment as a second argument, and can return ``False`` to
reject the connection. The ``'message'`` handler receives the message
payload as a second argument. The ``'disconnect'`` handler does not
take a second argument.
"""
if event not in self.event_names:
raise ValueError('Invalid event')
def set_handler(handler):
self.handlers[event] = handler
return handler
if handler is None:
return set_handler
set_handler(handler)
def send(self, sid, data):
"""Send a message to a client.
:param sid: The session id of the recipient client.
:param data: The data to send to the client. Data can be of type
``str``, ``bytes``, ``list`` or ``dict``. If a ``list``
or ``dict``, the data will be serialized as JSON.
"""
try:
socket = self._get_socket(sid)
except KeyError:
# the socket is not available
self.logger.warning('Cannot send to sid %s', sid)
return
socket.send(packet.Packet(packet.MESSAGE, data=data))
def get_session(self, sid):
"""Return the user session for a client.
:param sid: The session id of the client.
The return value is a dictionary. Modifications made to this
dictionary are not guaranteed to be preserved unless
``save_session()`` is called, or when the ``session`` context manager
is used.
"""
socket = self._get_socket(sid)
return socket.session
def save_session(self, sid, session):
"""Store the user session for a client.
:param sid: The session id of the client.
:param session: The session dictionary.
"""
socket = self._get_socket(sid)
socket.session = session
def session(self, sid):
"""Return the user session for a client with context manager syntax.
:param sid: The session id of the client.
This is a context manager that returns the user session dictionary for
the client. Any changes that are made to this dictionary inside the
context manager block are saved back to the session. Example usage::
@eio.on('connect')
def on_connect(sid, environ):
username = authenticate_user(environ)
if not username:
return False
with eio.session(sid) as session:
session['username'] = username
@eio.on('message')
def on_message(sid, msg):
with eio.session(sid) as session:
print('received message from ', session['username'])
"""
class _session_context_manager(object):
def __init__(self, server, sid):
self.server = server
self.sid = sid
self.session = None
def __enter__(self):
self.session = self.server.get_session(sid)
return self.session
def __exit__(self, *args):
self.server.save_session(sid, self.session)
return _session_context_manager(self, sid)
def disconnect(self, sid=None):
"""Disconnect a client.
:param sid: The session id of the client to close. If this parameter
is not given, then all clients are closed.
"""
if sid is not None:
try:
socket = self._get_socket(sid)
except KeyError: # pragma: no cover
# the socket was already closed or gone
pass
else:
socket.close()
if sid in self.sockets: # pragma: no cover
del self.sockets[sid]
else:
for client in self.sockets.values():
client.close()
self.sockets = {}
def transport(self, sid):
"""Return the name of the transport used by the client.
The two possible values returned by this function are ``'polling'``
and ``'websocket'``.
:param sid: The session of the client.
"""
return 'websocket' if self._get_socket(sid).upgraded else 'polling'
def handle_request(self, environ, start_response):
"""Handle an HTTP request from the client.
This is the entry point of the Engine.IO application, using the same
interface as a WSGI application. For the typical usage, this function
is invoked by the :class:`Middleware` instance, but it can be invoked
directly when the middleware is not used.
:param environ: The WSGI environment.
:param start_response: The WSGI ``start_response`` function.
This function returns the HTTP response body to deliver to the client
as a byte sequence.
"""
if self.cors_allowed_origins != []:
# Validate the origin header if present
# This is important for WebSocket more than for HTTP, since
# browsers only apply CORS controls to HTTP.
origin = environ.get('HTTP_ORIGIN')
if origin:
allowed_origins = self._cors_allowed_origins(environ)
if allowed_origins is not None and origin not in \
allowed_origins:
self._log_error_once(
origin + ' is not an accepted origin.', 'bad-origin')
r = self._bad_request('Not an accepted origin.')
start_response(r['status'], r['headers'])
return [r['response']]
method = environ['REQUEST_METHOD']
query = urllib.parse.parse_qs(environ.get('QUERY_STRING', ''))
jsonp = False
jsonp_index = None
# make sure the client uses an allowed transport
transport = query.get('transport', ['polling'])[0]
if transport not in self.transports:
self._log_error_once('Invalid transport', 'bad-transport')
r = self._bad_request('Invalid transport')
start_response(r['status'], r['headers'])
return [r['response']]
# make sure the client speaks a compatible Engine.IO version
sid = query['sid'][0] if 'sid' in query else None
if sid is None and query.get('EIO') != ['4']:
self._log_error_once(
'The client is using an unsupported version of the Socket.IO '
'or Engine.IO protocols', 'bad-version')
r = self._bad_request(
'The client is using an unsupported version of the Socket.IO '
'or Engine.IO protocols')
start_response(r['status'], r['headers'])
return [r['response']]
if 'j' in query:
jsonp = True
try:
jsonp_index = int(query['j'][0])
except (ValueError, KeyError, IndexError):
# Invalid JSONP index number
pass
if jsonp and jsonp_index is None:
self._log_error_once('Invalid JSONP index number',
'bad-jsonp-index')
r = self._bad_request('Invalid JSONP index number')
elif method == 'GET':
if sid is None:
# transport must be one of 'polling' or 'websocket'.
# if 'websocket', the HTTP_UPGRADE header must match.
upgrade_header = environ.get('HTTP_UPGRADE').lower() \
if 'HTTP_UPGRADE' in environ else None
if transport == 'polling' \
or transport == upgrade_header == 'websocket':
r = self._handle_connect(environ, start_response,
transport, jsonp_index)
else:
self._log_error_once('Invalid websocket upgrade',
'bad-upgrade')
r = self._bad_request('Invalid websocket upgrade')
else:
if sid not in self.sockets:
self._log_error_once('Invalid session ' + sid, 'bad-sid')
r = self._bad_request('Invalid session')
else:
socket = self._get_socket(sid)
try:
packets = socket.handle_get_request(
environ, start_response)
if isinstance(packets, list):
r = self._ok(packets, jsonp_index=jsonp_index)
else:
r = packets
except exceptions.EngineIOError:
if sid in self.sockets: # pragma: no cover
self.disconnect(sid)
r = self._bad_request()
if sid in self.sockets and self.sockets[sid].closed:
del self.sockets[sid]
elif method == 'POST':
if sid is None or sid not in self.sockets:
self._log_error_once(
'Invalid session ' + (sid or 'None'), 'bad-sid')
r = self._bad_request('Invalid session')
else:
socket = self._get_socket(sid)
try:
socket.handle_post_request(environ)
r = self._ok(jsonp_index=jsonp_index)
except exceptions.EngineIOError:
if sid in self.sockets: # pragma: no cover
self.disconnect(sid)
r = self._bad_request()
except: # pragma: no cover
# for any other unexpected errors, we log the error
# and keep going
self.logger.exception('post request handler error')
r = self._ok(jsonp_index=jsonp_index)
elif method == 'OPTIONS':
r = self._ok()
else:
self.logger.warning('Method %s not supported', method)
r = self._method_not_found()
if not isinstance(r, dict):
return r or []
if self.http_compression and \
len(r['response']) >= self.compression_threshold:
encodings = [e.split(';')[0].strip() for e in
environ.get('HTTP_ACCEPT_ENCODING', '').split(',')]
for encoding in encodings:
if encoding in self.compression_methods:
r['response'] = \
getattr(self, '_' + encoding)(r['response'])
r['headers'] += [('Content-Encoding', encoding)]
break
cors_headers = self._cors_headers(environ)
start_response(r['status'], r['headers'] + cors_headers)
return [r['response']]
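    # Illustrative sketch (not part of the library): handle_request() serves
    # every polling and upgrade request, so a minimal WSGI deployment only
    # needs to route all traffic to it. Assuming the public Server and
    # WSGIApp classes of this package and werkzeug for serving:
    #
    #     import engineio
    #     from werkzeug.serving import run_simple
    #
    #     eio = engineio.Server(async_mode='threading')
    #     app = engineio.WSGIApp(eio)   # forwards requests to handle_request()
    #     run_simple('localhost', 5000, app, threaded=True)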
def start_background_task(self, target, *args, **kwargs):
"""Start a background task using the appropriate async model.
This is a utility function that applications can use to start a
background task using the method that is compatible with the
selected async mode.
:param target: the target function to execute.
:param args: arguments to pass to the function.
:param kwargs: keyword arguments to pass to the function.
This function returns an object that represents the background task,
        on which the ``join()`` method can be invoked to wait for the task to
complete.
"""
th = self._async['thread'](target=target, args=args, kwargs=kwargs)
th.start()
return th # pragma: no cover
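    # Illustrative sketch (hypothetical application code): starting a
    # monitoring task through this helper, assuming ``eio`` is a Server
    # instance created by the application:
    #
    #     def monitor():
    #         while True:
    #             eio.sleep(10)
    #             eio.logger.info('connected clients: %d', len(eio.sockets))
    #
    #     task = eio.start_background_task(monitor)
    #     # task.join() would block until monitor() returns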
def sleep(self, seconds=0):
"""Sleep for the requested amount of time using the appropriate async
model.
This is a utility function that applications can use to put a task to
sleep without having to worry about using the correct call for the
selected async mode.
"""
return self._async['sleep'](seconds)
def create_queue(self, *args, **kwargs):
"""Create a queue object using the appropriate async model.
This is a utility function that applications can use to create a queue
without having to worry about using the correct call for the selected
async mode.
"""
return self._async['queue'](*args, **kwargs)
def get_queue_empty_exception(self):
"""Return the queue empty exception for the appropriate async model.
This is a utility function that applications can use to work with a
queue without having to worry about using the correct call for the
selected async mode.
"""
return self._async['queue_empty']
def create_event(self, *args, **kwargs):
"""Create an event object using the appropriate async model.
This is a utility function that applications can use to create an
event without having to worry about using the correct call for the
selected async mode.
"""
return self._async['event'](*args, **kwargs)
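    # Illustrative sketch (hypothetical application code): the queue and event
    # helpers return objects that match the selected async mode, so
    # application code can stay mode-agnostic. Assuming ``eio`` is a Server
    # instance:
    #
    #     q = eio.create_queue()
    #     ev = eio.create_event()
    #     q.put('item')
    #     try:
    #         item = q.get(timeout=1)
    #     except eio.get_queue_empty_exception():
    #         item = None
    #     ev.set()    # releases anything blocked on ev.wait()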
def generate_id(self):
"""Generate a unique session id."""
id = base64.b64encode(
secrets.token_bytes(12) + self.sequence_number.to_bytes(3, 'big'))
self.sequence_number = (self.sequence_number + 1) & 0xffffff
return id.decode('utf-8').replace('/', '_').replace('+', '-')
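    # Illustrative note: 12 random bytes plus a 3-byte rolling counter are
    # base64-encoded into a 20-character session id, made URL-safe by the
    # substitutions above, e.g.:
    #
    #     >>> len(base64.b64encode(secrets.token_bytes(12) +
    #     ...                      (0).to_bytes(3, 'big')))
    #     20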
def _generate_sid_cookie(self, sid, attributes):
"""Generate the sid cookie."""
cookie = attributes.get('name', 'io') + '=' + sid
for attribute, value in attributes.items():
if attribute == 'name':
continue
if callable(value):
value = value()
if value is True:
cookie += '; ' + attribute
else:
cookie += '; ' + attribute + '=' + value
return cookie
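    # Illustrative example: with sid 'abc123' and a cookie configuration of
    #     {'name': 'io', 'path': '/', 'SameSite': 'Strict', 'secure': True}
    # this helper produces
    #     'io=abc123; path=/; SameSite=Strict; secure'
    # (boolean attributes are emitted without a value, callables are invoked
    # first to obtain the value).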
def _handle_connect(self, environ, start_response, transport,
jsonp_index=None):
"""Handle a client connection request."""
if self.start_service_task:
# start the service task to monitor connected clients
self.start_service_task = False
self.start_background_task(self._service_task)
sid = self.generate_id()
s = socket.Socket(self, sid)
self.sockets[sid] = s
pkt = packet.Packet(packet.OPEN, {
'sid': sid,
'upgrades': self._upgrades(sid, transport),
'pingTimeout': int(self.ping_timeout * 1000),
'pingInterval': int(
self.ping_interval + self.ping_interval_grace_period) * 1000})
s.send(pkt)
s.schedule_ping()
        # NOTE: some sections below are marked as "no cover" to work around
        # what seems to be a bug in the coverage package. All the lines below
        # are covered by tests, but some are not reported as such for some
        # reason.
ret = self._trigger_event('connect', sid, environ, run_async=False)
if ret is not None and ret is not True: # pragma: no cover
del self.sockets[sid]
self.logger.warning('Application rejected connection')
return self._unauthorized(ret or None)
if transport == 'websocket': # pragma: no cover
ret = s.handle_get_request(environ, start_response)
if s.closed and sid in self.sockets:
# websocket connection ended, so we are done
del self.sockets[sid]
return ret
else: # pragma: no cover
s.connected = True
headers = None
if self.cookie:
if isinstance(self.cookie, dict):
headers = [(
'Set-Cookie',
self._generate_sid_cookie(sid, self.cookie)
)]
else:
headers = [(
'Set-Cookie',
self._generate_sid_cookie(sid, {
'name': self.cookie, 'path': '/', 'SameSite': 'Lax'
})
)]
try:
return self._ok(s.poll(), headers=headers,
jsonp_index=jsonp_index)
except exceptions.QueueEmpty:
return self._bad_request()
def _upgrades(self, sid, transport):
"""Return the list of possible upgrades for a client connection."""
if not self.allow_upgrades or self._get_socket(sid).upgraded or \
transport == 'websocket':
return []
if self._async['websocket'] is None: # pragma: no cover
self._log_error_once(
'The WebSocket transport is not available, you must install a '
'WebSocket server that is compatible with your async mode to '
'enable it. See the documentation for details.',
'no-websocket')
return []
return ['websocket']
def _trigger_event(self, event, *args, **kwargs):
"""Invoke an event handler."""
run_async = kwargs.pop('run_async', False)
if event in self.handlers:
if run_async:
return self.start_background_task(self.handlers[event], *args)
else:
try:
return self.handlers[event](*args)
except:
self.logger.exception(event + ' handler error')
if event == 'connect':
# if connect handler raised error we reject the
# connection
return False
def _get_socket(self, sid):
"""Return the socket object for a given session."""
try:
s = self.sockets[sid]
except KeyError:
raise KeyError('Session not found')
if s.closed:
del self.sockets[sid]
raise KeyError('Session is disconnected')
return s
def _ok(self, packets=None, headers=None, jsonp_index=None):
"""Generate a successful HTTP response."""
if packets is not None:
if headers is None:
headers = []
headers += [('Content-Type', 'text/plain; charset=UTF-8')]
return {'status': '200 OK',
'headers': headers,
'response': payload.Payload(packets=packets).encode(
jsonp_index=jsonp_index).encode('utf-8')}
else:
return {'status': '200 OK',
'headers': [('Content-Type', 'text/plain')],
'response': b'OK'}
def _bad_request(self, message=None):
"""Generate a bad request HTTP error response."""
if message is None:
message = 'Bad Request'
message = packet.Packet.json.dumps(message)
return {'status': '400 BAD REQUEST',
'headers': [('Content-Type', 'text/plain')],
'response': message.encode('utf-8')}
def _method_not_found(self):
"""Generate a method not found HTTP error response."""
return {'status': '405 METHOD NOT FOUND',
'headers': [('Content-Type', 'text/plain')],
'response': b'Method Not Found'}
def _unauthorized(self, message=None):
"""Generate a unauthorized HTTP error response."""
if message is None:
message = 'Unauthorized'
message = packet.Packet.json.dumps(message)
return {'status': '401 UNAUTHORIZED',
'headers': [('Content-Type', 'application/json')],
'response': message.encode('utf-8')}
def _cors_allowed_origins(self, environ):
default_origins = []
if 'wsgi.url_scheme' in environ and 'HTTP_HOST' in environ:
default_origins.append('{scheme}://{host}'.format(
scheme=environ['wsgi.url_scheme'], host=environ['HTTP_HOST']))
if 'HTTP_X_FORWARDED_PROTO' in environ or \
'HTTP_X_FORWARDED_HOST' in environ:
scheme = environ.get(
'HTTP_X_FORWARDED_PROTO',
environ['wsgi.url_scheme']).split(',')[0].strip()
default_origins.append('{scheme}://{host}'.format(
scheme=scheme, host=environ.get(
'HTTP_X_FORWARDED_HOST', environ['HTTP_HOST']).split(
',')[0].strip()))
if self.cors_allowed_origins is None:
allowed_origins = default_origins
elif self.cors_allowed_origins == '*':
allowed_origins = None
elif isinstance(self.cors_allowed_origins, str):
allowed_origins = [self.cors_allowed_origins]
elif callable(self.cors_allowed_origins):
origin = environ.get('HTTP_ORIGIN')
allowed_origins = [origin] \
if self.cors_allowed_origins(origin) else []
else:
allowed_origins = self.cors_allowed_origins
return allowed_origins
def _cors_headers(self, environ):
"""Return the cross-origin-resource-sharing headers."""
if self.cors_allowed_origins == []:
# special case, CORS handling is completely disabled
return []
headers = []
allowed_origins = self._cors_allowed_origins(environ)
if 'HTTP_ORIGIN' in environ and \
(allowed_origins is None or environ['HTTP_ORIGIN'] in
allowed_origins):
headers = [('Access-Control-Allow-Origin', environ['HTTP_ORIGIN'])]
if environ['REQUEST_METHOD'] == 'OPTIONS':
headers += [('Access-Control-Allow-Methods', 'OPTIONS, GET, POST')]
if 'HTTP_ACCESS_CONTROL_REQUEST_HEADERS' in environ:
headers += [('Access-Control-Allow-Headers',
environ['HTTP_ACCESS_CONTROL_REQUEST_HEADERS'])]
if self.cors_credentials:
headers += [('Access-Control-Allow-Credentials', 'true')]
return headers
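    # Illustrative example: with the default cors_allowed_origins (None) and a
    # request environ containing
    #     {'wsgi.url_scheme': 'https', 'HTTP_HOST': 'example.com',
    #      'HTTP_ORIGIN': 'https://example.com', 'REQUEST_METHOD': 'GET'}
    # _cors_allowed_origins() yields ['https://example.com'], so
    # _cors_headers() returns
    #     [('Access-Control-Allow-Origin', 'https://example.com')]
    # plus the credentials header when cors_credentials is enabled; OPTIONS
    # preflights additionally receive the Allow-Methods / Allow-Headers
    # entries built above.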
def _gzip(self, response):
"""Apply gzip compression to a response."""
bytesio = io.BytesIO()
with gzip.GzipFile(fileobj=bytesio, mode='w') as gz:
gz.write(response)
return bytesio.getvalue()
def _deflate(self, response):
"""Apply deflate compression to a response."""
return zlib.compress(response)
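    # Illustrative sketch: responses larger than compression_threshold are run
    # through one of these helpers according to the Accept-Encoding header;
    # the standard-library round trip is simply
    #
    #     import gzip, zlib
    #     body = b'6'                                  # an encoded payload
    #     assert gzip.decompress(gzip.compress(body)) == body    # 'gzip'
    #     assert zlib.decompress(zlib.compress(body)) == body    # 'deflate'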
def _log_error_once(self, message, message_key):
"""Log message with logging.ERROR level the first time, then log
with given level."""
if message_key not in self.log_message_keys:
self.logger.error(message + ' (further occurrences of this error '
'will be logged with level INFO)')
self.log_message_keys.add(message_key)
else:
self.logger.info(message)
def _service_task(self): # pragma: no cover
"""Monitor connected clients and clean up those that time out."""
while True:
if len(self.sockets) == 0:
# nothing to do
self.sleep(self.ping_timeout)
continue
# go through the entire client list in a ping interval cycle
sleep_interval = float(self.ping_timeout) / len(self.sockets)
try:
# iterate over the current clients
for s in self.sockets.copy().values():
if not s.closing and not s.closed:
s.check_ping_timeout()
self.sleep(sleep_interval)
except (SystemExit, KeyboardInterrupt):
self.logger.info('service task canceled')
break
except:
# an unexpected exception has occurred, log it and continue
self.logger.exception('service task exception')

View file

@ -0,0 +1,261 @@
import sys
import time
from . import exceptions
from . import packet
from . import payload
class Socket(object):
"""An Engine.IO socket."""
upgrade_protocols = ['websocket']
def __init__(self, server, sid):
self.server = server
self.sid = sid
self.queue = self.server.create_queue()
self.last_ping = None
self.connected = False
self.upgrading = False
self.upgraded = False
self.closing = False
self.closed = False
self.session = {}
def poll(self):
"""Wait for packets to send to the client."""
queue_empty = self.server.get_queue_empty_exception()
try:
packets = [self.queue.get(
timeout=self.server.ping_interval + self.server.ping_timeout)]
self.queue.task_done()
except queue_empty:
raise exceptions.QueueEmpty()
if packets == [None]:
return []
while True:
try:
pkt = self.queue.get(block=False)
self.queue.task_done()
if pkt is None:
self.queue.put(None)
break
packets.append(pkt)
except queue_empty:
break
return packets
def receive(self, pkt):
"""Receive packet from the client."""
packet_name = packet.packet_names[pkt.packet_type] \
if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN'
self.server.logger.info('%s: Received packet %s data %s',
self.sid, packet_name,
pkt.data if not isinstance(pkt.data, bytes)
else '<binary>')
if pkt.packet_type == packet.PONG:
self.schedule_ping()
elif pkt.packet_type == packet.MESSAGE:
self.server._trigger_event('message', self.sid, pkt.data,
run_async=self.server.async_handlers)
elif pkt.packet_type == packet.UPGRADE:
self.send(packet.Packet(packet.NOOP))
elif pkt.packet_type == packet.CLOSE:
self.close(wait=False, abort=True)
else:
raise exceptions.UnknownPacketError()
def check_ping_timeout(self):
"""Make sure the client is still responding to pings."""
if self.closed:
raise exceptions.SocketIsClosedError()
if self.last_ping and \
time.time() - self.last_ping > self.server.ping_timeout:
self.server.logger.info('%s: Client is gone, closing socket',
self.sid)
            # Passing abort=False here makes close() write a CLOSE packet,
            # which moves half-open sockets to their correct disconnected
            # state
self.close(wait=False, abort=False)
return False
return True
def send(self, pkt):
"""Send a packet to the client."""
if not self.check_ping_timeout():
return
else:
self.queue.put(pkt)
self.server.logger.info('%s: Sending packet %s data %s',
self.sid, packet.packet_names[pkt.packet_type],
pkt.data if not isinstance(pkt.data, bytes)
else '<binary>')
def handle_get_request(self, environ, start_response):
"""Handle a long-polling GET request from the client."""
connections = [
s.strip()
for s in environ.get('HTTP_CONNECTION', '').lower().split(',')]
transport = environ.get('HTTP_UPGRADE', '').lower()
if 'upgrade' in connections and transport in self.upgrade_protocols:
self.server.logger.info('%s: Received request to upgrade to %s',
self.sid, transport)
return getattr(self, '_upgrade_' + transport)(environ,
start_response)
if self.upgrading or self.upgraded:
# we are upgrading to WebSocket, do not return any more packets
# through the polling endpoint
return [packet.Packet(packet.NOOP)]
try:
packets = self.poll()
except exceptions.QueueEmpty:
exc = sys.exc_info()
self.close(wait=False)
raise exc[1].with_traceback(exc[2])
return packets
def handle_post_request(self, environ):
"""Handle a long-polling POST request from the client."""
length = int(environ.get('CONTENT_LENGTH', '0'))
if length > self.server.max_http_buffer_size:
raise exceptions.ContentTooLongError()
else:
body = environ['wsgi.input'].read(length).decode('utf-8')
p = payload.Payload(encoded_payload=body)
for pkt in p.packets:
self.receive(pkt)
def close(self, wait=True, abort=False):
"""Close the socket connection."""
if not self.closed and not self.closing:
self.closing = True
self.server._trigger_event('disconnect', self.sid, run_async=False)
if not abort:
self.send(packet.Packet(packet.CLOSE))
self.closed = True
self.queue.put(None)
if wait:
self.queue.join()
def schedule_ping(self):
def send_ping():
self.last_ping = None
self.server.sleep(self.server.ping_interval)
if not self.closing and not self.closed:
self.last_ping = time.time()
self.send(packet.Packet(packet.PING))
self.server.start_background_task(send_ping)
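    # Illustrative example: with the common defaults of ping_interval=25 and
    # ping_timeout=20, the first PING is sent at t=25s; if no PONG has been
    # received by t=45s, check_ping_timeout() treats the client as gone and
    # closes the socket.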
def _upgrade_websocket(self, environ, start_response):
"""Upgrade the connection from polling to websocket."""
if self.upgraded:
raise IOError('Socket has been upgraded already')
if self.server._async['websocket'] is None:
# the selected async mode does not support websocket
return self.server._bad_request()
ws = self.server._async['websocket'](
self._websocket_handler, self.server)
return ws(environ, start_response)
def _websocket_handler(self, ws):
"""Engine.IO handler for websocket transport."""
def websocket_wait():
data = ws.wait()
if data and len(data) > self.server.max_http_buffer_size:
raise ValueError('packet is too large')
return data
# try to set a socket timeout matching the configured ping interval
# and timeout
for attr in ['_sock', 'socket']: # pragma: no cover
if hasattr(ws, attr) and hasattr(getattr(ws, attr), 'settimeout'):
getattr(ws, attr).settimeout(
self.server.ping_interval + self.server.ping_timeout)
if self.connected:
# the socket was already connected, so this is an upgrade
self.upgrading = True # hold packet sends during the upgrade
pkt = websocket_wait()
decoded_pkt = packet.Packet(encoded_packet=pkt)
if decoded_pkt.packet_type != packet.PING or \
decoded_pkt.data != 'probe':
self.server.logger.info(
'%s: Failed websocket upgrade, no PING packet', self.sid)
self.upgrading = False
return []
ws.send(packet.Packet(packet.PONG, data='probe').encode())
self.queue.put(packet.Packet(packet.NOOP)) # end poll
pkt = websocket_wait()
decoded_pkt = packet.Packet(encoded_packet=pkt)
if decoded_pkt.packet_type != packet.UPGRADE:
self.upgraded = False
self.server.logger.info(
('%s: Failed websocket upgrade, expected UPGRADE packet, '
'received %s instead.'),
self.sid, pkt)
self.upgrading = False
return []
self.upgraded = True
self.upgrading = False
else:
self.connected = True
self.upgraded = True
# start separate writer thread
def writer():
while True:
packets = None
try:
packets = self.poll()
except exceptions.QueueEmpty:
break
if not packets:
# empty packet list returned -> connection closed
break
try:
for pkt in packets:
ws.send(pkt.encode())
except:
break
writer_task = self.server.start_background_task(writer)
self.server.logger.info(
'%s: Upgrade to websocket successful', self.sid)
while True:
p = None
try:
p = websocket_wait()
except Exception as e:
                # if the socket is already closed, this error is most likely a
                # consequence of the disconnect
if not self.closed: # pragma: no cover
self.server.logger.info(
'%s: Unexpected error "%s", closing connection',
self.sid, str(e))
break
if p is None:
# connection closed by client
break
pkt = packet.Packet(encoded_packet=p)
try:
self.receive(pkt)
except exceptions.UnknownPacketError: # pragma: no cover
pass
except exceptions.SocketIsClosedError: # pragma: no cover
self.server.logger.info('Receive error -- socket is closed')
break
except: # pragma: no cover
                # on any other unexpected exception, log the error and close
                # the connection cleanly
self.server.logger.exception('Unknown receive error')
break
self.queue.put(None) # unlock the writer task so that it can exit
writer_task.join()
self.close(wait=False, abort=True)
return []
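    # Illustrative sketch of the polling-to-websocket upgrade handled above,
    # as seen on the wire (Engine.IO v4 packet framing assumed):
    #
    #     client -> server   "2probe"   PING carrying 'probe'
    #     server -> client   "3probe"   PONG carrying 'probe'
    #     client -> server   "5"        UPGRADE
    #
    # from this point on all packets flow over the websocket, while the old
    # polling endpoint only returns NOOP ("6") packets.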

View file

@ -0,0 +1,60 @@
content_types = {
'css': 'text/css',
'gif': 'image/gif',
'html': 'text/html',
'jpg': 'image/jpeg',
'js': 'application/javascript',
'json': 'application/json',
'png': 'image/png',
'txt': 'text/plain',
}
def get_static_file(path, static_files):
"""Return the local filename and content type for the requested static
file URL.
:param path: the path portion of the requested URL.
:param static_files: a static file configuration dictionary.
This function returns a dictionary with two keys, "filename" and
"content_type". If the requested URL does not match any static file, the
return value is None.
"""
extra_path = ''
if path in static_files:
f = static_files[path]
else:
f = None
while path != '':
path, last = path.rsplit('/', 1)
extra_path = '/' + last + extra_path
if path in static_files:
f = static_files[path]
break
elif path + '/' in static_files:
f = static_files[path + '/']
break
if f:
if isinstance(f, str):
f = {'filename': f}
else:
f = f.copy() # in case it is mutated below
if f['filename'].endswith('/') and extra_path.startswith('/'):
extra_path = extra_path[1:]
f['filename'] += extra_path
if f['filename'].endswith('/'):
if '' in static_files:
if isinstance(static_files[''], str):
f['filename'] += static_files['']
else:
f['filename'] += static_files['']['filename']
if 'content_type' in static_files['']:
f['content_type'] = static_files['']['content_type']
else:
f['filename'] += 'index.html'
if 'content_type' not in f:
ext = f['filename'].rsplit('.')[-1]
f['content_type'] = content_types.get(
ext, 'application/octet-stream')
return f
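# Illustrative sketch (hypothetical configuration): a static_files map and the
# lookups it produces through get_static_file():
#
#     static_files = {
#         '/': 'latency.html',
#         '/static': './public',
#     }
#     get_static_file('/', static_files)
#     # -> {'filename': 'latency.html', 'content_type': 'text/html'}
#     get_static_file('/static/style.css', static_files)
#     # -> {'filename': './public/style.css', 'content_type': 'text/css'}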