path: root/module/lib
author     RaNaN <Mast3rRaNaN@hotmail.de>  2011-02-02 21:46:15 +0100
committer  RaNaN <Mast3rRaNaN@hotmail.de>  2011-02-02 21:46:15 +0100
commit     7a503302fbe6fcc23af94de2fe313298c3a6d95c (patch)
tree       724a8581008d3d78fed0be965161e330eb4b1064 /module/lib
parent     closes #226, #227 (diff)
download   pyload-7a503302fbe6fcc23af94de2fe313298c3a6d95c.tar.xz
removed django => now using bottle, new built-in threaded SSL server
Don't forget to install jinja and beaker!
Diffstat (limited to 'module/lib')
-rw-r--r--  module/lib/bottle.py              | 1934
-rw-r--r--  module/lib/wsgiserver/LICENSE.txt |   25
-rw-r--r--  module/lib/wsgiserver/__init__.py | 1794
3 files changed, 3753 insertions(+), 0 deletions(-)
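
For context, a minimal sketch of serving a Jinja2 template through the bundled Bottle module (illustrative only; the route, template string, host and port are example values, not code from this commit):

    from bottle import route, run, template, Jinja2Template

    @route('/hello/:name')
    def hello(name):
        # Jinja2 is used via bottle's Jinja2Template adapter; Beaker would be
        # wired in separately as WSGI session middleware.
        return template('Hello {{ name }}!', template_adapter=Jinja2Template,
                        name=name)

    run(host='localhost', port=8080)
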
diff --git a/module/lib/bottle.py b/module/lib/bottle.py
new file mode 100644
index 000000000..8f2be9e81
--- /dev/null
+++ b/module/lib/bottle.py
@@ -0,0 +1,1934 @@
+# -*- coding: utf-8 -*-
+"""
+Bottle is a fast and simple micro-framework for small web applications. It
+offers request dispatching (Routes) with url parameter support, templates,
+a built-in HTTP server and adapters for many third-party WSGI/HTTP servers and
+template engines - all in a single file and with no dependencies other than the
+Python Standard Library.
+
+Homepage and documentation: http://bottle.paws.de/
+
+Licence (MIT)
+-------------
+
+ Copyright (c) 2009, Marcel Hellkamp.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ THE SOFTWARE.
+
+
+Example
+-------
+
+This is an example::
+
+ from bottle import route, run, request, response, static_file, abort
+
+ @route('/')
+ def hello_world():
+ return 'Hello World!'
+
+ @route('/hello/:name')
+ def hello_name(name):
+ return 'Hello %s!' % name
+
+ @route('/hello', method='POST')
+ def hello_post():
+ name = request.POST['name']
+ return 'Hello %s!' % name
+
+ @route('/static/:filename#.*#')
+ def static(filename):
+ return static_file(filename, root='/path/to/static/files/')
+
+ run(host='localhost', port=8080)
+"""
+
+from __future__ import with_statement
+
+__author__ = 'Marcel Hellkamp'
+__version__ = '0.8.5'
+__license__ = 'MIT'
+
+import base64
+import cgi
+import email.utils
+import functools
+import hmac
+import inspect
+import itertools
+import mimetypes
+import os
+import re
+import subprocess
+import sys
+import thread
+import threading
+import time
+import tokenize
+import tempfile
+
+from Cookie import SimpleCookie
+from tempfile import TemporaryFile
+from traceback import format_exc
+from urllib import quote as urlquote
+from urlparse import urlunsplit, urljoin
+
+try:
+ from collections import MutableMapping as DictMixin
+except ImportError: # pragma: no cover
+ from UserDict import DictMixin
+
+try:
+ from urlparse import parse_qs
+except ImportError: # pragma: no cover
+ from cgi import parse_qs
+
+try:
+ import cPickle as pickle
+except ImportError: # pragma: no cover
+ import pickle
+
+try:
+ try:
+ from json import dumps as json_dumps
+ except ImportError: # pragma: no cover
+ from simplejson import dumps as json_dumps
+except ImportError: # pragma: no cover
+ json_dumps = None
+
+if sys.version_info >= (3,0,0): # pragma: no cover
+ # See Request.POST
+ from io import BytesIO
+ from io import TextIOWrapper
+ class NCTextIOWrapper(TextIOWrapper):
+ ''' Garbage collecting an io.TextIOWrapper(buffer) instance closes the
+ wrapped buffer. This subclass keeps it open. '''
+ def close(self): pass
+ StringType = bytes
+ def touni(x, enc='utf8'): # Convert anything to unicode (py3)
+ return str(x, encoding=enc) if isinstance(x, bytes) else str(x)
+else:
+ from StringIO import StringIO as BytesIO
+ from types import StringType
+ NCTextIOWrapper = None
+ def touni(x, enc='utf8'): # Convert anything to unicode (py2)
+ return x if isinstance(x, unicode) else unicode(str(x), encoding=enc)
+
+def tob(data, enc='utf8'): # Convert strings to bytes (py2 and py3)
+ return data.encode(enc) if isinstance(data, unicode) else data
+
+# Backward compatibility
+import warnings
+def depr(message, critical=False):
+ if critical: raise DeprecationWarning(message)
+ warnings.warn(message, DeprecationWarning, stacklevel=3)
+
+
+
+
+
+
+# Exceptions and Events
+
+class BottleException(Exception):
+ """ A base class for exceptions used by bottle. """
+ pass
+
+
+class HTTPResponse(BottleException):
+ """ Used to break execution and immediately finish the response """
+ def __init__(self, output='', status=200, header=None):
+ super(BottleException, self).__init__("HTTP Response %d" % status)
+ self.status = int(status)
+ self.output = output
+ self.headers = HeaderDict(header) if header else None
+
+ def apply(self, response):
+ if self.headers:
+ for key, value in self.headers.iterallitems():
+ response.headers[key] = value
+ response.status = self.status
+
+
+class HTTPError(HTTPResponse):
+ """ Used to generate an error page """
+ def __init__(self, code=500, output='Unknown Error', exception=None, traceback=None, header=None):
+ super(HTTPError, self).__init__(output, code, header)
+ self.exception = exception
+ self.traceback = traceback
+
+ def __repr__(self):
+ return ''.join(ERROR_PAGE_TEMPLATE.render(e=self))
+
+
+
+
+
+
+# Routing
+
+class RouteError(BottleException):
+ """ This is a base class for all routing related exceptions """
+
+
+class RouteSyntaxError(RouteError):
+ """ The route parser found something not supported by this router """
+
+
+class RouteBuildError(RouteError):
+ """ The route could not been build """
+
+
+class Route(object):
+ ''' Represents a single route and can parse the dynamic route syntax '''
+ syntax = re.compile(r'(.*?)(?<!\\):([a-zA-Z_]+)?(?:#(.*?)#)?')
+ default = '[^/]+'
+
+ def __init__(self, route, target=None, name=None, static=False):
+ """ Create a Route. The route string may contain `:key`,
+ `:key#regexp#` or `:#regexp#` tokens for each dynamic part of the
+            route. These can be escaped with a backslash in front of the `:`
+            and are completely ignored if static is true. A name may be used
+ to refer to this route later (depends on Router)
+ """
+ self.route = route
+ self.target = target
+ self.name = name
+ if static:
+ self.route = self.route.replace(':','\\:')
+ self._tokens = None
+
+ def tokens(self):
+ """ Return a list of (type, value) tokens. """
+ if not self._tokens:
+ self._tokens = list(self.tokenise(self.route))
+ return self._tokens
+
+ @classmethod
+ def tokenise(cls, route):
+ ''' Split a string into an iterator of (type, value) tokens. '''
+ match = None
+ for match in cls.syntax.finditer(route):
+ pre, name, rex = match.groups()
+ if pre: yield ('TXT', pre.replace('\\:',':'))
+ if rex and name: yield ('VAR', (rex, name))
+ elif name: yield ('VAR', (cls.default, name))
+ elif rex: yield ('ANON', rex)
+ if not match:
+ yield ('TXT', route.replace('\\:',':'))
+ elif match.end() < len(route):
+ yield ('TXT', route[match.end():].replace('\\:',':'))
+
+ def group_re(self):
+ ''' Return a regexp pattern with named groups '''
+ out = ''
+ for token, data in self.tokens():
+ if token == 'TXT': out += re.escape(data)
+ elif token == 'VAR': out += '(?P<%s>%s)' % (data[1], data[0])
+ elif token == 'ANON': out += '(?:%s)' % data
+ return out
+
+ def flat_re(self):
+ ''' Return a regexp pattern with non-grouping parentheses '''
+ rf = lambda m: m.group(0) if len(m.group(1)) % 2 else m.group(1) + '(?:'
+ return re.sub(r'(\\*)(\(\?P<[^>]*>|\((?!\?))', rf, self.group_re())
+
+ def format_str(self):
+ ''' Return a format string with named fields. '''
+ out, i = '', 0
+ for token, value in self.tokens():
+ if token == 'TXT': out += value.replace('%','%%')
+ elif token == 'ANON': out += '%%(anon%d)s' % i; i+=1
+ elif token == 'VAR': out += '%%(%s)s' % value[1]
+ return out
+
+ @property
+ def static(self):
+ return not self.is_dynamic()
+
+ def is_dynamic(self):
+ ''' Return true if the route contains dynamic parts '''
+ for token, value in self.tokens():
+ if token != 'TXT':
+ return True
+ return False
+
+ def __repr__(self):
+ return "<Route(%s) />" % repr(self.route)
+
+ def __eq__(self, other):
+ return self.route == other.route
+
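+# Illustrative sketch (never called by the framework): how the dynamic route
+# syntax above maps to patterns and format strings. `_route_syntax_demo` is a
+# throwaway name for this example only.
+def _route_syntax_demo():
+    route = Route('hello/:name#[a-z]+#')
+    # tokens() -> [('TXT', 'hello/'), ('VAR', ('[a-z]+', 'name'))]
+    tokens = route.tokens()
+    # group_re() yields a pattern with a named group for `name`;
+    # format_str() yields 'hello/%(name)s' for URL building.
+    return tokens, route.group_re(), route.format_str()
+
+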
+class Router(object):
+ ''' A route associates a string (e.g. URL) with an object (e.g. function)
+ Some dynamic routes may extract parts of the string and provide them as
+ a dictionary. This router matches a string against multiple routes and
+ returns the associated object along with the extracted data.
+ '''
+
+ def __init__(self):
+ self.routes = [] # List of all installed routes
+ self.named = {} # Cache for named routes and their format strings
+ self.static = {} # Cache for static routes
+ self.dynamic = [] # Search structure for dynamic routes
+
+ def add(self, route, target=None, **ka):
+ """ Add a route->target pair or a :class:`Route` object to the Router.
+ Return the Route object. See :class:`Route` for details.
+ """
+ if not isinstance(route, Route):
+ route = Route(route, target, **ka)
+ if self.get_route(route):
+            raise RouteError('Route %s is not unique.' % route)
+ self.routes.append(route)
+ return route
+
+ def get_route(self, route, target=None, **ka):
+ ''' Get a route from the router by specifying either the same
+ parameters as in :meth:`add` or comparing to an instance of
+ :class:`Route`. Note that not all parameters are considered by the
+ compare function. '''
+ if not isinstance(route, Route):
+ route = Route(route, **ka)
+ for known in self.routes:
+ if route == known:
+ return known
+ return None
+
+ def match(self, uri):
+        ''' Match a URI and return a (target, urlargs) tuple '''
+ if uri in self.static:
+ return self.static[uri], {}
+ for combined, subroutes in self.dynamic:
+ match = combined.match(uri)
+ if not match: continue
+ target, args_re = subroutes[match.lastindex - 1]
+ args = args_re.match(uri).groupdict() if args_re else {}
+ return target, args
+ return None, {}
+
+ def build(self, _name, **args):
+        ''' Build a URI out of a named route and values for the wildcards. '''
+ try:
+ return self.named[_name] % args
+ except KeyError:
+ raise RouteBuildError("No route found with name '%s'." % _name)
+
+ def compile(self):
+ ''' Build the search structures. Call this before actually using the
+ router.'''
+ self.named = {}
+ self.static = {}
+ self.dynamic = []
+ for route in self.routes:
+ if route.name:
+ self.named[route.name] = route.format_str()
+ if route.static:
+ self.static[route.route] = route.target
+ continue
+ gpatt = route.group_re()
+ fpatt = route.flat_re()
+ try:
+ gregexp = re.compile('^(%s)$' % gpatt) if '(?P' in gpatt else None
+ combined = '%s|(^%s$)' % (self.dynamic[-1][0].pattern, fpatt)
+ self.dynamic[-1] = (re.compile(combined), self.dynamic[-1][1])
+ self.dynamic[-1][1].append((route.target, gregexp))
+ except (AssertionError, IndexError), e: # AssertionError: Too many groups
+ self.dynamic.append((re.compile('(^%s$)'%fpatt),[(route.target, gregexp)]))
+ except re.error, e:
+ raise RouteSyntaxError("Could not add Route: %s (%s)" % (route, e))
+
+ def __eq__(self, other):
+ return self.routes == other.routes
+
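+# Usage sketch, for illustration only: the route strings and targets below are
+# made-up example values; this helper is not part of the framework.
+def _router_demo():
+    router = Router()
+    router.add('hello/:name', target='hello-handler')
+    router.add('static', target='static-handler')
+    router.compile()  # build the static dict and combined regexp first
+    # Static routes hit the dict cache; dynamic ones go through the regexp.
+    return router.match('static'), router.match('hello/world')
+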
+
+
+
+
+# WSGI abstraction: Application, Request and Response objects
+
+class Bottle(object):
+ """ WSGI application """
+
+ def __init__(self, catchall=True, autojson=True, config=None):
+ """ Create a new bottle instance.
+ You usually don't do that. Use `bottle.app.push()` instead.
+ """
+ self.routes = Router()
+ self.mounts = {}
+ self.error_handler = {}
+ self.catchall = catchall
+ self.config = config or {}
+ self.serve = True
+ self.castfilter = []
+ if autojson and json_dumps:
+ self.add_filter(dict, dict2json)
+
+ def optimize(self, *a, **ka):
+ depr("Bottle.optimize() is obsolete.")
+
+ def mount(self, app, script_path):
+ ''' Mount a Bottle application to a specific URL prefix '''
+ if not isinstance(app, Bottle):
+ raise TypeError('Only Bottle instances are supported for now.')
+ script_path = '/'.join(filter(None, script_path.split('/')))
+ path_depth = script_path.count('/') + 1
+ if not script_path:
+ raise TypeError('Empty script_path. Perhaps you want a merge()?')
+ for other in self.mounts:
+ if other.startswith(script_path):
+ raise TypeError('Conflict with existing mount: %s' % other)
+ @self.route('/%s/:#.*#' % script_path, method="ANY")
+ def mountpoint():
+ request.path_shift(path_depth)
+ return app.handle(request.path, request.method)
+ self.mounts[script_path] = app
+
+ def add_filter(self, ftype, func):
+ ''' Register a new output filter. Whenever bottle hits a handler output
+ matching `ftype`, `func` is applied to it. '''
+ if not isinstance(ftype, type):
+ raise TypeError("Expected type object, got %s" % type(ftype))
+ self.castfilter = [(t, f) for (t, f) in self.castfilter if t != ftype]
+ self.castfilter.append((ftype, func))
+ self.castfilter.sort()
+
+ def match_url(self, path, method='GET'):
+ """ Find a callback bound to a path and a specific HTTP method.
+ Return (callback, param) tuple or raise HTTPError.
+ method: HEAD falls back to GET. All methods fall back to ANY.
+ """
+ path, method = path.strip().lstrip('/'), method.upper()
+ callbacks, args = self.routes.match(path)
+ if not callbacks:
+ raise HTTPError(404, "Not found: " + path)
+ if method in callbacks:
+ return callbacks[method], args
+ if method == 'HEAD' and 'GET' in callbacks:
+ return callbacks['GET'], args
+ if 'ANY' in callbacks:
+ return callbacks['ANY'], args
+ allow = [m for m in callbacks if m != 'ANY']
+ if 'GET' in allow and 'HEAD' not in allow:
+ allow.append('HEAD')
+ raise HTTPError(405, "Method not allowed.",
+ header=[('Allow',",".join(allow))])
+
+ def get_url(self, routename, **kargs):
+ """ Return a string that matches a named route """
+ scriptname = request.environ.get('SCRIPT_NAME', '').strip('/') + '/'
+ location = self.routes.build(routename, **kargs).lstrip('/')
+ return urljoin(urljoin('/', scriptname), location)
+
+ def route(self, path=None, method='GET', **kargs):
+ """ Decorator: bind a function to a GET request path.
+
+ If the path parameter is None, the signature of the decorated
+ function is used to generate the paths. See yieldroutes()
+ for details.
+
+ The method parameter (default: GET) specifies the HTTP request
+ method to listen to. You can specify a list of methods too.
+ """
+ def wrapper(callback):
+ routes = [path] if path else yieldroutes(callback)
+ methods = method.split(';') if isinstance(method, str) else method
+ for r in routes:
+ for m in methods:
+ r, m = r.strip().lstrip('/'), m.strip().upper()
+ old = self.routes.get_route(r, **kargs)
+ if old:
+ old.target[m] = callback
+ else:
+ self.routes.add(r, {m: callback}, **kargs)
+ self.routes.compile()
+ return callback
+ return wrapper
+
+ def get(self, path=None, method='GET', **kargs):
+ """ Decorator: Bind a function to a GET request path.
+ See :meth:'route' for details. """
+ return self.route(path, method, **kargs)
+
+ def post(self, path=None, method='POST', **kargs):
+ """ Decorator: Bind a function to a POST request path.
+ See :meth:'route' for details. """
+ return self.route(path, method, **kargs)
+
+ def put(self, path=None, method='PUT', **kargs):
+ """ Decorator: Bind a function to a PUT request path.
+ See :meth:'route' for details. """
+ return self.route(path, method, **kargs)
+
+ def delete(self, path=None, method='DELETE', **kargs):
+ """ Decorator: Bind a function to a DELETE request path.
+ See :meth:'route' for details. """
+ return self.route(path, method, **kargs)
+
+ def error(self, code=500):
+ """ Decorator: Registrer an output handler for a HTTP error code"""
+ def wrapper(handler):
+ self.error_handler[int(code)] = handler
+ return handler
+ return wrapper
+
+ def handle(self, url, method):
+ """ Execute the handler bound to the specified url and method and return
+            its output. If catchall is true, exceptions are caught and returned as
+ HTTPError(500) objects. """
+ if not self.serve:
+ return HTTPError(503, "Server stopped")
+ try:
+ handler, args = self.match_url(url, method)
+ return handler(**args)
+ except HTTPResponse, e:
+ return e
+ except Exception, e:
+ if isinstance(e, (KeyboardInterrupt, SystemExit, MemoryError))\
+ or not self.catchall:
+ raise
+ return HTTPError(500, 'Unhandled exception', e, format_exc(10))
+
+ def _cast(self, out, request, response, peek=None):
+ """ Try to convert the parameter into something WSGI compatible and set
+ correct HTTP headers when possible.
+ Support: False, str, unicode, dict, HTTPResponse, HTTPError, file-like,
+ iterable of strings and iterable of unicodes
+ """
+ # Filtered types (recursive, because they may return anything)
+ for testtype, filterfunc in self.castfilter:
+ if isinstance(out, testtype):
+ return self._cast(filterfunc(out), request, response)
+
+ # Empty output is done here
+ if not out:
+ response.headers['Content-Length'] = 0
+ return []
+ # Join lists of byte or unicode strings. Mixed lists are NOT supported
+ if isinstance(out, (tuple, list))\
+ and isinstance(out[0], (StringType, unicode)):
+ out = out[0][0:0].join(out) # b'abc'[0:0] -> b''
+ # Encode unicode strings
+ if isinstance(out, unicode):
+ out = out.encode(response.charset)
+ # Byte Strings are just returned
+ if isinstance(out, StringType):
+ response.headers['Content-Length'] = str(len(out))
+ return [out]
+ # HTTPError or HTTPException (recursive, because they may wrap anything)
+ if isinstance(out, HTTPError):
+ out.apply(response)
+ return self._cast(self.error_handler.get(out.status, repr)(out), request, response)
+ if isinstance(out, HTTPResponse):
+ out.apply(response)
+ return self._cast(out.output, request, response)
+
+ # File-like objects.
+ if hasattr(out, 'read'):
+ if 'wsgi.file_wrapper' in request.environ:
+ return request.environ['wsgi.file_wrapper'](out)
+ elif hasattr(out, 'close') or not hasattr(out, '__iter__'):
+ return WSGIFileWrapper(out)
+
+ # Handle Iterables. We peek into them to detect their inner type.
+ try:
+ out = iter(out)
+ first = out.next()
+ while not first:
+ first = out.next()
+ except StopIteration:
+ return self._cast('', request, response)
+ except HTTPResponse, e:
+ first = e
+ except Exception, e:
+ first = HTTPError(500, 'Unhandled exception', e, format_exc(10))
+ if isinstance(e, (KeyboardInterrupt, SystemExit, MemoryError))\
+ or not self.catchall:
+ raise
+ # These are the inner types allowed in iterator or generator objects.
+ if isinstance(first, HTTPResponse):
+ return self._cast(first, request, response)
+ if isinstance(first, StringType):
+ return itertools.chain([first], out)
+ if isinstance(first, unicode):
+ return itertools.imap(lambda x: x.encode(response.charset),
+ itertools.chain([first], out))
+ return self._cast(HTTPError(500, 'Unsupported response type: %s'\
+ % type(first)), request, response)
+
+ def __call__(self, environ, start_response):
+ """ The bottle WSGI-interface. """
+ try:
+ environ['bottle.app'] = self
+ request.bind(environ)
+ response.bind(self)
+ out = self.handle(request.path, request.method)
+ out = self._cast(out, request, response)
+ # rfc2616 section 4.3
+ if response.status in (100, 101, 204, 304) or request.method == 'HEAD':
+ out = []
+ status = '%d %s' % (response.status, HTTP_CODES[response.status])
+ start_response(status, response.headerlist)
+ return out
+ except (KeyboardInterrupt, SystemExit, MemoryError):
+ raise
+ except Exception, e:
+ if not self.catchall:
+ raise
+ err = '<h1>Critical error while processing request: %s</h1>' \
+ % environ.get('PATH_INFO', '/')
+ if DEBUG:
+ err += '<h2>Error:</h2>\n<pre>%s</pre>\n' % repr(e)
+ err += '<h2>Traceback:</h2>\n<pre>%s</pre>\n' % format_exc(10)
+ environ['wsgi.errors'].write(err) #TODO: wsgi.error should not get html
+ start_response('500 INTERNAL SERVER ERROR', [('Content-Type', 'text/html')])
+ return [tob(err)]
+
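+# Illustrative sketch (not used by the framework itself): wiring a Bottle
+# instance by hand instead of using the module-level decorators. The route
+# path and handler are example values only.
+def _bottle_demo():
+    app = Bottle()
+    @app.route('/hello/:name')
+    def hello(name):
+        return 'Hello %s!' % name
+    # handle() resolves the URL and returns the handler output or an HTTPError.
+    return app.handle('/hello/world', 'GET')
+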
+
+class Request(threading.local, DictMixin):
+ """ Represents a single HTTP request using thread-local attributes.
+ The Request object wraps a WSGI environment and can be used as such.
+ """
+ def __init__(self, environ=None, config=None):
+ """ Create a new Request instance.
+
+ You usually don't do this but use the global `bottle.request`
+ instance instead.
+ """
+ self.bind(environ or {}, config)
+
+ def bind(self, environ, config=None):
+ """ Bind a new WSGI enviroment.
+
+ This is done automatically for the global `bottle.request`
+ instance on every request.
+ """
+ self.environ = environ
+ self.config = config or {}
+ # These attributes are used anyway, so it is ok to compute them here
+ self.path = '/' + environ.get('PATH_INFO', '/').lstrip('/')
+ self.method = environ.get('REQUEST_METHOD', 'GET').upper()
+
+ @property
+ def _environ(self):
+ depr("Request._environ renamed to Request.environ")
+ return self.environ
+
+ def copy(self):
+ ''' Returns a copy of self '''
+ return Request(self.environ.copy(), self.config)
+
+ def path_shift(self, shift=1):
+ ''' Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa.
+
+ :param shift: The number of path fragments to shift. May be negative to
+ change the shift direction. (default: 1)
+ '''
+ script_name = self.environ.get('SCRIPT_NAME','/')
+ self['SCRIPT_NAME'], self.path = path_shift(script_name, self.path, shift)
+ self['PATH_INFO'] = self.path
+
+ def __getitem__(self, key): return self.environ[key]
+ def __delitem__(self, key): self[key] = ""; del(self.environ[key])
+ def __iter__(self): return iter(self.environ)
+ def __len__(self): return len(self.environ)
+ def keys(self): return self.environ.keys()
+ def __setitem__(self, key, value):
+ """ Shortcut for Request.environ.__setitem__ """
+ self.environ[key] = value
+ todelete = []
+ if key in ('PATH_INFO','REQUEST_METHOD'):
+ self.bind(self.environ, self.config)
+ elif key == 'wsgi.input': todelete = ('body','forms','files','params')
+ elif key == 'QUERY_STRING': todelete = ('get','params')
+ elif key.startswith('HTTP_'): todelete = ('headers', 'cookies')
+ for key in todelete:
+ if 'bottle.' + key in self.environ:
+ del self.environ['bottle.' + key]
+
+ @property
+ def query_string(self):
+ """ The content of the QUERY_STRING environment variable. """
+ return self.environ.get('QUERY_STRING', '')
+
+ @property
+ def fullpath(self):
+ """ Request path including SCRIPT_NAME (if present) """
+ return self.environ.get('SCRIPT_NAME', '').rstrip('/') + self.path
+
+ @property
+ def url(self):
+ """ Full URL as requested by the client (computed).
+
+ This value is constructed out of different environment variables
+ and includes scheme, host, port, scriptname, path and query string.
+ """
+ scheme = self.environ.get('wsgi.url_scheme', 'http')
+ host = self.environ.get('HTTP_X_FORWARDED_HOST', self.environ.get('HTTP_HOST', None))
+ if not host:
+ host = self.environ.get('SERVER_NAME')
+ port = self.environ.get('SERVER_PORT', '80')
+ if scheme + port not in ('https443', 'http80'):
+ host += ':' + port
+ parts = (scheme, host, urlquote(self.fullpath), self.query_string, '')
+ return urlunsplit(parts)
+
+ @property
+ def content_length(self):
+ """ Content-Length header as an integer, -1 if not specified """
+ return int(self.environ.get('CONTENT_LENGTH','') or -1)
+
+ @property
+ def header(self):
+ ''' :class:`HeaderDict` filled with request headers.
+
+            HeaderDict keys are case-insensitive and normalised via str.title().
+ '''
+ if 'bottle.headers' not in self.environ:
+ header = self.environ['bottle.headers'] = HeaderDict()
+ for key, value in self.environ.iteritems():
+ if key.startswith('HTTP_'):
+ key = key[5:].replace('_','-').title()
+ header[key] = value
+ return self.environ['bottle.headers']
+
+ @property
+ def GET(self):
+ """ The QUERY_STRING parsed into a MultiDict.
+
+ Keys and values are strings. Multiple values per key are possible.
+ See MultiDict for details.
+ """
+ if 'bottle.get' not in self.environ:
+ data = parse_qs(self.query_string, keep_blank_values=True)
+ get = self.environ['bottle.get'] = MultiDict()
+ for key, values in data.iteritems():
+ for value in values:
+ get[key] = value
+ return self.environ['bottle.get']
+
+ @property
+ def POST(self):
+ """ Property: The HTTP POST body parsed into a MultiDict.
+
+ This supports urlencoded and multipart POST requests. Multipart
+ is commonly used for file uploads and may result in some of the
+ values being cgi.FieldStorage objects instead of strings.
+
+ Multiple values per key are possible. See MultiDict for details.
+ """
+ if 'bottle.post' not in self.environ:
+ self.environ['bottle.post'] = MultiDict()
+ self.environ['bottle.forms'] = MultiDict()
+ self.environ['bottle.files'] = MultiDict()
+ safe_env = {'QUERY_STRING':''} # Build a safe environment for cgi
+ for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'):
+ if key in self.environ: safe_env[key] = self.environ[key]
+ if NCTextIOWrapper:
+ fb = NCTextIOWrapper(self.body, encoding='ISO-8859-1', newline='\n')
+ # TODO: Content-Length may be wrong now. Does cgi.FieldStorage
+ # use it at all? I think not, because all tests pass.
+ else:
+ fb = self.body
+ data = cgi.FieldStorage(fp=fb, environ=safe_env, keep_blank_values=True)
+ for item in data.list or []:
+ if item.filename:
+ self.environ['bottle.post'][item.name] = item
+ self.environ['bottle.files'][item.name] = item
+ else:
+ self.environ['bottle.post'][item.name] = item.value
+ self.environ['bottle.forms'][item.name] = item.value
+ return self.environ['bottle.post']
+
+ @property
+ def forms(self):
+ """ Property: HTTP POST form data parsed into a MultiDict. """
+ if 'bottle.forms' not in self.environ: self.POST
+ return self.environ['bottle.forms']
+
+ @property
+ def files(self):
+ """ Property: HTTP POST file uploads parsed into a MultiDict. """
+ if 'bottle.files' not in self.environ: self.POST
+ return self.environ['bottle.files']
+
+ @property
+ def params(self):
+ """ A combined MultiDict with POST and GET parameters. """
+ if 'bottle.params' not in self.environ:
+ self.environ['bottle.params'] = MultiDict(self.GET)
+ self.environ['bottle.params'].update(dict(self.forms))
+ return self.environ['bottle.params']
+
+ @property
+ def body(self):
+ """ The HTTP request body as a seekable buffer object.
+
+ This property returns a copy of the `wsgi.input` stream and should
+ be used instead of `environ['wsgi.input']`.
+ """
+ if 'bottle.body' not in self.environ:
+ maxread = max(0, self.content_length)
+ stream = self.environ['wsgi.input']
+ body = BytesIO() if maxread < MEMFILE_MAX else TemporaryFile(mode='w+b')
+ while maxread > 0:
+ part = stream.read(min(maxread, MEMFILE_MAX))
+ if not part: #TODO: Wrong content_length. Error? Do nothing?
+ break
+ body.write(part)
+ maxread -= len(part)
+ self.environ['wsgi.input'] = body
+ self.environ['bottle.body'] = body
+ self.environ['bottle.body'].seek(0)
+ return self.environ['bottle.body']
+
+ @property
+ def auth(self): #TODO: Tests and docs. Add support for digest. namedtuple?
+ """ HTTP authorisation data as a (user, passwd) tuple. (experimental)
+
+ This implementation currently only supports basic auth and returns
+ None on errors.
+ """
+ return parse_auth(self.environ.get('HTTP_AUTHORIZATION',''))
+
+ @property
+ def COOKIES(self):
+ """ Cookie information parsed into a dictionary.
+
+ Secure cookies are NOT decoded automatically. See
+ Request.get_cookie() for details.
+ """
+ if 'bottle.cookies' not in self.environ:
+ raw_dict = SimpleCookie(self.environ.get('HTTP_COOKIE',''))
+ self.environ['bottle.cookies'] = {}
+ for cookie in raw_dict.itervalues():
+ self.environ['bottle.cookies'][cookie.key] = cookie.value
+ return self.environ['bottle.cookies']
+
+ def get_cookie(self, name, secret=None):
+ """ Return the (decoded) value of a cookie. """
+ value = self.COOKIES.get(name)
+ dec = cookie_decode(value, secret) if secret else None
+ return dec or value
+
+ @property
+ def is_ajax(self):
+ ''' True if the request was generated using XMLHttpRequest '''
+ #TODO: write tests
+ return self.header.get('X-Requested-With') == 'XMLHttpRequest'
+
+
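+# Example sketch (never called by bottle): binding a Request to a hand-built
+# WSGI environ. All environ values are made-up example data.
+def _request_demo():
+    environ = {'PATH_INFO': '/hello', 'REQUEST_METHOD': 'GET',
+               'QUERY_STRING': 'name=world&name=bottle',
+               'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
+    req = Request(environ)
+    # GET is a MultiDict: req.GET['name'] == 'bottle' (last value wins),
+    # req.GET.getall('name') == ['world', 'bottle'].
+    return req.path, req.GET.getall('name'), req.is_ajax
+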
+
+class Response(threading.local):
+ """ Represents a single HTTP response using thread-local attributes.
+ """
+
+ def __init__(self, config=None):
+ self.bind(config)
+
+ def bind(self, config=None):
+ """ Resets the Response object to its factory defaults. """
+ self._COOKIES = None
+ self.status = 200
+ self.headers = HeaderDict()
+ self.content_type = 'text/html; charset=UTF-8'
+ self.config = config or {}
+
+ @property
+ def header(self):
+ depr("Response.header renamed to Response.headers")
+ return self.headers
+
+ def copy(self):
+ ''' Returns a copy of self '''
+ copy = Response(self.config)
+ copy.status = self.status
+ copy.headers = self.headers.copy()
+ copy.content_type = self.content_type
+ return copy
+
+ def wsgiheader(self):
+ ''' Returns a wsgi conform list of header/value pairs. '''
+ for c in self.COOKIES.values():
+ if c.OutputString() not in self.headers.getall('Set-Cookie'):
+ self.headers.append('Set-Cookie', c.OutputString())
+ # rfc2616 section 10.2.3, 10.3.5
+ if self.status in (204, 304) and 'content-type' in self.headers:
+ del self.headers['content-type']
+ if self.status == 304:
+ for h in ('allow', 'content-encoding', 'content-language',
+ 'content-length', 'content-md5', 'content-range',
+ 'content-type', 'last-modified'): # + c-location, expires?
+ if h in self.headers:
+ del self.headers[h]
+ return list(self.headers.iterallitems())
+ headerlist = property(wsgiheader)
+
+ @property
+ def charset(self):
+ """ Return the charset specified in the content-type header.
+
+ This defaults to `UTF-8`.
+ """
+ if 'charset=' in self.content_type:
+ return self.content_type.split('charset=')[-1].split(';')[0].strip()
+ return 'UTF-8'
+
+ @property
+ def COOKIES(self):
+ """ A dict-like SimpleCookie instance. Use Response.set_cookie() instead. """
+ if not self._COOKIES:
+ self._COOKIES = SimpleCookie()
+ return self._COOKIES
+
+ def set_cookie(self, key, value, secret=None, **kargs):
+ """ Add a new cookie with various options.
+
+ If the cookie value is not a string, a secure cookie is created.
+
+ Possible options are:
+ expires, path, comment, domain, max_age, secure, version, httponly
+ See http://de.wikipedia.org/wiki/HTTP-Cookie#Aufbau for details
+ """
+ if not isinstance(value, basestring):
+ if not secret:
+ raise TypeError('Cookies must be strings when secret is not set')
+ value = cookie_encode(value, secret).decode('ascii') #2to3 hack
+ self.COOKIES[key] = value
+ for k, v in kargs.iteritems():
+ self.COOKIES[key][k.replace('_', '-')] = v
+
+ def get_content_type(self):
+ """ Current 'Content-Type' header. """
+ return self.headers['Content-Type']
+
+ def set_content_type(self, value):
+ self.headers['Content-Type'] = value
+
+ content_type = property(get_content_type, set_content_type, None,
+ get_content_type.__doc__)
+
+
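+# Illustrative sketch (not part of the framework): filling a Response and
+# inspecting the resulting WSGI header list. The cookie values are example
+# data only.
+def _response_demo():
+    res = Response()
+    res.content_type = 'text/plain; charset=UTF-8'
+    res.set_cookie('session', 'opaque-id', path='/')
+    # wsgiheader() folds COOKIES into Set-Cookie headers and applies the
+    # rfc2616 rules for 204/304 responses.
+    return res.status, res.charset, res.wsgiheader()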
+
+
+
+
+# Data Structures
+
+class MultiDict(DictMixin):
+ """ A dict that remembers old values for each key """
+ # collections.MutableMapping would be better for Python >= 2.6
+ def __init__(self, *a, **k):
+ self.dict = dict()
+ for k, v in dict(*a, **k).iteritems():
+ self[k] = v
+
+ def __len__(self): return len(self.dict)
+ def __iter__(self): return iter(self.dict)
+ def __contains__(self, key): return key in self.dict
+ def __delitem__(self, key): del self.dict[key]
+ def keys(self): return self.dict.keys()
+ def __getitem__(self, key): return self.get(key, KeyError, -1)
+ def __setitem__(self, key, value): self.append(key, value)
+
+ def append(self, key, value): self.dict.setdefault(key, []).append(value)
+ def replace(self, key, value): self.dict[key] = [value]
+ def getall(self, key): return self.dict.get(key) or []
+
+ def get(self, key, default=None, index=-1):
+ if key not in self.dict and default != KeyError:
+ return [default][index]
+ return self.dict[key][index]
+
+ def iterallitems(self):
+ for key, values in self.dict.iteritems():
+ for value in values:
+ yield key, value
+
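+# Usage sketch, illustration only: MultiDict keeps every value per key while
+# behaving like a plain dict for simple access. `_multidict_demo` is a
+# throwaway name.
+def _multidict_demo():
+    md = MultiDict()
+    md['lang'] = 'python'
+    md['lang'] = 'ruby'       # __setitem__ appends instead of overwriting
+    assert md['lang'] == 'ruby'                 # plain access -> last value
+    assert md.getall('lang') == ['python', 'ruby']
+    return list(md.iterallitems())
+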
+
+class HeaderDict(MultiDict):
+ """ Same as :class:`MultiDict`, but title()s the keys and overwrites by default. """
+ def __contains__(self, key): return MultiDict.__contains__(self, self.httpkey(key))
+ def __getitem__(self, key): return MultiDict.__getitem__(self, self.httpkey(key))
+ def __delitem__(self, key): return MultiDict.__delitem__(self, self.httpkey(key))
+ def __setitem__(self, key, value): self.replace(key, value)
+ def get(self, key, default=None, index=-1): return MultiDict.get(self, self.httpkey(key), default, index)
+ def append(self, key, value): return MultiDict.append(self, self.httpkey(key), str(value))
+ def replace(self, key, value): return MultiDict.replace(self, self.httpkey(key), str(value))
+ def getall(self, key): return MultiDict.getall(self, self.httpkey(key))
+ def httpkey(self, key): return str(key).replace('_','-').title()
+
+
+class AppStack(list):
+ """ A stack implementation. """
+
+ def __call__(self):
+ """ Return the current default app. """
+ return self[-1]
+
+ def push(self, value=None):
+ """ Add a new Bottle instance to the stack """
+ if not isinstance(value, Bottle):
+ value = Bottle()
+ self.append(value)
+ return value
+
+class WSGIFileWrapper(object):
+
+ def __init__(self, fp, buffer_size=1024*64):
+ self.fp, self.buffer_size = fp, buffer_size
+ for attr in ('fileno', 'close', 'read', 'readlines'):
+ if hasattr(fp, attr): setattr(self, attr, getattr(fp, attr))
+
+ def __iter__(self):
+ read, buff = self.fp.read, self.buffer_size
+ while True:
+ part = read(buff)
+ if not part: break
+ yield part
+
+
+
+# Module level functions
+
+# Output filter
+
+def dict2json(d):
+ response.content_type = 'application/json'
+ return json_dumps(d)
+
+
+def abort(code=500, text='Unknown Error: Application stopped.'):
+    """ Aborts execution and causes an HTTP error. """
+ raise HTTPError(code, text)
+
+
+def redirect(url, code=303):
+ """ Aborts execution and causes a 303 redirect """
+ scriptname = request.environ.get('SCRIPT_NAME', '').rstrip('/') + '/'
+ location = urljoin(request.url, urljoin(scriptname, url))
+ raise HTTPResponse("", status=code, header=dict(Location=location))
+
+
+def send_file(*a, **k): #BC 0.6.4
+ """ Raises the output of static_file(). (deprecated) """
+ raise static_file(*a, **k)
+
+
+def static_file(filename, root, guessmime=True, mimetype=None, download=False):
+ """ Opens a file in a safe way and returns a HTTPError object with status
+ code 200, 305, 401 or 404. Sets Content-Type, Content-Length and
+ Last-Modified header. Obeys If-Modified-Since header and HEAD requests.
+ """
+ root = os.path.abspath(root) + os.sep
+ filename = os.path.abspath(os.path.join(root, filename.strip('/\\')))
+ header = dict()
+
+ if not filename.startswith(root):
+ return HTTPError(403, "Access denied.")
+ if not os.path.exists(filename) or not os.path.isfile(filename):
+ return HTTPError(404, "File does not exist.")
+ if not os.access(filename, os.R_OK):
+ return HTTPError(403, "You do not have permission to access this file.")
+
+ if not mimetype and guessmime:
+ header['Content-Type'] = mimetypes.guess_type(filename)[0]
+ else:
+ header['Content-Type'] = mimetype if mimetype else 'text/plain'
+
+ if download == True:
+ download = os.path.basename(filename)
+ if download:
+ header['Content-Disposition'] = 'attachment; filename="%s"' % download
+
+ stats = os.stat(filename)
+ lm = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime))
+ header['Last-Modified'] = lm
+ ims = request.environ.get('HTTP_IF_MODIFIED_SINCE')
+ if ims:
+ ims = ims.split(";")[0].strip() # IE sends "<date>; length=146"
+ ims = parse_date(ims)
+ if ims is not None and ims >= int(stats.st_mtime):
+ header['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
+ return HTTPResponse(status=304, header=header)
+ header['Content-Length'] = stats.st_size
+ if request.method == 'HEAD':
+ return HTTPResponse('', header=header)
+ else:
+ return HTTPResponse(open(filename, 'rb'), header=header)
+
+
+
+
+
+
+# Utilities
+
+def debug(mode=True):
+ """ Change the debug level.
+ There is only one debug level supported at the moment."""
+ global DEBUG
+ DEBUG = bool(mode)
+
+
+def parse_date(ims):
+ """ Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch. """
+ try:
+ ts = email.utils.parsedate_tz(ims)
+ return time.mktime(ts[:8] + (0,)) - (ts[9] or 0) - time.timezone
+ except (TypeError, ValueError, IndexError):
+ return None
+
+
+def parse_auth(header):
+ """ Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None"""
+ try:
+ method, data = header.split(None, 1)
+ if method.lower() == 'basic':
+ name, pwd = base64.b64decode(data).split(':', 1)
+ return name, pwd
+ except (KeyError, ValueError, TypeError):
+ return None
+
+
+def _lscmp(a, b):
+    ''' Compares two strings in a cryptographically safe way:
+ Runtime is not affected by a common prefix. '''
+ return not sum(0 if x==y else 1 for x, y in zip(a, b)) and len(a) == len(b)
+
+
+def cookie_encode(data, key):
+ ''' Encode and sign a pickle-able object. Return a string '''
+ msg = base64.b64encode(pickle.dumps(data, -1))
+ sig = base64.b64encode(hmac.new(key, msg).digest())
+ return tob('!') + sig + tob('?') + msg
+
+
+def cookie_decode(data, key):
+ ''' Verify and decode an encoded string. Return an object or None'''
+ data = tob(data)
+ if cookie_is_encoded(data):
+ sig, msg = data.split(tob('?'), 1)
+ if _lscmp(sig[1:], base64.b64encode(hmac.new(key, msg).digest())):
+ return pickle.loads(base64.b64decode(msg))
+ return None
+
+
+def cookie_is_encoded(data):
+    ''' Return True if the argument looks like an encoded cookie.'''
+ return bool(data.startswith(tob('!')) and tob('?') in data)
+
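+# Illustrative sketch (never called by bottle): a signed-cookie round trip
+# using the helpers above. The secret is an example value only.
+def _cookie_demo():
+    secret = 'example-secret'
+    token = cookie_encode({'user': 'admin'}, secret)
+    assert cookie_is_encoded(token)
+    # A wrong key fails the HMAC check and yields None instead of data.
+    assert cookie_decode(token, 'wrong-secret') is None
+    return cookie_decode(token, secret)  # -> {'user': 'admin'}
+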
+
+def tonativefunc(enc='utf-8'):
+ ''' Returns a function that turns everything into 'native' strings using enc '''
+ if sys.version_info >= (3,0,0):
+ return lambda x: x.decode(enc) if isinstance(x, bytes) else str(x)
+ return lambda x: x.encode(enc) if isinstance(x, unicode) else str(x)
+
+
+def yieldroutes(func):
+ """ Return a generator for routes that match the signature (name, args)
+ of the func parameter. This may yield more than one route if the function
+ takes optional keyword arguments. The output is best described by example:
+ a() -> '/a'
+ b(x, y) -> '/b/:x/:y'
+ c(x, y=5) -> '/c/:x' and '/c/:x/:y'
+ d(x=5, y=6) -> '/d' and '/d/:x' and '/d/:x/:y'
+ """
+ path = func.__name__.replace('__','/').lstrip('/')
+ spec = inspect.getargspec(func)
+ argc = len(spec[0]) - len(spec[3] or [])
+ path += ('/:%s' * argc) % tuple(spec[0][:argc])
+ yield path
+ for arg in spec[0][argc:]:
+ path += '/:%s' % arg
+ yield path
+
+def path_shift(script_name, path_info, shift=1):
+ ''' Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa.
+
+ :return: The modified paths.
+ :param script_name: The SCRIPT_NAME path.
+        :param path_info: The PATH_INFO path.
+ :param shift: The number of path fragments to shift. May be negative to
+          change the shift direction. (default: 1)
+ '''
+ if shift == 0: return script_name, path_info
+ pathlist = path_info.strip('/').split('/')
+ scriptlist = script_name.strip('/').split('/')
+ if pathlist and pathlist[0] == '': pathlist = []
+ if scriptlist and scriptlist[0] == '': scriptlist = []
+ if shift > 0 and shift <= len(pathlist):
+ moved = pathlist[:shift]
+ scriptlist = scriptlist + moved
+ pathlist = pathlist[shift:]
+ elif shift < 0 and shift >= -len(scriptlist):
+ moved = scriptlist[shift:]
+ pathlist = moved + pathlist
+ scriptlist = scriptlist[:shift]
+ else:
+ empty = 'SCRIPT_NAME' if shift < 0 else 'PATH_INFO'
+ raise AssertionError("Cannot shift. Nothing left from %s" % empty)
+ new_script_name = '/' + '/'.join(scriptlist)
+ new_path_info = '/' + '/'.join(pathlist)
+ if path_info.endswith('/') and pathlist: new_path_info += '/'
+ return new_script_name, new_path_info
+
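+# Usage sketch, for illustration only: shifting one path fragment from
+# PATH_INFO to SCRIPT_NAME, as Bottle.mount() does internally.
+def _path_shift_demo():
+    script, path = path_shift('/app', '/blog/entry/42', 1)
+    assert (script, path) == ('/app/blog', '/entry/42')
+    # A negative shift moves fragments back from SCRIPT_NAME to PATH_INFO.
+    return path_shift(script, path, -1)  # -> ('/app', '/blog/entry/42')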
+
+
+
+# Decorators
+#TODO: Replace default_app() with app()
+
+def validate(**vkargs):
+ """
+ Validates and manipulates keyword arguments by user defined callables.
+ Handles ValueError and missing arguments by raising HTTPError(403).
+ """
+ def decorator(func):
+ def wrapper(**kargs):
+ for key, value in vkargs.iteritems():
+ if key not in kargs:
+ abort(403, 'Missing parameter: %s' % key)
+ try:
+ kargs[key] = value(kargs[key])
+ except ValueError:
+ abort(403, 'Wrong parameter format for: %s' % key)
+ return func(**kargs)
+ return wrapper
+ return decorator
+
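+# Illustrative sketch (not used by the framework): converting and checking
+# URL parameters with @validate. The handler and its names are examples only.
+def _validate_demo():
+    @validate(item_id=int)
+    def show_item(item_id):
+        return 'item #%d' % item_id
+    # Non-numeric input triggers abort(403, ...) instead.
+    return show_item(item_id='42')  # -> 'item #42'
+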
+
+route = functools.wraps(Bottle.route)(lambda *a, **ka: app().route(*a, **ka))
+get = functools.wraps(Bottle.get)(lambda *a, **ka: app().get(*a, **ka))
+post = functools.wraps(Bottle.post)(lambda *a, **ka: app().post(*a, **ka))
+put = functools.wraps(Bottle.put)(lambda *a, **ka: app().put(*a, **ka))
+delete = functools.wraps(Bottle.delete)(lambda *a, **ka: app().delete(*a, **ka))
+error = functools.wraps(Bottle.error)(lambda *a, **ka: app().error(*a, **ka))
+url = functools.wraps(Bottle.get_url)(lambda *a, **ka: app().get_url(*a, **ka))
+mount = functools.wraps(Bottle.mount)(lambda *a, **ka: app().mount(*a, **ka))
+
+def default():
+ depr("The default() decorator is deprecated. Use @error(404) instead.")
+ return error(404)
+
+
+
+
+
+
+# Server adapter
+
+class ServerAdapter(object):
+ quiet = False
+
+ def __init__(self, host='127.0.0.1', port=8080, **kargs):
+ self.options = kargs
+ self.host = host
+ self.port = int(port)
+
+ def run(self, handler): # pragma: no cover
+ pass
+
+ def __repr__(self):
+ args = ', '.join(['%s=%s'%(k,repr(v)) for k, v in self.options.items()])
+ return "%s(%s)" % (self.__class__.__name__, args)
+
+
+class CGIServer(ServerAdapter):
+ quiet = True
+ def run(self, handler): # pragma: no cover
+ from wsgiref.handlers import CGIHandler
+ CGIHandler().run(handler) # Just ignore host and port here
+
+
+class FlupFCGIServer(ServerAdapter):
+ def run(self, handler): # pragma: no cover
+ import flup.server.fcgi
+ flup.server.fcgi.WSGIServer(handler, bindAddress=(self.host, self.port)).run()
+
+
+class WSGIRefServer(ServerAdapter):
+ def run(self, handler): # pragma: no cover
+ from wsgiref.simple_server import make_server, WSGIRequestHandler
+ if self.quiet:
+ class QuietHandler(WSGIRequestHandler):
+ def log_request(*args, **kw): pass
+ self.options['handler_class'] = QuietHandler
+ srv = make_server(self.host, self.port, handler, **self.options)
+ srv.serve_forever()
+
+
+class CherryPyServer(ServerAdapter):
+ def run(self, handler): # pragma: no cover
+ from cherrypy import wsgiserver
+ server = wsgiserver.CherryPyWSGIServer((self.host, self.port), handler)
+ server.start()
+
+
+class PasteServer(ServerAdapter):
+ def run(self, handler): # pragma: no cover
+ from paste import httpserver
+ from paste.translogger import TransLogger
+ app = TransLogger(handler)
+ httpserver.serve(app, host=self.host, port=str(self.port), **self.options)
+
+
+class FapwsServer(ServerAdapter):
+ """
+    Extremely fast web server using libev.
+ See http://william-os4y.livejournal.com/
+ """
+ def run(self, handler): # pragma: no cover
+ import fapws._evwsgi as evwsgi
+ from fapws import base
+ evwsgi.start(self.host, self.port)
+ evwsgi.set_base_module(base)
+ def app(environ, start_response):
+ environ['wsgi.multiprocess'] = False
+ return handler(environ, start_response)
+ evwsgi.wsgi_cb(('',app))
+ evwsgi.run()
+
+
+class TornadoServer(ServerAdapter):
+ """ Untested. As described here:
+ http://github.com/facebook/tornado/blob/master/tornado/wsgi.py#L187 """
+ def run(self, handler): # pragma: no cover
+ import tornado.wsgi
+ import tornado.httpserver
+ import tornado.ioloop
+ container = tornado.wsgi.WSGIContainer(handler)
+ server = tornado.httpserver.HTTPServer(container)
+ server.listen(port=self.port)
+ tornado.ioloop.IOLoop.instance().start()
+
+
+class AppEngineServer(ServerAdapter):
+ """ Untested. """
+ quiet = True
+ def run(self, handler):
+ from google.appengine.ext.webapp import util
+ util.run_wsgi_app(handler)
+
+
+class TwistedServer(ServerAdapter):
+ """ Untested. """
+ def run(self, handler):
+ from twisted.web import server, wsgi
+ from twisted.python.threadpool import ThreadPool
+ from twisted.internet import reactor
+ thread_pool = ThreadPool()
+ thread_pool.start()
+ reactor.addSystemEventTrigger('after', 'shutdown', thread_pool.stop)
+ factory = server.Site(wsgi.WSGIResource(reactor, thread_pool, handler))
+ reactor.listenTCP(self.port, factory, interface=self.host)
+ reactor.run()
+
+
+class DieselServer(ServerAdapter):
+ """ Untested. """
+ def run(self, handler):
+ from diesel.protocols.wsgi import WSGIApplication
+ app = WSGIApplication(handler, port=self.port)
+ app.run()
+
+
+class GunicornServer(ServerAdapter):
+ """ Untested. """
+ def run(self, handler):
+ import gunicorn.arbiter
+ gunicorn.arbiter.Arbiter((self.host, self.port), 4, handler).run()
+
+
+class EventletServer(ServerAdapter):
+ """ Untested """
+ def run(self, handler):
+ from eventlet import wsgi, listen
+ wsgi.server(listen((self.host, self.port)), handler)
+
+
+class RocketServer(ServerAdapter):
+ """ Untested. As requested in issue 63
+ http://github.com/defnull/bottle/issues/#issue/63 """
+ def run(self, handler):
+ from rocket import Rocket
+ server = Rocket((self.host, self.port), 'wsgi', { 'wsgi_app' : handler })
+ server.start()
+
+
+class AutoServer(ServerAdapter):
+ """ Untested. """
+ adapters = [CherryPyServer, PasteServer, TwistedServer, WSGIRefServer]
+ def run(self, handler):
+ for sa in self.adapters:
+ try:
+ return sa(self.host, self.port, **self.options).run(handler)
+ except ImportError:
+ pass
+
+
+def run(app=None, server=WSGIRefServer, host='127.0.0.1', port=8080,
+ interval=1, reloader=False, quiet=False, **kargs):
+ """ Runs bottle as a web server. """
+ app = app if app else default_app()
+ # Instantiate server, if it is a class instead of an instance
+ if isinstance(server, type):
+ server = server(host=host, port=port, **kargs)
+ if not isinstance(server, ServerAdapter):
+ raise RuntimeError("Server must be a subclass of WSGIAdapter")
+ server.quiet = server.quiet or quiet
+ if not server.quiet and not os.environ.get('BOTTLE_CHILD'):
+ print "Bottle server starting up (using %s)..." % repr(server)
+ print "Listening on http://%s:%d/" % (server.host, server.port)
+ print "Use Ctrl-C to quit."
+ print
+ try:
+ if reloader:
+ interval = min(interval, 1)
+ if os.environ.get('BOTTLE_CHILD'):
+ _reloader_child(server, app, interval)
+ else:
+ _reloader_observer(server, app, interval)
+ else:
+ server.run(app)
+ except KeyboardInterrupt: pass
+ if not server.quiet and not os.environ.get('BOTTLE_CHILD'):
+ print "Shutting down..."
+
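+# Illustrative sketch (never called automatically): starting the default app
+# with an alternative server adapter and the auto-reloader. Host and port are
+# example values; calling this blocks until the server stops.
+def _run_demo():
+    run(server=PasteServer, host='0.0.0.0', port=8080, reloader=True)
+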
+
+class FileCheckerThread(threading.Thread):
+ ''' Thread that periodically checks for changed module files. '''
+
+ def __init__(self, lockfile, interval):
+ threading.Thread.__init__(self)
+ self.lockfile, self.interval = lockfile, interval
+        #1: lockfile too old; 2: lockfile missing
+ #3: module file changed; 5: external exit
+ self.status = 0
+
+ def run(self):
+ exists = os.path.exists
+ mtime = lambda path: os.stat(path).st_mtime
+ files = dict()
+ for module in sys.modules.values():
+ try:
+ path = inspect.getsourcefile(module)
+ if path and exists(path): files[path] = mtime(path)
+ except TypeError: pass
+ while not self.status:
+ for path, lmtime in files.iteritems():
+ if not exists(path) or mtime(path) > lmtime:
+ self.status = 3
+ if not exists(self.lockfile):
+ self.status = 2
+ elif mtime(self.lockfile) < time.time() - self.interval - 5:
+ self.status = 1
+ if not self.status:
+ time.sleep(self.interval)
+ if self.status != 5:
+ thread.interrupt_main()
+
+
+def _reloader_child(server, app, interval):
+ ''' Start the server and check for modified files in a background thread.
+ As soon as an update is detected, KeyboardInterrupt is thrown in
+        the main thread to exit the server loop. The process exits with status
+        code 3 to request a reload by the observer process. If the lockfile
+        is not modified within 2*interval seconds or is missing, we assume that the
+ observer process died and exit with status code 1 or 2.
+ '''
+ lockfile = os.environ.get('BOTTLE_LOCKFILE')
+ bgcheck = FileCheckerThread(lockfile, interval)
+ try:
+ bgcheck.start()
+ server.run(app)
+ except KeyboardInterrupt, e: pass
+ bgcheck.status, status = 5, bgcheck.status
+ bgcheck.join() # bgcheck.status == 5 --> silent exit
+ if status: sys.exit(status)
+
+
+def _reloader_observer(server, app, interval):
+ ''' Start a child process with identical commandline arguments and restart
+        it as long as it exits with status code 3. Also create a lockfile and
+ touch it (update mtime) every interval seconds.
+ '''
+ fd, lockfile = tempfile.mkstemp(prefix='bottle-reloader.', suffix='.lock')
+ os.close(fd) # We only need this file to exist. We never write to it
+ try:
+ while os.path.exists(lockfile):
+ args = [sys.executable] + sys.argv
+ environ = os.environ.copy()
+ environ['BOTTLE_CHILD'] = 'true'
+ environ['BOTTLE_LOCKFILE'] = lockfile
+ p = subprocess.Popen(args, env=environ)
+ while p.poll() is None: # Busy wait...
+ os.utime(lockfile, None) # I am alive!
+ time.sleep(interval)
+ if p.poll() != 3:
+ if os.path.exists(lockfile): os.unlink(lockfile)
+ sys.exit(p.poll())
+ elif not server.quiet:
+ print "Reloading server..."
+ except KeyboardInterrupt: pass
+ if os.path.exists(lockfile): os.unlink(lockfile)
+
+
+
+# Templates
+
+class TemplateError(HTTPError):
+ def __init__(self, message):
+ HTTPError.__init__(self, 500, message)
+
+
+class BaseTemplate(object):
+ """ Base class and minimal API for template adapters """
+ extentions = ['tpl','html','thtml','stpl']
+ settings = {} #used in prepare()
+ defaults = {} #used in render()
+
+ def __init__(self, source=None, name=None, lookup=[], encoding='utf8', **settings):
+ """ Create a new template.
+ If the source parameter (str or buffer) is missing, the name argument
+ is used to guess a template filename. Subclasses can assume that
+ self.source and/or self.filename are set. Both are strings.
+ The lookup, encoding and settings parameters are stored as instance
+ variables.
+ The lookup parameter stores a list containing directory paths.
+ The encoding parameter should be used to decode byte strings or files.
+ The settings parameter contains a dict for engine-specific settings.
+ """
+ self.name = name
+ self.source = source.read() if hasattr(source, 'read') else source
+ self.filename = source.filename if hasattr(source, 'filename') else None
+ self.lookup = map(os.path.abspath, lookup)
+ self.encoding = encoding
+ self.settings = self.settings.copy() # Copy from class variable
+ self.settings.update(settings) # Apply
+ if not self.source and self.name:
+ self.filename = self.search(self.name, self.lookup)
+ if not self.filename:
+ raise TemplateError('Template %s not found.' % repr(name))
+ if not self.source and not self.filename:
+ raise TemplateError('No template specified.')
+ self.prepare(**self.settings)
+
+ @classmethod
+ def search(cls, name, lookup=[]):
+ """ Search name in all directories specified in lookup.
+ First without, then with common extensions. Return first hit. """
+ if os.path.isfile(name): return name
+ for spath in lookup:
+ fname = os.path.join(spath, name)
+ if os.path.isfile(fname):
+ return fname
+ for ext in cls.extentions:
+ if os.path.isfile('%s.%s' % (fname, ext)):
+ return '%s.%s' % (fname, ext)
+
+ @classmethod
+ def global_config(cls, key, *args):
+ ''' This reads or sets the global settings stored in class.settings. '''
+ if args:
+ cls.settings[key] = args[0]
+ else:
+ return cls.settings[key]
+
+ def prepare(self, **options):
+ """ Run preparations (parsing, caching, ...).
+ It should be possible to call this again to refresh a template or to
+ update settings.
+ """
+ raise NotImplementedError
+
+ def render(self, **args):
+ """ Render the template with the specified local variables and return
+ a single byte or unicode string. If it is a byte string, the encoding
+ must match self.encoding. This method must be thread-safe!
+ """
+ raise NotImplementedError
+
+
+class MakoTemplate(BaseTemplate):
+ def prepare(self, **options):
+ from mako.template import Template
+ from mako.lookup import TemplateLookup
+ options.update({'input_encoding':self.encoding})
+ #TODO: This is a hack... http://github.com/defnull/bottle/issues#issue/8
+ mylookup = TemplateLookup(directories=['.']+self.lookup, **options)
+ if self.source:
+ self.tpl = Template(self.source, lookup=mylookup)
+        else: #mako cannot guess extensions. We can, but only at top level...
+ name = self.name
+ if not os.path.splitext(name)[1]:
+ name += os.path.splitext(self.filename)[1]
+ self.tpl = mylookup.get_template(name)
+
+ def render(self, **args):
+ _defaults = self.defaults.copy()
+ _defaults.update(args)
+ return self.tpl.render(**_defaults)
+
+
+class CheetahTemplate(BaseTemplate):
+ def prepare(self, **options):
+ from Cheetah.Template import Template
+ self.context = threading.local()
+ self.context.vars = {}
+ options['searchList'] = [self.context.vars]
+ if self.source:
+ self.tpl = Template(source=self.source, **options)
+ else:
+ self.tpl = Template(file=self.filename, **options)
+
+ def render(self, **args):
+ self.context.vars.update(self.defaults)
+ self.context.vars.update(args)
+ out = str(self.tpl)
+ self.context.vars.clear()
+ return [out]
+
+
+class Jinja2Template(BaseTemplate):
+ def prepare(self, filters=None, tests=None, **kwargs):
+ from jinja2 import Environment, FunctionLoader
+ if 'prefix' in kwargs: # TODO: to be removed after a while
+ raise RuntimeError('The keyword argument `prefix` has been removed. '
+ 'Use the full jinja2 environment name line_statement_prefix instead.')
+ self.env = Environment(loader=FunctionLoader(self.loader), **kwargs)
+ if filters: self.env.filters.update(filters)
+ if tests: self.env.tests.update(tests)
+ if self.source:
+ self.tpl = self.env.from_string(self.source)
+ else:
+ self.tpl = self.env.get_template(self.filename)
+
+ def render(self, **args):
+ _defaults = self.defaults.copy()
+ _defaults.update(args)
+ return self.tpl.render(**_defaults).encode("utf-8")
+
+ def loader(self, name):
+ fname = self.search(name, self.lookup)
+ if fname:
+ with open(fname, "rb") as f:
+ return f.read().decode(self.encoding)
+
+
+class SimpleTemplate(BaseTemplate):
+ blocks = ('if','elif','else','try','except','finally','for','while','with','def','class')
+ dedent_blocks = ('elif', 'else', 'except', 'finally')
+
+ def prepare(self, escape_func=cgi.escape, noescape=False):
+ self.cache = {}
+ if self.source:
+ self.code = self.translate(self.source)
+ self.co = compile(self.code, '<string>', 'exec')
+ else:
+ self.code = self.translate(open(self.filename).read())
+ self.co = compile(self.code, self.filename, 'exec')
+ enc = self.encoding
+ self._str = lambda x: touni(x, enc)
+ self._escape = lambda x: escape_func(touni(x, enc))
+ if noescape:
+ self._str, self._escape = self._escape, self._str
+
+ def translate(self, template):
+ stack = [] # Current Code indentation
+ lineno = 0 # Current line of code
+ ptrbuffer = [] # Buffer for printable strings and token tuple instances
+ codebuffer = [] # Buffer for generated python code
+ touni = functools.partial(unicode, encoding=self.encoding)
+ multiline = dedent = False
+
+ def yield_tokens(line):
+ for i, part in enumerate(re.split(r'\{\{(.*?)\}\}', line)):
+ if i % 2:
+ if part.startswith('!'): yield 'RAW', part[1:]
+ else: yield 'CMD', part
+ else: yield 'TXT', part
+
+ def split_comment(codeline):
+ """ Removes comments from a line of code. """
+ line = codeline.splitlines()[0]
+ try:
+ tokens = list(tokenize.generate_tokens(iter(line).next))
+ except tokenize.TokenError:
+ return line.rsplit('#',1) if '#' in line else (line, '')
+ for token in tokens:
+ if token[0] == tokenize.COMMENT:
+ start, end = token[2][1], token[3][1]
+ return codeline[:start] + codeline[end:], codeline[start:end]
+ return line, ''
+
+ def flush(): # Flush the ptrbuffer
+ if not ptrbuffer: return
+ cline = ''
+ for line in ptrbuffer:
+ for token, value in line:
+ if token == 'TXT': cline += repr(value)
+ elif token == 'RAW': cline += '_str(%s)' % value
+ elif token == 'CMD': cline += '_escape(%s)' % value
+ cline += ', '
+ cline = cline[:-2] + '\\\n'
+ cline = cline[:-2]
+ if cline[:-1].endswith('\\\\\\\\\\n'):
+ cline = cline[:-7] + cline[-1] # 'nobr\\\\\n' --> 'nobr'
+ cline = '_printlist([' + cline + '])'
+ del ptrbuffer[:] # Do this before calling code() again
+ code(cline)
+
+ def code(stmt):
+ for line in stmt.splitlines():
+ codebuffer.append(' ' * len(stack) + line.strip())
+
+ for line in template.splitlines(True):
+ lineno += 1
+ line = line if isinstance(line, unicode)\
+ else unicode(line, encoding=self.encoding)
+ if lineno <= 2:
+ m = re.search(r"%.*coding[:=]\s*([-\w\.]+)", line)
+ if m: self.encoding = m.group(1)
+ if m: line = line.replace('coding','coding (removed)')
+ if line.strip()[:2].count('%') == 1:
+ line = line.split('%',1)[1].lstrip() # Full line following the %
+ cline = split_comment(line)[0].strip()
+ cmd = re.split(r'[^a-zA-Z0-9_]', cline)[0]
+            flush() ## encoding (TODO: why?)
+ if cmd in self.blocks or multiline:
+ cmd = multiline or cmd
+ dedent = cmd in self.dedent_blocks # "else:"
+ if dedent and not oneline and not multiline:
+ cmd = stack.pop()
+ code(line)
+ oneline = not cline.endswith(':') # "if 1: pass"
+ multiline = cmd if cline.endswith('\\') else False
+ if not oneline and not multiline:
+ stack.append(cmd)
+ elif cmd == 'end' and stack:
+ code('#end(%s) %s' % (stack.pop(), line.strip()[3:]))
+ elif cmd == 'include':
+ p = cline.split(None, 2)[1:]
+ if len(p) == 2:
+ code("_=_include(%s, _stdout, %s)" % (repr(p[0]), p[1]))
+ elif p:
+ code("_=_include(%s, _stdout)" % repr(p[0]))
+ else: # Empty %include -> reverse of %rebase
+ code("_printlist(_base)")
+ elif cmd == 'rebase':
+ p = cline.split(None, 2)[1:]
+ if len(p) == 2:
+ code("globals()['_rebase']=(%s, dict(%s))" % (repr(p[0]), p[1]))
+ elif p:
+ code("globals()['_rebase']=(%s, {})" % repr(p[0]))
+ else:
+ code(line)
+ else: # Line starting with text (not '%') or '%%' (escaped)
+ if line.strip().startswith('%%'):
+ line = line.replace('%%', '%', 1)
+ ptrbuffer.append(yield_tokens(line))
+ flush()
+ return '\n'.join(codebuffer) + '\n'
+
+ def subtemplate(self, _name, _stdout, **args):
+ if _name not in self.cache:
+ self.cache[_name] = self.__class__(name=_name, lookup=self.lookup)
+ return self.cache[_name].execute(_stdout, **args)
+
+ def execute(self, _stdout, **args):
+ env = self.defaults.copy()
+ env.update({'_stdout': _stdout, '_printlist': _stdout.extend,
+ '_include': self.subtemplate, '_str': self._str,
+ '_escape': self._escape})
+ env.update(args)
+ eval(self.co, env)
+ if '_rebase' in env:
+ subtpl, rargs = env['_rebase']
+ subtpl = self.__class__(name=subtpl, lookup=self.lookup)
+ rargs['_base'] = _stdout[:] #copy stdout
+ del _stdout[:] # clear stdout
+ return subtpl.execute(_stdout, **rargs)
+ return env
+
+ def render(self, **args):
+ """ Render the template using keyword arguments as local variables. """
+ stdout = []
+ self.execute(stdout, **args)
+ return ''.join(stdout)
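+
+# A minimal sketch (illustrative, not part of the original source) of the syntax
+# handled by translate() above: {{expr}} is escaped output, {{!expr}} is raw output,
+# a leading '%' marks a line of Python code, and '%end' closes a block.
+#
+#   tpl = SimpleTemplate('Hello {{name}}!\n%for i in items:\nitem {{i}}\n%end\n')
+#   tpl.render(name='World', items=[1, 2])
+#   # -> roughly u'Hello World!\nitem 1\nitem 2\n'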
+
+
+def template(tpl, template_adapter=SimpleTemplate, **kwargs):
+ '''
+ Get a rendered template as a string iterator.
+ You can use a name, a filename or a template string as first parameter.
+ '''
+ if tpl not in TEMPLATES or DEBUG:
+ settings = kwargs.get('template_settings',{})
+ lookup = kwargs.get('template_lookup', TEMPLATE_PATH)
+ if isinstance(tpl, template_adapter):
+ TEMPLATES[tpl] = tpl
+ if settings: TEMPLATES[tpl].prepare(**settings)
+ elif "\n" in tpl or "{" in tpl or "%" in tpl or '$' in tpl:
+ TEMPLATES[tpl] = template_adapter(source=tpl, lookup=lookup, **settings)
+ else:
+ TEMPLATES[tpl] = template_adapter(name=tpl, lookup=lookup, **settings)
+ if not TEMPLATES[tpl]:
+ abort(500, 'Template (%s) not found' % tpl)
+ return TEMPLATES[tpl].render(**kwargs)
+
+mako_template = functools.partial(template, template_adapter=MakoTemplate)
+cheetah_template = functools.partial(template, template_adapter=CheetahTemplate)
+jinja2_template = functools.partial(template, template_adapter=Jinja2Template)
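+
+# A minimal usage sketch (illustrative, not part of the original source): the helper
+# accepts a template name, an inline source string, or a prepared template object.
+# 'hello' is a hypothetical name resolved against TEMPLATE_PATH (e.g. ./views/hello.tpl),
+# and the mako/cheetah/jinja2 variants need their respective engines installed.
+#
+#   template('hello', name='World')            # lookup by name on TEMPLATE_PATH
+#   template('Value: {{value}}', value=42)     # inline SimpleTemplate source
+#   mako_template('${name}', name='World')     # same call, Mako adapter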
+
+def view(tpl_name, **defaults):
+ ''' Decorator: renders a template for a handler.
+ The handler can control its behavior like that:
+
+ - return a dict of template vars to fill out the template
+ - return something other than a dict and the view decorator will not
+ process the template, but return the handler result as is.
+ This includes returning a HTTPResponse(dict) to get,
+ for instance, JSON with autojson or other castfilters
+ '''
+ def decorator(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ result = func(*args, **kwargs)
+ if isinstance(result, (dict, DictMixin)):
+ tplvars = defaults.copy()
+ tplvars.update(result)
+ return template(tpl_name, **tplvars)
+ return result
+ return wrapper
+ return decorator
+
+mako_view = functools.partial(view, template_adapter=MakoTemplate)
+cheetah_view = functools.partial(view, template_adapter=CheetahTemplate)
+jinja2_view = functools.partial(view, template_adapter=Jinja2Template)
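+
+# A minimal usage sketch (illustrative, not part of the original source): a handler
+# wrapped with view(); 'user_page' is a hypothetical template name.
+#
+#   @route('/user/:name')
+#   @view('user_page', title='User page')
+#   def show_user(name):
+#       return dict(name=name)   # merged over the decorator defaults and rendered
+#   # Returning anything other than a dict (e.g. an HTTPResponse) skips the template.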
+
+
+
+
+
+
+# Module initialization and configuration
+
+TEMPLATE_PATH = ['./', './views/']
+TEMPLATES = {}
+DEBUG = False
+MEMFILE_MAX = 1024*100
+HTTP_CODES = {
+ 100: 'CONTINUE',
+ 101: 'SWITCHING PROTOCOLS',
+ 200: 'OK',
+ 201: 'CREATED',
+ 202: 'ACCEPTED',
+ 203: 'NON-AUTHORITATIVE INFORMATION',
+ 204: 'NO CONTENT',
+ 205: 'RESET CONTENT',
+ 206: 'PARTIAL CONTENT',
+ 300: 'MULTIPLE CHOICES',
+ 301: 'MOVED PERMANENTLY',
+ 302: 'FOUND',
+ 303: 'SEE OTHER',
+ 304: 'NOT MODIFIED',
+ 305: 'USE PROXY',
+ 306: 'RESERVED',
+ 307: 'TEMPORARY REDIRECT',
+ 400: 'BAD REQUEST',
+ 401: 'UNAUTHORIZED',
+ 402: 'PAYMENT REQUIRED',
+ 403: 'FORBIDDEN',
+ 404: 'NOT FOUND',
+ 405: 'METHOD NOT ALLOWED',
+ 406: 'NOT ACCEPTABLE',
+ 407: 'PROXY AUTHENTICATION REQUIRED',
+ 408: 'REQUEST TIMEOUT',
+ 409: 'CONFLICT',
+ 410: 'GONE',
+ 411: 'LENGTH REQUIRED',
+ 412: 'PRECONDITION FAILED',
+ 413: 'REQUEST ENTITY TOO LARGE',
+ 414: 'REQUEST-URI TOO LONG',
+ 415: 'UNSUPPORTED MEDIA TYPE',
+ 416: 'REQUESTED RANGE NOT SATISFIABLE',
+ 417: 'EXPECTATION FAILED',
+ 500: 'INTERNAL SERVER ERROR',
+ 501: 'NOT IMPLEMENTED',
+ 502: 'BAD GATEWAY',
+ 503: 'SERVICE UNAVAILABLE',
+ 504: 'GATEWAY TIMEOUT',
+ 505: 'HTTP VERSION NOT SUPPORTED',
+}
+""" A dict of known HTTP error and status codes """
+
+
+
+ERROR_PAGE_TEMPLATE = SimpleTemplate("""
+%try:
+ %from bottle import DEBUG, HTTP_CODES, request
+ %status_name = HTTP_CODES.get(e.status, 'Unknown').title()
+ <!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN">
+ <html>
+ <head>
+ <title>Error {{e.status}}: {{status_name}}</title>
+ <style type="text/css">
+ html {background-color: #eee; font-family: sans;}
+ body {background-color: #fff; border: 1px solid #ddd; padding: 15px; margin: 15px;}
+ pre {background-color: #eee; border: 1px solid #ddd; padding: 5px;}
+ </style>
+ </head>
+ <body>
+ <h1>Error {{e.status}}: {{status_name}}</h1>
+ <p>Sorry, the requested URL <tt>{{request.url}}</tt> caused an error:</p>
+ <pre>{{str(e.output)}}</pre>
+ %if DEBUG and e.exception:
+ <h2>Exception:</h2>
+ <pre>{{repr(e.exception)}}</pre>
+ %end
+ %if DEBUG and e.traceback:
+ <h2>Traceback:</h2>
+ <pre>{{e.traceback}}</pre>
+ %end
+ </body>
+ </html>
+%except ImportError:
+ <b>ImportError:</b> Could not generate the error page. Please add bottle to sys.path
+%end
+""")
+""" The HTML template used for error messages """
+
+request = Request()
+""" Whenever a page is requested, the :class:`Bottle` WSGI handler stores
+metadata about the current request into this instance of :class:`Request`.
+It is thread-safe and can be accessed from within handler functions. """
+
+response = Response()
+""" The :class:`Bottle` WSGI handler uses metadata assigned to this instance
+of :class:`Response` to generate the WSGI response. """
+
+local = threading.local()
+""" Thread-local namespace. Not used by Bottle, but could get handy """
+
+# Initialize app stack (create first empty Bottle app)
+# BC: 0.6.4 and needed for run()
+app = default_app = AppStack()
+app.push()
diff --git a/module/lib/wsgiserver/LICENSE.txt b/module/lib/wsgiserver/LICENSE.txt
new file mode 100644
index 000000000..a15165ee2
--- /dev/null
+++ b/module/lib/wsgiserver/LICENSE.txt
@@ -0,0 +1,25 @@
+Copyright (c) 2004-2007, CherryPy Team (team@cherrypy.org)
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+ * Neither the name of the CherryPy Team nor the names of its contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/module/lib/wsgiserver/__init__.py b/module/lib/wsgiserver/__init__.py
new file mode 100644
index 000000000..c380e18b0
--- /dev/null
+++ b/module/lib/wsgiserver/__init__.py
@@ -0,0 +1,1794 @@
+"""A high-speed, production ready, thread pooled, generic WSGI server.
+
+Simplest example on how to use this module directly
+(without using CherryPy's application machinery):
+
+ from cherrypy import wsgiserver
+
+ def my_crazy_app(environ, start_response):
+ status = '200 OK'
+ response_headers = [('Content-type','text/plain')]
+ start_response(status, response_headers)
+ return ['Hello world!\n']
+
+ server = wsgiserver.CherryPyWSGIServer(
+ ('0.0.0.0', 8070), my_crazy_app,
+ server_name='www.cherrypy.example')
+
+The CherryPy WSGI server can serve as many WSGI applications
+as you want in one instance by using a WSGIPathInfoDispatcher:
+
+ d = WSGIPathInfoDispatcher({'/': my_crazy_app, '/blog': my_blog_app})
+ server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 80), d)
+
+Want SSL support? Just set these attributes:
+
+ server.ssl_certificate = <filename>
+ server.ssl_private_key = <filename>
+
+ if __name__ == '__main__':
+ try:
+ server.start()
+ except KeyboardInterrupt:
+ server.stop()
+
+This won't call the CherryPy engine (application side) at all, only the
+WSGI server, which is independent of the rest of CherryPy. Don't
+let the name "CherryPyWSGIServer" throw you; the name merely reflects
+its origin, not its coupling.
+
+For those of you wanting to understand internals of this module, here's the
+basic call flow. The server's listening thread runs a very tight loop,
+sticking incoming connections onto a Queue:
+
+ server = CherryPyWSGIServer(...)
+ server.start()
+ while True:
+ tick()
+ # This blocks until a request comes in:
+ child = socket.accept()
+ conn = HTTPConnection(child, ...)
+ server.requests.put(conn)
+
+Worker threads are kept in a pool and poll the Queue, popping off and then
+handling each connection in turn. Each connection can consist of an arbitrary
+number of requests and their responses, so we run a nested loop:
+
+ while True:
+ conn = server.requests.get()
+ conn.communicate()
+ -> while True:
+ req = HTTPRequest(...)
+ req.parse_request()
+ -> # Read the Request-Line, e.g. "GET /page HTTP/1.1"
+ req.rfile.readline()
+ req.read_headers()
+ req.respond()
+ -> response = wsgi_app(...)
+ try:
+ for chunk in response:
+ if chunk:
+ req.write(chunk)
+ finally:
+ if hasattr(response, "close"):
+ response.close()
+ if req.close_connection:
+ return
+"""
+
+
+import base64
+import os
+import Queue
+import re
+quoted_slash = re.compile("(?i)%2F")
+import rfc822
+import socket
+try:
+ import cStringIO as StringIO
+except ImportError:
+ import StringIO
+
+_fileobject_uses_str_type = isinstance(socket._fileobject(None)._rbuf, basestring)
+
+import sys
+import threading
+import time
+import traceback
+from urllib import unquote
+from urlparse import urlparse
+import warnings
+
+try:
+ from OpenSSL import SSL
+ from OpenSSL import crypto
+except ImportError:
+ SSL = None
+
+import errno
+
+def plat_specific_errors(*errnames):
+ """Return error numbers for all errors in errnames on this platform.
+
+ The 'errno' module contains different global constants depending on
+ the specific platform (OS). This function will return the list of
+ numeric values for a given list of potential names.
+ """
+ errno_names = dir(errno)
+ nums = [getattr(errno, k) for k in errnames if k in errno_names]
+ # de-dupe the list
+ return dict.fromkeys(nums).keys()
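+
+# Illustrative note (not part of the original source): on a typical Linux system,
+# plat_specific_errors("EPIPE", "WSAEBADF") returns just [errno.EPIPE], because the
+# WSAE* names only exist on Windows; the numeric values differ between platforms.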
+
+socket_error_eintr = plat_specific_errors("EINTR", "WSAEINTR")
+
+socket_errors_to_ignore = plat_specific_errors(
+ "EPIPE",
+ "EBADF", "WSAEBADF",
+ "ENOTSOCK", "WSAENOTSOCK",
+ "ETIMEDOUT", "WSAETIMEDOUT",
+ "ECONNREFUSED", "WSAECONNREFUSED",
+ "ECONNRESET", "WSAECONNRESET",
+ "ECONNABORTED", "WSAECONNABORTED",
+ "ENETRESET", "WSAENETRESET",
+ "EHOSTDOWN", "EHOSTUNREACH",
+ )
+socket_errors_to_ignore.append("timed out")
+
+socket_errors_nonblocking = plat_specific_errors(
+ 'EAGAIN', 'EWOULDBLOCK', 'WSAEWOULDBLOCK')
+
+comma_separated_headers = ['ACCEPT', 'ACCEPT-CHARSET', 'ACCEPT-ENCODING',
+ 'ACCEPT-LANGUAGE', 'ACCEPT-RANGES', 'ALLOW', 'CACHE-CONTROL',
+ 'CONNECTION', 'CONTENT-ENCODING', 'CONTENT-LANGUAGE', 'EXPECT',
+ 'IF-MATCH', 'IF-NONE-MATCH', 'PRAGMA', 'PROXY-AUTHENTICATE', 'TE',
+ 'TRAILER', 'TRANSFER-ENCODING', 'UPGRADE', 'VARY', 'VIA', 'WARNING',
+ 'WWW-AUTHENTICATE']
+
+
+class WSGIPathInfoDispatcher(object):
+ """A WSGI dispatcher for dispatch based on the PATH_INFO.
+
+ apps: a dict or list of (path_prefix, app) pairs.
+ """
+
+ def __init__(self, apps):
+ try:
+ apps = apps.items()
+ except AttributeError:
+ pass
+
+ # Sort the apps by len(path), descending
+ apps.sort()
+ apps.reverse()
+
+ # The path_prefix strings must start, but not end, with a slash.
+ # Use "" instead of "/".
+ self.apps = [(p.rstrip("/"), a) for p, a in apps]
+
+ def __call__(self, environ, start_response):
+ path = environ["PATH_INFO"] or "/"
+ for p, app in self.apps:
+ # The apps list should be sorted by length, descending.
+ if path.startswith(p + "/") or path == p:
+ environ = environ.copy()
+ environ["SCRIPT_NAME"] = environ["SCRIPT_NAME"] + p
+ environ["PATH_INFO"] = path[len(p):]
+ return app(environ, start_response)
+
+ start_response('404 Not Found', [('Content-Type', 'text/plain'),
+ ('Content-Length', '0')])
+ return ['']
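+
+# A minimal usage sketch (illustrative, not part of the original source); root_app
+# and blog_app are hypothetical WSGI callables.
+#
+#   d = WSGIPathInfoDispatcher({'/': root_app, '/blog': blog_app})
+#   server = CherryPyWSGIServer(('0.0.0.0', 8070), d)
+#   # A request for /blog/post/1 reaches blog_app with SCRIPT_NAME extended by
+#   # '/blog' and PATH_INFO '/post/1'; any other path falls through to root_app.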
+
+
+class MaxSizeExceeded(Exception):
+ pass
+
+class SizeCheckWrapper(object):
+ """Wraps a file-like object, raising MaxSizeExceeded if too large."""
+
+ def __init__(self, rfile, maxlen):
+ self.rfile = rfile
+ self.maxlen = maxlen
+ self.bytes_read = 0
+
+ def _check_length(self):
+ if self.maxlen and self.bytes_read > self.maxlen:
+ raise MaxSizeExceeded()
+
+ def read(self, size=None):
+ data = self.rfile.read(size)
+ self.bytes_read += len(data)
+ self._check_length()
+ return data
+
+ def readline(self, size=None):
+ if size is not None:
+ data = self.rfile.readline(size)
+ self.bytes_read += len(data)
+ self._check_length()
+ return data
+
+ # User didn't specify a size ...
+ # We read the line in chunks to make sure it's not a 100MB line !
+ res = []
+ while True:
+ data = self.rfile.readline(256)
+ self.bytes_read += len(data)
+ self._check_length()
+ res.append(data)
+ # See http://www.cherrypy.org/ticket/421
+ if len(data) < 256 or data[-1:] == "\n":
+ return ''.join(res)
+
+ def readlines(self, sizehint=0):
+ # Shamelessly stolen from StringIO
+ total = 0
+ lines = []
+ line = self.readline()
+ while line:
+ lines.append(line)
+ total += len(line)
+ if 0 < sizehint <= total:
+ break
+ line = self.readline()
+ return lines
+
+ def close(self):
+ self.rfile.close()
+
+ def __iter__(self):
+ return self
+
+ def next(self):
+ data = self.rfile.next()
+ self.bytes_read += len(data)
+ self._check_length()
+ return data
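+
+# A minimal usage sketch (illustrative, not part of the original source): capping
+# how much may be read from a wrapped file-like object.
+#
+#   body = SizeCheckWrapper(StringIO.StringIO('x' * 2048), maxlen=1024)
+#   try:
+#       body.read(2048)
+#   except MaxSizeExceeded:
+#       pass   # more than maxlen bytes were pulled from the wrapped stream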
+
+
+class HTTPRequest(object):
+ """An HTTP Request (and response).
+
+ A single HTTP connection may consist of multiple request/response pairs.
+
+ send: the 'send' method from the connection's socket object.
+ wsgi_app: the WSGI application to call.
+ environ: a partial WSGI environ (server and connection entries).
+ The caller MUST set the following entries:
+ * All wsgi.* entries, including .input
+ * SERVER_NAME and SERVER_PORT
+ * Any SSL_* entries
+ * Any custom entries like REMOTE_ADDR and REMOTE_PORT
+ * SERVER_SOFTWARE: the value to write in the "Server" response header.
+ * ACTUAL_SERVER_PROTOCOL: the value to write in the Status-Line of
+ the response. From RFC 2145: "An HTTP server SHOULD send a
+ response version equal to the highest version for which the
+ server is at least conditionally compliant, and whose major
+ version is less than or equal to the one received in the
+ request. An HTTP server MUST NOT send a version for which
+ it is not at least conditionally compliant."
+
+ outheaders: a list of header tuples to write in the response.
+ ready: when True, the request has been parsed and is ready to begin
+ generating the response. When False, signals the calling Connection
+ that the response should not be generated and the connection should
+ close.
+ close_connection: signals the calling Connection that the request
+ should close. This does not imply an error! The client and/or
+ server may each request that the connection be closed.
+ chunked_write: if True, output will be encoded with the "chunked"
+ transfer-coding. This value is set automatically inside
+ send_headers.
+ """
+
+ max_request_header_size = 0
+ max_request_body_size = 0
+
+ def __init__(self, wfile, environ, wsgi_app):
+ self.rfile = environ['wsgi.input']
+ self.wfile = wfile
+ self.environ = environ.copy()
+ self.wsgi_app = wsgi_app
+
+ self.ready = False
+ self.started_response = False
+ self.status = ""
+ self.outheaders = []
+ self.sent_headers = False
+ self.close_connection = False
+ self.chunked_write = False
+
+ def parse_request(self):
+ """Parse the next HTTP request start-line and message-headers."""
+ self.rfile.maxlen = self.max_request_header_size
+ self.rfile.bytes_read = 0
+
+ try:
+ self._parse_request()
+ except MaxSizeExceeded:
+ self.simple_response("413 Request Entity Too Large")
+ return
+
+ def _parse_request(self):
+ # HTTP/1.1 connections are persistent by default. If a client
+ # requests a page, then idles (leaves the connection open),
+ # then rfile.readline() will raise socket.error("timed out").
+ # Note that it does this based on the value given to settimeout(),
+ # and doesn't need the client to request or acknowledge the close
+ # (although your TCP stack might suffer for it: cf Apache's history
+ # with FIN_WAIT_2).
+ request_line = self.rfile.readline()
+ if not request_line:
+ # Force self.ready = False so the connection will close.
+ self.ready = False
+ return
+
+ if request_line == "\r\n":
+ # RFC 2616 sec 4.1: "...if the server is reading the protocol
+ # stream at the beginning of a message and receives a CRLF
+ # first, it should ignore the CRLF."
+ # But only ignore one leading line! else we enable a DoS.
+ request_line = self.rfile.readline()
+ if not request_line:
+ self.ready = False
+ return
+
+ environ = self.environ
+
+ try:
+ method, path, req_protocol = request_line.strip().split(" ", 2)
+ except ValueError:
+ self.simple_response(400, "Malformed Request-Line")
+ return
+
+ environ["REQUEST_METHOD"] = method
+
+ # path may be an abs_path (including "http://host.domain.tld");
+ scheme, location, path, params, qs, frag = urlparse(path)
+
+ if frag:
+ self.simple_response("400 Bad Request",
+ "Illegal #fragment in Request-URI.")
+ return
+
+ if scheme:
+ environ["wsgi.url_scheme"] = scheme
+ if params:
+ path = path + ";" + params
+
+ environ["SCRIPT_NAME"] = ""
+
+ # Unquote the path+params (e.g. "/this%20path" -> "this path").
+ # http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2
+ #
+ # But note that "...a URI must be separated into its components
+ # before the escaped characters within those components can be
+ # safely decoded." http://www.ietf.org/rfc/rfc2396.txt, sec 2.4.2
+ atoms = [unquote(x) for x in quoted_slash.split(path)]
+ path = "%2F".join(atoms)
+ environ["PATH_INFO"] = path
+
+ # Note that, like wsgiref and most other WSGI servers,
+ # we unquote the path but not the query string.
+ environ["QUERY_STRING"] = qs
+
+ # Compare request and server HTTP protocol versions, in case our
+ # server does not support the requested protocol. Limit our output
+ # to min(req, server). We want the following output:
+ # request server actual written supported response
+ # protocol protocol response protocol feature set
+ # a 1.0 1.0 1.0 1.0
+ # b 1.0 1.1 1.1 1.0
+ # c 1.1 1.0 1.0 1.0
+ # d 1.1 1.1 1.1 1.1
+ # Notice that, in (b), the response will be "HTTP/1.1" even though
+ # the client only understands 1.0. RFC 2616 10.5.6 says we should
+ # only return 505 if the _major_ version is different.
+ rp = int(req_protocol[5]), int(req_protocol[7])
+ server_protocol = environ["ACTUAL_SERVER_PROTOCOL"]
+ sp = int(server_protocol[5]), int(server_protocol[7])
+ if sp[0] != rp[0]:
+ self.simple_response("505 HTTP Version Not Supported")
+ return
+ # Bah. "SERVER_PROTOCOL" is actually the REQUEST protocol.
+ environ["SERVER_PROTOCOL"] = req_protocol
+ self.response_protocol = "HTTP/%s.%s" % min(rp, sp)
+
+ # If the Request-URI was an absoluteURI, use its location atom.
+ if location:
+ environ["SERVER_NAME"] = location
+
+ # then all the http headers
+ try:
+ self.read_headers()
+ except ValueError, ex:
+ self.simple_response("400 Bad Request", repr(ex.args))
+ return
+
+ mrbs = self.max_request_body_size
+ if mrbs and int(environ.get("CONTENT_LENGTH", 0)) > mrbs:
+ self.simple_response("413 Request Entity Too Large")
+ return
+
+ # Persistent connection support
+ if self.response_protocol == "HTTP/1.1":
+ # Both server and client are HTTP/1.1
+ if environ.get("HTTP_CONNECTION", "") == "close":
+ self.close_connection = True
+ else:
+ # Either the server or client (or both) are HTTP/1.0
+ if environ.get("HTTP_CONNECTION", "") != "Keep-Alive":
+ self.close_connection = True
+
+ # Transfer-Encoding support
+ te = None
+ if self.response_protocol == "HTTP/1.1":
+ te = environ.get("HTTP_TRANSFER_ENCODING")
+ if te:
+ te = [x.strip().lower() for x in te.split(",") if x.strip()]
+
+ self.chunked_read = False
+
+ if te:
+ for enc in te:
+ if enc == "chunked":
+ self.chunked_read = True
+ else:
+ # Note that, even if we see "chunked", we must reject
+ # if there is an extension we don't recognize.
+ self.simple_response("501 Unimplemented")
+ self.close_connection = True
+ return
+
+ # From PEP 333:
+ # "Servers and gateways that implement HTTP 1.1 must provide
+ # transparent support for HTTP 1.1's "expect/continue" mechanism.
+ # This may be done in any of several ways:
+ # 1. Respond to requests containing an Expect: 100-continue request
+ # with an immediate "100 Continue" response, and proceed normally.
+ # 2. Proceed with the request normally, but provide the application
+ # with a wsgi.input stream that will send the "100 Continue"
+ # response if/when the application first attempts to read from
+ # the input stream. The read request must then remain blocked
+ # until the client responds.
+ # 3. Wait until the client decides that the server does not support
+ # expect/continue, and sends the request body on its own.
+ # (This is suboptimal, and is not recommended.)
+ #
+ # We used to do 3, but are now doing 1. Maybe we'll do 2 someday,
+ # but it seems like it would be a big slowdown for such a rare case.
+ if environ.get("HTTP_EXPECT", "") == "100-continue":
+ self.simple_response(100)
+
+ self.ready = True
+
+ def read_headers(self):
+ """Read header lines from the incoming stream."""
+ environ = self.environ
+
+ while True:
+ line = self.rfile.readline()
+ if not line:
+ # No more data--illegal end of headers
+ raise ValueError("Illegal end of headers.")
+
+ if line == '\r\n':
+ # Normal end of headers
+ break
+
+ if line[0] in ' \t':
+ # It's a continuation line.
+ v = line.strip()
+ else:
+ k, v = line.split(":", 1)
+ k, v = k.strip().upper(), v.strip()
+ envname = "HTTP_" + k.replace("-", "_")
+
+ if k in comma_separated_headers:
+ existing = environ.get(envname)
+ if existing:
+ v = ", ".join((existing, v))
+ environ[envname] = v
+
+ ct = environ.pop("HTTP_CONTENT_TYPE", None)
+ if ct is not None:
+ environ["CONTENT_TYPE"] = ct
+ cl = environ.pop("HTTP_CONTENT_LENGTH", None)
+ if cl is not None:
+ environ["CONTENT_LENGTH"] = cl
+
+ def decode_chunked(self):
+ """Decode the 'chunked' transfer coding."""
+ cl = 0
+ data = StringIO.StringIO()
+ while True:
+ line = self.rfile.readline().strip().split(";", 1)
+ chunk_size = int(line.pop(0), 16)
+ if chunk_size <= 0:
+ break
+## if line: chunk_extension = line[0]
+ cl += chunk_size
+ data.write(self.rfile.read(chunk_size))
+ crlf = self.rfile.read(2)
+ if crlf != "\r\n":
+ self.simple_response("400 Bad Request",
+ "Bad chunked transfer coding "
+ "(expected '\\r\\n', got %r)" % crlf)
+ return
+
+ # Grab any trailer headers
+ self.read_headers()
+
+ data.seek(0)
+ self.environ["wsgi.input"] = data
+ self.environ["CONTENT_LENGTH"] = str(cl) or ""
+ return True
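+
+    # Illustrative note (not part of the original source): a chunked body carrying
+    # "Wiki" followed by "pedia" arrives on the wire as
+    #   4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n
+    # and is rewritten above into a plain wsgi.input with CONTENT_LENGTH '9'.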
+
+ def respond(self):
+ """Call the appropriate WSGI app and write its iterable output."""
+ # Set rfile.maxlen to ensure we don't read past Content-Length.
+ # This will also be used to read the entire request body if errors
+ # are raised before the app can read the body.
+ if self.chunked_read:
+ # If chunked, Content-Length will be 0.
+ self.rfile.maxlen = self.max_request_body_size
+ else:
+ cl = int(self.environ.get("CONTENT_LENGTH", 0))
+ if self.max_request_body_size:
+ self.rfile.maxlen = min(cl, self.max_request_body_size)
+ else:
+ self.rfile.maxlen = cl
+ self.rfile.bytes_read = 0
+
+ try:
+ self._respond()
+ except MaxSizeExceeded:
+ if not self.sent_headers:
+ self.simple_response("413 Request Entity Too Large")
+ return
+
+ def _respond(self):
+ if self.chunked_read:
+ if not self.decode_chunked():
+ self.close_connection = True
+ return
+
+ response = self.wsgi_app(self.environ, self.start_response)
+ try:
+ for chunk in response:
+ # "The start_response callable must not actually transmit
+ # the response headers. Instead, it must store them for the
+ # server or gateway to transmit only after the first
+ # iteration of the application return value that yields
+ # a NON-EMPTY string, or upon the application's first
+ # invocation of the write() callable." (PEP 333)
+ if chunk:
+ self.write(chunk)
+ finally:
+ if hasattr(response, "close"):
+ response.close()
+
+ if (self.ready and not self.sent_headers):
+ self.sent_headers = True
+ self.send_headers()
+ if self.chunked_write:
+ self.wfile.sendall("0\r\n\r\n")
+
+ def simple_response(self, status, msg=""):
+ """Write a simple response back to the client."""
+ status = str(status)
+ buf = ["%s %s\r\n" % (self.environ['ACTUAL_SERVER_PROTOCOL'], status),
+ "Content-Length: %s\r\n" % len(msg),
+ "Content-Type: text/plain\r\n"]
+
+ if status[:3] == "413" and self.response_protocol == 'HTTP/1.1':
+ # Request Entity Too Large
+ self.close_connection = True
+ buf.append("Connection: close\r\n")
+
+ buf.append("\r\n")
+ if msg:
+ buf.append(msg)
+
+ try:
+ self.wfile.sendall("".join(buf))
+ except socket.error, x:
+ if x.args[0] not in socket_errors_to_ignore:
+ raise
+
+ def start_response(self, status, headers, exc_info = None):
+ """WSGI callable to begin the HTTP response."""
+ # "The application may call start_response more than once,
+ # if and only if the exc_info argument is provided."
+ if self.started_response and not exc_info:
+ raise AssertionError("WSGI start_response called a second "
+ "time with no exc_info.")
+
+ # "if exc_info is provided, and the HTTP headers have already been
+ # sent, start_response must raise an error, and should raise the
+ # exc_info tuple."
+ if self.sent_headers:
+ try:
+ raise exc_info[0], exc_info[1], exc_info[2]
+ finally:
+ exc_info = None
+
+ self.started_response = True
+ self.status = status
+ self.outheaders.extend(headers)
+ return self.write
+
+ def write(self, chunk):
+ """WSGI callable to write unbuffered data to the client.
+
+ This method is also used internally by start_response (to write
+ data from the iterable returned by the WSGI application).
+ """
+ if not self.started_response:
+ raise AssertionError("WSGI write called before start_response.")
+
+ if not self.sent_headers:
+ self.sent_headers = True
+ self.send_headers()
+
+ if self.chunked_write and chunk:
+ buf = [hex(len(chunk))[2:], "\r\n", chunk, "\r\n"]
+ self.wfile.sendall("".join(buf))
+ else:
+ self.wfile.sendall(chunk)
+
+ def send_headers(self):
+ """Assert, process, and send the HTTP response message-headers."""
+ hkeys = [key.lower() for key, value in self.outheaders]
+ status = int(self.status[:3])
+
+ if status == 413:
+ # Request Entity Too Large. Close conn to avoid garbage.
+ self.close_connection = True
+ elif "content-length" not in hkeys:
+ # "All 1xx (informational), 204 (no content),
+ # and 304 (not modified) responses MUST NOT
+ # include a message-body." So no point chunking.
+ if status < 200 or status in (204, 205, 304):
+ pass
+ else:
+ if (self.response_protocol == 'HTTP/1.1'
+ and self.environ["REQUEST_METHOD"] != 'HEAD'):
+ # Use the chunked transfer-coding
+ self.chunked_write = True
+ self.outheaders.append(("Transfer-Encoding", "chunked"))
+ else:
+ # Closing the conn is the only way to determine len.
+ self.close_connection = True
+
+ if "connection" not in hkeys:
+ if self.response_protocol == 'HTTP/1.1':
+ # Both server and client are HTTP/1.1 or better
+ if self.close_connection:
+ self.outheaders.append(("Connection", "close"))
+ else:
+ # Server and/or client are HTTP/1.0
+ if not self.close_connection:
+ self.outheaders.append(("Connection", "Keep-Alive"))
+
+ if (not self.close_connection) and (not self.chunked_read):
+ # Read any remaining request body data on the socket.
+ # "If an origin server receives a request that does not include an
+ # Expect request-header field with the "100-continue" expectation,
+ # the request includes a request body, and the server responds
+ # with a final status code before reading the entire request body
+ # from the transport connection, then the server SHOULD NOT close
+ # the transport connection until it has read the entire request,
+ # or until the client closes the connection. Otherwise, the client
+ # might not reliably receive the response message. However, this
+ # requirement is not be construed as preventing a server from
+ # defending itself against denial-of-service attacks, or from
+ # badly broken client implementations."
+ size = self.rfile.maxlen - self.rfile.bytes_read
+ if size > 0:
+ self.rfile.read(size)
+
+ if "date" not in hkeys:
+ self.outheaders.append(("Date", rfc822.formatdate()))
+
+ if "server" not in hkeys:
+ self.outheaders.append(("Server", self.environ['SERVER_SOFTWARE']))
+
+ buf = [self.environ['ACTUAL_SERVER_PROTOCOL'], " ", self.status, "\r\n"]
+ try:
+ buf += [k + ": " + v + "\r\n" for k, v in self.outheaders]
+ except TypeError:
+            if not isinstance(k, str):
+                raise TypeError("WSGI response header key %r is not a string." % k)
+            if not isinstance(v, str):
+                raise TypeError("WSGI response header value %r is not a string." % v)
+ else:
+ raise
+ buf.append("\r\n")
+ self.wfile.sendall("".join(buf))
+
+
+class NoSSLError(Exception):
+ """Exception raised when a client speaks HTTP to an HTTPS socket."""
+ pass
+
+
+class FatalSSLAlert(Exception):
+ """Exception raised when the SSL implementation signals a fatal alert."""
+ pass
+
+
+if not _fileobject_uses_str_type:
+ class CP_fileobject(socket._fileobject):
+ """Faux file object attached to a socket object."""
+
+ def sendall(self, data):
+ """Sendall for non-blocking sockets."""
+ while data:
+ try:
+ bytes_sent = self.send(data)
+ data = data[bytes_sent:]
+ except socket.error, e:
+ if e.args[0] not in socket_errors_nonblocking:
+ raise
+
+ def send(self, data):
+ return self._sock.send(data)
+
+ def flush(self):
+ if self._wbuf:
+ buffer = "".join(self._wbuf)
+ self._wbuf = []
+ self.sendall(buffer)
+
+ def recv(self, size):
+ while True:
+ try:
+ return self._sock.recv(size)
+ except socket.error, e:
+ if (e.args[0] not in socket_errors_nonblocking
+ and e.args[0] not in socket_error_eintr):
+ raise
+
+ def read(self, size=-1):
+ # Use max, disallow tiny reads in a loop as they are very inefficient.
+ # We never leave read() with any leftover data from a new recv() call
+ # in our internal buffer.
+ rbufsize = max(self._rbufsize, self.default_bufsize)
+ # Our use of StringIO rather than lists of string objects returned by
+ # recv() minimizes memory usage and fragmentation that occurs when
+ # rbufsize is large compared to the typical return value of recv().
+ buf = self._rbuf
+ buf.seek(0, 2) # seek end
+ if size < 0:
+ # Read until EOF
+ self._rbuf = StringIO.StringIO() # reset _rbuf. we consume it via buf.
+ while True:
+ data = self.recv(rbufsize)
+ if not data:
+ break
+ buf.write(data)
+ return buf.getvalue()
+ else:
+ # Read until size bytes or EOF seen, whichever comes first
+ buf_len = buf.tell()
+ if buf_len >= size:
+ # Already have size bytes in our buffer? Extract and return.
+ buf.seek(0)
+ rv = buf.read(size)
+ self._rbuf = StringIO.StringIO()
+ self._rbuf.write(buf.read())
+ return rv
+
+ self._rbuf = StringIO.StringIO() # reset _rbuf. we consume it via buf.
+ while True:
+ left = size - buf_len
+ # recv() will malloc the amount of memory given as its
+ # parameter even though it often returns much less data
+ # than that. The returned data string is short lived
+ # as we copy it into a StringIO and free it. This avoids
+ # fragmentation issues on many platforms.
+ data = self.recv(left)
+ if not data:
+ break
+ n = len(data)
+ if n == size and not buf_len:
+ # Shortcut. Avoid buffer data copies when:
+ # - We have no data in our buffer.
+ # AND
+ # - Our call to recv returned exactly the
+ # number of bytes we were asked to read.
+ return data
+ if n == left:
+ buf.write(data)
+ del data # explicit free
+ break
+ assert n <= left, "recv(%d) returned %d bytes" % (left, n)
+ buf.write(data)
+ buf_len += n
+ del data # explicit free
+ #assert buf_len == buf.tell()
+ return buf.getvalue()
+
+ def readline(self, size=-1):
+ buf = self._rbuf
+ buf.seek(0, 2) # seek end
+ if buf.tell() > 0:
+ # check if we already have it in our buffer
+ buf.seek(0)
+ bline = buf.readline(size)
+ if bline.endswith('\n') or len(bline) == size:
+ self._rbuf = StringIO.StringIO()
+ self._rbuf.write(buf.read())
+ return bline
+ del bline
+ if size < 0:
+ # Read until \n or EOF, whichever comes first
+ if self._rbufsize <= 1:
+ # Speed up unbuffered case
+ buf.seek(0)
+ buffers = [buf.read()]
+ self._rbuf = StringIO.StringIO() # reset _rbuf. we consume it via buf.
+ data = None
+ recv = self.recv
+ while data != "\n":
+ data = recv(1)
+ if not data:
+ break
+ buffers.append(data)
+ return "".join(buffers)
+
+ buf.seek(0, 2) # seek end
+ self._rbuf = StringIO.StringIO() # reset _rbuf. we consume it via buf.
+ while True:
+ data = self.recv(self._rbufsize)
+ if not data:
+ break
+ nl = data.find('\n')
+ if nl >= 0:
+ nl += 1
+ buf.write(data[:nl])
+ self._rbuf.write(data[nl:])
+ del data
+ break
+ buf.write(data)
+ return buf.getvalue()
+ else:
+ # Read until size bytes or \n or EOF seen, whichever comes first
+ buf.seek(0, 2) # seek end
+ buf_len = buf.tell()
+ if buf_len >= size:
+ buf.seek(0)
+ rv = buf.read(size)
+ self._rbuf = StringIO.StringIO()
+ self._rbuf.write(buf.read())
+ return rv
+ self._rbuf = StringIO.StringIO() # reset _rbuf. we consume it via buf.
+ while True:
+ data = self.recv(self._rbufsize)
+ if not data:
+ break
+ left = size - buf_len
+ # did we just receive a newline?
+ nl = data.find('\n', 0, left)
+ if nl >= 0:
+ nl += 1
+ # save the excess data to _rbuf
+ self._rbuf.write(data[nl:])
+ if buf_len:
+ buf.write(data[:nl])
+ break
+ else:
+ # Shortcut. Avoid data copy through buf when returning
+ # a substring of our first recv().
+ return data[:nl]
+ n = len(data)
+ if n == size and not buf_len:
+ # Shortcut. Avoid data copy through buf when
+ # returning exactly all of our first recv().
+ return data
+ if n >= left:
+ buf.write(data[:left])
+ self._rbuf.write(data[left:])
+ break
+ buf.write(data)
+ buf_len += n
+ #assert buf_len == buf.tell()
+ return buf.getvalue()
+
+else:
+ class CP_fileobject(socket._fileobject):
+ """Faux file object attached to a socket object."""
+
+ def sendall(self, data):
+ """Sendall for non-blocking sockets."""
+ while data:
+ try:
+ bytes_sent = self.send(data)
+ data = data[bytes_sent:]
+ except socket.error, e:
+ if e.args[0] not in socket_errors_nonblocking:
+ raise
+
+ def send(self, data):
+ return self._sock.send(data)
+
+ def flush(self):
+ if self._wbuf:
+ buffer = "".join(self._wbuf)
+ self._wbuf = []
+ self.sendall(buffer)
+
+ def recv(self, size):
+ while True:
+ try:
+ return self._sock.recv(size)
+ except socket.error, e:
+ if (e.args[0] not in socket_errors_nonblocking
+ and e.args[0] not in socket_error_eintr):
+ raise
+
+ def read(self, size=-1):
+ if size < 0:
+ # Read until EOF
+ buffers = [self._rbuf]
+ self._rbuf = ""
+ if self._rbufsize <= 1:
+ recv_size = self.default_bufsize
+ else:
+ recv_size = self._rbufsize
+
+ while True:
+ data = self.recv(recv_size)
+ if not data:
+ break
+ buffers.append(data)
+ return "".join(buffers)
+ else:
+ # Read until size bytes or EOF seen, whichever comes first
+ data = self._rbuf
+ buf_len = len(data)
+ if buf_len >= size:
+ self._rbuf = data[size:]
+ return data[:size]
+ buffers = []
+ if data:
+ buffers.append(data)
+ self._rbuf = ""
+ while True:
+ left = size - buf_len
+ recv_size = max(self._rbufsize, left)
+ data = self.recv(recv_size)
+ if not data:
+ break
+ buffers.append(data)
+ n = len(data)
+ if n >= left:
+ self._rbuf = data[left:]
+ buffers[-1] = data[:left]
+ break
+ buf_len += n
+ return "".join(buffers)
+
+ def readline(self, size=-1):
+ data = self._rbuf
+ if size < 0:
+ # Read until \n or EOF, whichever comes first
+ if self._rbufsize <= 1:
+ # Speed up unbuffered case
+ assert data == ""
+ buffers = []
+ while data != "\n":
+ data = self.recv(1)
+ if not data:
+ break
+ buffers.append(data)
+ return "".join(buffers)
+ nl = data.find('\n')
+ if nl >= 0:
+ nl += 1
+ self._rbuf = data[nl:]
+ return data[:nl]
+ buffers = []
+ if data:
+ buffers.append(data)
+ self._rbuf = ""
+ while True:
+ data = self.recv(self._rbufsize)
+ if not data:
+ break
+ buffers.append(data)
+ nl = data.find('\n')
+ if nl >= 0:
+ nl += 1
+ self._rbuf = data[nl:]
+ buffers[-1] = data[:nl]
+ break
+ return "".join(buffers)
+ else:
+ # Read until size bytes or \n or EOF seen, whichever comes first
+ nl = data.find('\n', 0, size)
+ if nl >= 0:
+ nl += 1
+ self._rbuf = data[nl:]
+ return data[:nl]
+ buf_len = len(data)
+ if buf_len >= size:
+ self._rbuf = data[size:]
+ return data[:size]
+ buffers = []
+ if data:
+ buffers.append(data)
+ self._rbuf = ""
+ while True:
+ data = self.recv(self._rbufsize)
+ if not data:
+ break
+ buffers.append(data)
+ left = size - buf_len
+ nl = data.find('\n', 0, left)
+ if nl >= 0:
+ nl += 1
+ self._rbuf = data[nl:]
+ buffers[-1] = data[:nl]
+ break
+ n = len(data)
+ if n >= left:
+ self._rbuf = data[left:]
+ buffers[-1] = data[:left]
+ break
+ buf_len += n
+ return "".join(buffers)
+
+
+class SSL_fileobject(CP_fileobject):
+ """SSL file object attached to a socket object."""
+
+ ssl_timeout = 3
+ ssl_retry = .01
+
+ def _safe_call(self, is_reader, call, *args, **kwargs):
+ """Wrap the given call with SSL error-trapping.
+
+ is_reader: if False EOF errors will be raised. If True, EOF errors
+ will return "" (to emulate normal sockets).
+ """
+ start = time.time()
+ while True:
+ try:
+ return call(*args, **kwargs)
+ except SSL.WantReadError:
+ # Sleep and try again. This is dangerous, because it means
+ # the rest of the stack has no way of differentiating
+ # between a "new handshake" error and "client dropped".
+ # Note this isn't an endless loop: there's a timeout below.
+ time.sleep(self.ssl_retry)
+ except SSL.WantWriteError:
+ time.sleep(self.ssl_retry)
+ except SSL.SysCallError, e:
+ if is_reader and e.args == (-1, 'Unexpected EOF'):
+ return ""
+
+ errnum = e.args[0]
+ if is_reader and errnum in socket_errors_to_ignore:
+ return ""
+ raise socket.error(errnum)
+ except SSL.Error, e:
+ if is_reader and e.args == (-1, 'Unexpected EOF'):
+ return ""
+
+ thirdarg = None
+ try:
+ thirdarg = e.args[0][0][2]
+ except IndexError:
+ pass
+
+ if thirdarg == 'http request':
+ # The client is talking HTTP to an HTTPS server.
+ raise NoSSLError()
+ raise FatalSSLAlert(*e.args)
+ except:
+ raise
+
+ if time.time() - start > self.ssl_timeout:
+ raise socket.timeout("timed out")
+
+ def recv(self, *args, **kwargs):
+ buf = []
+ r = super(SSL_fileobject, self).recv
+ while True:
+ data = self._safe_call(True, r, *args, **kwargs)
+ buf.append(data)
+ p = self._sock.pending()
+ if not p:
+ return "".join(buf)
+
+ def sendall(self, *args, **kwargs):
+ return self._safe_call(False, super(SSL_fileobject, self).sendall, *args, **kwargs)
+
+ def send(self, *args, **kwargs):
+ return self._safe_call(False, super(SSL_fileobject, self).send, *args, **kwargs)
+
+
+class HTTPConnection(object):
+ """An HTTP connection (active socket).
+
+ socket: the raw socket object (usually TCP) for this connection.
+ wsgi_app: the WSGI application for this server/connection.
+ environ: a WSGI environ template. This will be copied for each request.
+
+ rfile: a fileobject for reading from the socket.
+ send: a function for writing (+ flush) to the socket.
+ """
+
+ rbufsize = -1
+ RequestHandlerClass = HTTPRequest
+ environ = {"wsgi.version": (1, 0),
+ "wsgi.url_scheme": "http",
+ "wsgi.multithread": True,
+ "wsgi.multiprocess": False,
+ "wsgi.run_once": False,
+ "wsgi.errors": sys.stderr,
+ }
+
+ def __init__(self, sock, wsgi_app, environ):
+ self.socket = sock
+ self.wsgi_app = wsgi_app
+
+ # Copy the class environ into self.
+ self.environ = self.environ.copy()
+ self.environ.update(environ)
+
+ if SSL and isinstance(sock, SSL.ConnectionType):
+ timeout = sock.gettimeout()
+ self.rfile = SSL_fileobject(sock, "rb", self.rbufsize)
+ self.rfile.ssl_timeout = timeout
+ self.wfile = SSL_fileobject(sock, "wb", -1)
+ self.wfile.ssl_timeout = timeout
+ else:
+ self.rfile = CP_fileobject(sock, "rb", self.rbufsize)
+ self.wfile = CP_fileobject(sock, "wb", -1)
+
+ # Wrap wsgi.input but not HTTPConnection.rfile itself.
+ # We're also not setting maxlen yet; we'll do that separately
+ # for headers and body for each iteration of self.communicate
+ # (if maxlen is 0 the wrapper doesn't check length).
+ self.environ["wsgi.input"] = SizeCheckWrapper(self.rfile, 0)
+
+ def communicate(self):
+ """Read each request and respond appropriately."""
+ try:
+ while True:
+ # (re)set req to None so that if something goes wrong in
+ # the RequestHandlerClass constructor, the error doesn't
+ # get written to the previous request.
+ req = None
+ req = self.RequestHandlerClass(self.wfile, self.environ,
+ self.wsgi_app)
+
+ # This order of operations should guarantee correct pipelining.
+ req.parse_request()
+ if not req.ready:
+ return
+
+ req.respond()
+ if req.close_connection:
+ return
+
+ except socket.error, e:
+ errnum = e.args[0]
+ if errnum == 'timed out':
+ if req and not req.sent_headers:
+ req.simple_response("408 Request Timeout")
+ elif errnum not in socket_errors_to_ignore:
+ if req and not req.sent_headers:
+ req.simple_response("500 Internal Server Error",
+ format_exc())
+ return
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except FatalSSLAlert, e:
+ # Close the connection.
+ return
+ except NoSSLError:
+ if req and not req.sent_headers:
+ # Unwrap our wfile
+ req.wfile = CP_fileobject(self.socket._sock, "wb", -1)
+ req.simple_response("400 Bad Request",
+ "The client sent a plain HTTP request, but "
+ "this server only speaks HTTPS on this port.")
+ self.linger = True
+ except Exception, e:
+ if req and not req.sent_headers:
+ req.simple_response("500 Internal Server Error", format_exc())
+
+ linger = False
+
+ def close(self):
+ """Close the socket underlying this connection."""
+ self.rfile.close()
+
+ if not self.linger:
+ # Python's socket module does NOT call close on the kernel socket
+ # when you call socket.close(). We do so manually here because we
+ # want this server to send a FIN TCP segment immediately. Note this
+ # must be called *before* calling socket.close(), because the latter
+ # drops its reference to the kernel socket.
+ self.socket._sock.close()
+ self.socket.close()
+ else:
+ # On the other hand, sometimes we want to hang around for a bit
+ # to make sure the client has a chance to read our entire
+ # response. Skipping the close() calls here delays the FIN
+ # packet until the socket object is garbage-collected later.
+ # Someday, perhaps, we'll do the full lingering_close that
+ # Apache does, but not today.
+ pass
+
+
+def format_exc(limit=None):
+ """Like print_exc() but return a string. Backport for Python 2.3."""
+ try:
+ etype, value, tb = sys.exc_info()
+ return ''.join(traceback.format_exception(etype, value, tb, limit))
+ finally:
+ etype = value = tb = None
+
+
+_SHUTDOWNREQUEST = None
+
+class WorkerThread(threading.Thread):
+ """Thread which continuously polls a Queue for Connection objects.
+
+ server: the HTTP Server which spawned this thread, and which owns the
+ Queue and is placing active connections into it.
+ ready: a simple flag for the calling server to know when this thread
+ has begun polling the Queue.
+
+ Due to the timing issues of polling a Queue, a WorkerThread does not
+ check its own 'ready' flag after it has started. To stop the thread,
+ it is necessary to stick a _SHUTDOWNREQUEST object onto the Queue
+ (one for each running WorkerThread).
+ """
+
+ conn = None
+
+ def __init__(self, server):
+ self.ready = False
+ self.server = server
+ threading.Thread.__init__(self)
+
+ def run(self):
+ try:
+ self.ready = True
+ while True:
+ conn = self.server.requests.get()
+ if conn is _SHUTDOWNREQUEST:
+ return
+
+ self.conn = conn
+ try:
+ conn.communicate()
+ finally:
+ conn.close()
+ self.conn = None
+ except (KeyboardInterrupt, SystemExit), exc:
+ self.server.interrupt = exc
+
+
+class ThreadPool(object):
+ """A Request Queue for the CherryPyWSGIServer which pools threads.
+
+ ThreadPool objects must provide min, get(), put(obj), start()
+ and stop(timeout) attributes.
+ """
+
+ def __init__(self, server, min=10, max=-1):
+ self.server = server
+ self.min = min
+ self.max = max
+ self._threads = []
+ self._queue = Queue.Queue()
+ self.get = self._queue.get
+
+ def start(self):
+ """Start the pool of threads."""
+ for i in xrange(self.min):
+ self._threads.append(WorkerThread(self.server))
+ for worker in self._threads:
+ worker.setName("CP WSGIServer " + worker.getName())
+ worker.start()
+ for worker in self._threads:
+ while not worker.ready:
+ time.sleep(.1)
+
+ def _get_idle(self):
+ """Number of worker threads which are idle. Read-only."""
+ return len([t for t in self._threads if t.conn is None])
+ idle = property(_get_idle, doc=_get_idle.__doc__)
+
+ def put(self, obj):
+ self._queue.put(obj)
+ if obj is _SHUTDOWNREQUEST:
+ return
+
+ def grow(self, amount):
+ """Spawn new worker threads (not above self.max)."""
+ for i in xrange(amount):
+ if self.max > 0 and len(self._threads) >= self.max:
+ break
+ worker = WorkerThread(self.server)
+ worker.setName("CP WSGIServer " + worker.getName())
+ self._threads.append(worker)
+ worker.start()
+
+ def shrink(self, amount):
+ """Kill off worker threads (not below self.min)."""
+ # Grow/shrink the pool if necessary.
+ # Remove any dead threads from our list
+ for t in self._threads:
+ if not t.isAlive():
+ self._threads.remove(t)
+ amount -= 1
+
+ if amount > 0:
+ for i in xrange(min(amount, len(self._threads) - self.min)):
+ # Put a number of shutdown requests on the queue equal
+ # to 'amount'. Once each of those is processed by a worker,
+ # that worker will terminate and be culled from our list
+ # in self.put.
+ self._queue.put(_SHUTDOWNREQUEST)
+
+ def stop(self, timeout=5):
+ # Must shut down threads here so the code that calls
+ # this method can know when all threads are stopped.
+ for worker in self._threads:
+ self._queue.put(_SHUTDOWNREQUEST)
+
+ # Don't join currentThread (when stop is called inside a request).
+ current = threading.currentThread()
+ while self._threads:
+ worker = self._threads.pop()
+ if worker is not current and worker.isAlive():
+ try:
+ if timeout is None or timeout < 0:
+ worker.join()
+ else:
+ worker.join(timeout)
+ if worker.isAlive():
+ # We exhausted the timeout.
+ # Forcibly shut down the socket.
+ c = worker.conn
+ if c and not c.rfile.closed:
+ if SSL and isinstance(c.socket, SSL.ConnectionType):
+ # pyOpenSSL.socket.shutdown takes no args
+ c.socket.shutdown()
+ else:
+ c.socket.shutdown(socket.SHUT_RD)
+ worker.join()
+ except (AssertionError,
+ # Ignore repeated Ctrl-C.
+ # See http://www.cherrypy.org/ticket/691.
+ KeyboardInterrupt), exc1:
+ pass
+
+
+
+class SSLConnection:
+ """A thread-safe wrapper for an SSL.Connection.
+
+ *args: the arguments to create the wrapped SSL.Connection(*args).
+ """
+
+ def __init__(self, *args):
+ self._ssl_conn = SSL.Connection(*args)
+ self._lock = threading.RLock()
+
+ for f in ('get_context', 'pending', 'send', 'write', 'recv', 'read',
+ 'renegotiate', 'bind', 'listen', 'connect', 'accept',
+ 'setblocking', 'fileno', 'shutdown', 'close', 'get_cipher_list',
+ 'getpeername', 'getsockname', 'getsockopt', 'setsockopt',
+ 'makefile', 'get_app_data', 'set_app_data', 'state_string',
+ 'sock_shutdown', 'get_peer_certificate', 'want_read',
+ 'want_write', 'set_connect_state', 'set_accept_state',
+ 'connect_ex', 'sendall', 'settimeout'):
+ exec """def %s(self, *args):
+ self._lock.acquire()
+ try:
+ return self._ssl_conn.%s(*args)
+ finally:
+ self._lock.release()
+""" % (f, f)
+
+
+try:
+ import fcntl
+except ImportError:
+ try:
+ from ctypes import windll, WinError
+ except ImportError:
+ def prevent_socket_inheritance(sock):
+ """Dummy function, since neither fcntl nor ctypes are available."""
+ pass
+ else:
+ def prevent_socket_inheritance(sock):
+ """Mark the given socket fd as non-inheritable (Windows)."""
+ if not windll.kernel32.SetHandleInformation(sock.fileno(), 1, 0):
+ raise WinError()
+else:
+ def prevent_socket_inheritance(sock):
+ """Mark the given socket fd as non-inheritable (POSIX)."""
+ fd = sock.fileno()
+ old_flags = fcntl.fcntl(fd, fcntl.F_GETFD)
+ fcntl.fcntl(fd, fcntl.F_SETFD, old_flags | fcntl.FD_CLOEXEC)
+
+
+class CherryPyWSGIServer(object):
+ """An HTTP server for WSGI.
+
+ bind_addr: The interface on which to listen for connections.
+ For TCP sockets, a (host, port) tuple. Host values may be any IPv4
+ or IPv6 address, or any valid hostname. The string 'localhost' is a
+ synonym for '127.0.0.1' (or '::1', if your hosts file prefers IPv6).
+ The string '0.0.0.0' is a special IPv4 entry meaning "any active
+ interface" (INADDR_ANY), and '::' is the similar IN6ADDR_ANY for
+ IPv6. The empty string or None are not allowed.
+
+ For UNIX sockets, supply the filename as a string.
+ wsgi_app: the WSGI 'application callable'; multiple WSGI applications
+ may be passed as (path_prefix, app) pairs.
+ numthreads: the number of worker threads to create (default 10).
+ server_name: the string to set for WSGI's SERVER_NAME environ entry.
+ Defaults to socket.gethostname().
+ max: the maximum number of queued requests (defaults to -1 = no limit).
+ request_queue_size: the 'backlog' argument to socket.listen();
+ specifies the maximum number of queued connections (default 5).
+ timeout: the timeout in seconds for accepted connections (default 10).
+
+ nodelay: if True (the default since 3.1), sets the TCP_NODELAY socket
+ option.
+
+ protocol: the version string to write in the Status-Line of all
+ HTTP responses. For example, "HTTP/1.1" (the default). This
+ also limits the supported features used in the response.
+
+
+ SSL/HTTPS
+ ---------
+ The OpenSSL module must be importable for SSL functionality.
+ You can obtain it from http://pyopenssl.sourceforge.net/
+
+ ssl_certificate: the filename of the server SSL certificate.
+    ssl_private_key: the filename of the server's private key file.
+
+ If either of these is None (both are None by default), this server
+ will not use SSL. If both are given and are valid, they will be read
+ on server start and used in the SSL context for the listening socket.
+ """
+
+ protocol = "HTTP/1.1"
+ _bind_addr = "127.0.0.1"
+ version = "CherryPy/3.1.2"
+ ready = False
+ _interrupt = None
+
+ nodelay = True
+
+ ConnectionClass = HTTPConnection
+ environ = {}
+
+ # Paths to certificate and private key files
+ ssl_certificate = None
+ ssl_private_key = None
+
+ def __init__(self, bind_addr, wsgi_app, numthreads=10, server_name=None,
+ max=-1, request_queue_size=5, timeout=10, shutdown_timeout=5):
+ self.requests = ThreadPool(self, min=numthreads or 1, max=max)
+
+ if callable(wsgi_app):
+ # We've been handed a single wsgi_app, in CP-2.1 style.
+ # Assume it's mounted at "".
+ self.wsgi_app = wsgi_app
+ else:
+ # We've been handed a list of (path_prefix, wsgi_app) tuples,
+ # so that the server can call different wsgi_apps, and also
+ # correctly set SCRIPT_NAME.
+ warnings.warn("The ability to pass multiple apps is deprecated "
+ "and will be removed in 3.2. You should explicitly "
+ "include a WSGIPathInfoDispatcher instead.",
+ DeprecationWarning)
+ self.wsgi_app = WSGIPathInfoDispatcher(wsgi_app)
+
+ self.bind_addr = bind_addr
+ if not server_name:
+ server_name = socket.gethostname()
+ self.server_name = server_name
+ self.request_queue_size = request_queue_size
+
+ self.timeout = timeout
+ self.shutdown_timeout = shutdown_timeout
+
+ def _get_numthreads(self):
+ return self.requests.min
+ def _set_numthreads(self, value):
+ self.requests.min = value
+ numthreads = property(_get_numthreads, _set_numthreads)
+
+ def __str__(self):
+ return "%s.%s(%r)" % (self.__module__, self.__class__.__name__,
+ self.bind_addr)
+
+ def _get_bind_addr(self):
+ return self._bind_addr
+ def _set_bind_addr(self, value):
+ if isinstance(value, tuple) and value[0] in ('', None):
+ # Despite the socket module docs, using '' does not
+ # allow AI_PASSIVE to work. Passing None instead
+ # returns '0.0.0.0' like we want. In other words:
+ # host AI_PASSIVE result
+ # '' Y 192.168.x.y
+ # '' N 192.168.x.y
+ # None Y 0.0.0.0
+ # None N 127.0.0.1
+ # But since you can get the same effect with an explicit
+ # '0.0.0.0', we deny both the empty string and None as values.
+ raise ValueError("Host values of '' or None are not allowed. "
+ "Use '0.0.0.0' (IPv4) or '::' (IPv6) instead "
+ "to listen on all active interfaces.")
+ self._bind_addr = value
+ bind_addr = property(_get_bind_addr, _set_bind_addr,
+ doc="""The interface on which to listen for connections.
+
+ For TCP sockets, a (host, port) tuple. Host values may be any IPv4
+ or IPv6 address, or any valid hostname. The string 'localhost' is a
+ synonym for '127.0.0.1' (or '::1', if your hosts file prefers IPv6).
+ The string '0.0.0.0' is a special IPv4 entry meaning "any active
+ interface" (INADDR_ANY), and '::' is the similar IN6ADDR_ANY for
+ IPv6. The empty string or None are not allowed.
+
+ For UNIX sockets, supply the filename as a string.""")
+
+ def start(self):
+ """Run the server forever."""
+ # We don't have to trap KeyboardInterrupt or SystemExit here,
+        # because cherrypy.server already does so, calling self.stop() for us.
+ # If you're using this server with another framework, you should
+ # trap those exceptions in whatever code block calls start().
+ self._interrupt = None
+
+ # Select the appropriate socket
+ if isinstance(self.bind_addr, basestring):
+ # AF_UNIX socket
+
+ # So we can reuse the socket...
+ try: os.unlink(self.bind_addr)
+ except: pass
+
+ # So everyone can access the socket...
+ try: os.chmod(self.bind_addr, 0777)
+ except: pass
+
+ info = [(socket.AF_UNIX, socket.SOCK_STREAM, 0, "", self.bind_addr)]
+ else:
+ # AF_INET or AF_INET6 socket
+ # Get the correct address family for our host (allows IPv6 addresses)
+ host, port = self.bind_addr
+ try:
+ info = socket.getaddrinfo(host, port, socket.AF_UNSPEC,
+ socket.SOCK_STREAM, 0, socket.AI_PASSIVE)
+ except socket.gaierror:
+ # Probably a DNS issue. Assume IPv4.
+ info = [(socket.AF_INET, socket.SOCK_STREAM, 0, "", self.bind_addr)]
+
+ self.socket = None
+ msg = "No socket could be created"
+ for res in info:
+ af, socktype, proto, canonname, sa = res
+ try:
+ self.bind(af, socktype, proto)
+ except socket.error, msg:
+ if self.socket:
+ self.socket.close()
+ self.socket = None
+ continue
+ break
+ if not self.socket:
+ raise socket.error, msg
+
+ # Timeout so KeyboardInterrupt can be caught on Win32
+ self.socket.settimeout(1)
+ self.socket.listen(self.request_queue_size)
+
+ # Create worker threads
+ self.requests.start()
+
+ self.ready = True
+ while self.ready:
+ self.tick()
+ if self.interrupt:
+ while self.interrupt is True:
+ # Wait for self.stop() to complete. See _set_interrupt.
+ time.sleep(0.1)
+ if self.interrupt:
+ raise self.interrupt
+
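+    # [Editor's note: illustrative usage sketch, not part of the original
+    # module.] As the comment in start() says, callers outside
+    # cherrypy.server should trap KeyboardInterrupt/SystemExit themselves,
+    # e.g. (assuming the enclosing class is CherryPyWSGIServer and `app`
+    # is any WSGI callable):
+    #
+    #     server = CherryPyWSGIServer(('0.0.0.0', 8080), app, numthreads=10)
+    #     try:
+    #         server.start()
+    #     except KeyboardInterrupt:
+    #         server.stop()
+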
+ def bind(self, family, type, proto=0):
+ """Create (or recreate) the actual socket object."""
+ self.socket = socket.socket(family, type, proto)
+ prevent_socket_inheritance(self.socket)
+ self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ if self.nodelay:
+ self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+ if self.ssl_certificate and self.ssl_private_key:
+ if SSL is None:
+ raise ImportError("You must install pyOpenSSL to use HTTPS.")
+
+ # See http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/442473
+ ctx = SSL.Context(SSL.SSLv23_METHOD)
+ ctx.use_privatekey_file(self.ssl_private_key)
+ ctx.use_certificate_file(self.ssl_certificate)
+ self.socket = SSLConnection(ctx, self.socket)
+ self.populate_ssl_environ()
+
+        # If listening on the IPv6 any address ('::' = IN6ADDR_ANY),
+ # activate dual-stack. See http://www.cherrypy.org/ticket/871.
+ if (not isinstance(self.bind_addr, basestring)
+ and self.bind_addr[0] == '::' and family == socket.AF_INET6):
+ try:
+ self.socket.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
+ except (AttributeError, socket.error):
+ # Apparently, the socket option is not available in
+ # this machine's TCP stack
+ pass
+
+ self.socket.bind(self.bind_addr)
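+
+        # [Editor's note: illustrative sketch, not part of the original
+        # module.] To serve HTTPS, point the server at PEM files before
+        # calling start(); pyOpenSSL must be installed. The paths below are
+        # placeholders:
+        #
+        #     server.ssl_certificate = '/path/to/cert.pem'
+        #     server.ssl_private_key = '/path/to/key.pem'
+        #     server.start()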
+
+ def tick(self):
+ """Accept a new connection and put it on the Queue."""
+ try:
+ s, addr = self.socket.accept()
+ prevent_socket_inheritance(s)
+ if not self.ready:
+ return
+ if hasattr(s, 'settimeout'):
+ s.settimeout(self.timeout)
+
+ environ = self.environ.copy()
+ # SERVER_SOFTWARE is common for IIS. It's also helpful for
+ # us to pass a default value for the "Server" response header.
+ if environ.get("SERVER_SOFTWARE") is None:
+ environ["SERVER_SOFTWARE"] = "%s WSGI Server" % self.version
+ # set a non-standard environ entry so the WSGI app can know what
+ # the *real* server protocol is (and what features to support).
+ # See http://www.faqs.org/rfcs/rfc2145.html.
+ environ["ACTUAL_SERVER_PROTOCOL"] = self.protocol
+ environ["SERVER_NAME"] = self.server_name
+
+ if isinstance(self.bind_addr, basestring):
+ # AF_UNIX. This isn't really allowed by WSGI, which doesn't
+ # address unix domain sockets. But it's better than nothing.
+ environ["SERVER_PORT"] = ""
+ else:
+ environ["SERVER_PORT"] = str(self.bind_addr[1])
+ # optional values
+ # Until we do DNS lookups, omit REMOTE_HOST
+ environ["REMOTE_ADDR"] = addr[0]
+ environ["REMOTE_PORT"] = str(addr[1])
+
+ conn = self.ConnectionClass(s, self.wsgi_app, environ)
+ self.requests.put(conn)
+ except socket.timeout:
+ # The only reason for the timeout in start() is so we can
+ # notice keyboard interrupts on Win32, which don't interrupt
+ # accept() by default
+ return
+ except socket.error, x:
+ if x.args[0] in socket_error_eintr:
+ # I *think* this is right. EINTR should occur when a signal
+ # is received during the accept() call; all docs say retry
+ # the call, and I *think* I'm reading it right that Python
+ # will then go ahead and poll for and handle the signal
+ # elsewhere. See http://www.cherrypy.org/ticket/707.
+ return
+ if x.args[0] in socket_errors_nonblocking:
+ # Just try again. See http://www.cherrypy.org/ticket/479.
+ return
+ if x.args[0] in socket_errors_to_ignore:
+ # Our socket was closed.
+ # See http://www.cherrypy.org/ticket/686.
+ return
+ raise
+
+ def _get_interrupt(self):
+ return self._interrupt
+ def _set_interrupt(self, interrupt):
+ self._interrupt = True
+ self.stop()
+ self._interrupt = interrupt
+ interrupt = property(_get_interrupt, _set_interrupt,
+ doc="Set this to an Exception instance to "
+ "interrupt the server.")
+
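+    # [Editor's note: illustrative sketch, not part of the original module.]
+    # The interrupt property lets another thread shut the serving loop down
+    # by handing it an exception; start() re-raises it in the serving thread
+    # once stop() has completed:
+    #
+    #     import threading
+    #     t = threading.Thread(target=server.start)
+    #     t.start()
+    #     # ... later, from any other thread:
+    #     server.interrupt = SystemExit("shutting down")
+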
+ def stop(self):
+ """Gracefully shutdown a server that is serving forever."""
+ self.ready = False
+
+ sock = getattr(self, "socket", None)
+ if sock:
+ if not isinstance(self.bind_addr, basestring):
+ # Touch our own socket to make accept() return immediately.
+ try:
+ host, port = sock.getsockname()[:2]
+ except socket.error, x:
+ if x.args[0] not in socket_errors_to_ignore:
+ raise
+ else:
+                    # Note that we're explicitly NOT using AI_PASSIVE here,
+                    # because we want an actual IP address to touch.
+ # localhost won't work if we've bound to a public IP,
+ # but it will if we bound to '0.0.0.0' (INADDR_ANY).
+ for res in socket.getaddrinfo(host, port, socket.AF_UNSPEC,
+ socket.SOCK_STREAM):
+ af, socktype, proto, canonname, sa = res
+ s = None
+ try:
+ s = socket.socket(af, socktype, proto)
+ # See http://groups.google.com/group/cherrypy-users/
+ # browse_frm/thread/bbfe5eb39c904fe0
+ s.settimeout(1.0)
+ s.connect((host, port))
+ s.close()
+ except socket.error:
+ if s:
+ s.close()
+ if hasattr(sock, "close"):
+ sock.close()
+ self.socket = None
+
+ self.requests.stop(self.shutdown_timeout)
+
+ def populate_ssl_environ(self):
+ """Create WSGI environ entries to be merged into each request."""
+        # Read the PEM-encoded certificate, closing the file promptly.
+        f = open(self.ssl_certificate, 'rb')
+        try:
+            cert = crypto.load_certificate(crypto.FILETYPE_PEM, f.read())
+        finally:
+            f.close()
+ ssl_environ = {
+ "wsgi.url_scheme": "https",
+ "HTTPS": "on",
+ # pyOpenSSL doesn't provide access to any of these AFAICT
+## 'SSL_PROTOCOL': 'SSLv2',
+## SSL_CIPHER string The cipher specification name
+## SSL_VERSION_INTERFACE string The mod_ssl program version
+## SSL_VERSION_LIBRARY string The OpenSSL program version
+ }
+
+ # Server certificate attributes
+ ssl_environ.update({
+ 'SSL_SERVER_M_VERSION': cert.get_version(),
+ 'SSL_SERVER_M_SERIAL': cert.get_serial_number(),
+## 'SSL_SERVER_V_START': Validity of server's certificate (start time),
+## 'SSL_SERVER_V_END': Validity of server's certificate (end time),
+ })
+
+ for prefix, dn in [("I", cert.get_issuer()),
+ ("S", cert.get_subject())]:
+ # X509Name objects don't seem to have a way to get the
+ # complete DN string. Use str() and slice it instead,
+ # because str(dn) == "<X509Name object '/C=US/ST=...'>"
+ dnstr = str(dn)[18:-2]
+
+ wsgikey = 'SSL_SERVER_%s_DN' % prefix
+ ssl_environ[wsgikey] = dnstr
+
+ # The DN should be of the form: /k1=v1/k2=v2, but we must allow
+ # for any value to contain slashes itself (in a URL).
+ while dnstr:
+ pos = dnstr.rfind("=")
+ dnstr, value = dnstr[:pos], dnstr[pos + 1:]
+ pos = dnstr.rfind("/")
+ dnstr, key = dnstr[:pos], dnstr[pos + 1:]
+ if key and value:
+ wsgikey = 'SSL_SERVER_%s_DN_%s' % (prefix, key)
+ ssl_environ[wsgikey] = value
+
+ self.environ.update(ssl_environ)
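+
+        # [Editor's note: illustrative sketch, not part of the original
+        # module.] For a certificate whose subject renders as
+        # '/C=US/ST=CA/O=Example/CN=example.com', the DN-parsing loop above
+        # produces these environ entries (issuer fields get the "I" prefix):
+        #
+        #     SSL_SERVER_S_DN      '/C=US/ST=CA/O=Example/CN=example.com'
+        #     SSL_SERVER_S_DN_C    'US'
+        #     SSL_SERVER_S_DN_ST   'CA'
+        #     SSL_SERVER_S_DN_O    'Example'
+        #     SSL_SERVER_S_DN_CN   'example.com'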
+