desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
@DictProperty('environ', 'bottle.request.files', read_only=True)
def files(self):
    """File uploads parsed from a `multipart/form-data` encoded POST or PUT
    request body.

    Returns a :class:`FormsDict` mapping field names to :class:`FileUpload`
    instances; plain (non-file) form fields are skipped.
    """
    uploads = FormsDict()
    for field, part in self.POST.allitems():
        if isinstance(part, FileUpload):
            uploads[field] = part
    return uploads
@DictProperty('environ', 'bottle.request.json', read_only=True)
def json(self):
    """Parsed request body for ``application/json`` (or json-rpc) requests.

    Only requests smaller than :attr:`MEMFILE_MAX` are processed to avoid
    memory exhaustion. Invalid JSON raises a 400 error response; any other
    content type (or an empty body) yields ``None``.
    """
    content_type = self.environ.get('CONTENT_TYPE', '').lower().split(';')[0]
    if content_type not in ('application/json', 'application/json-rpc'):
        return None
    raw = self._get_body_string()
    if not raw:
        return None
    try:
        return json_loads(raw)
    except (ValueError, TypeError):
        raise HTTPError(400, 'Invalid JSON')
def _get_body_string(self):
    """Read the body (up to content-length or MEMFILE_MAX) into a byte
    string. Raise HTTPError(413) on requests that are too large.
    """
    length = self.content_length
    if length > self.MEMFILE_MAX:
        raise HTTPError(413, 'Request entity too large')
    if length < 0:
        # Unknown length: read one byte past the limit so the size check
        # below can detect oversized bodies.
        length = self.MEMFILE_MAX + 1
    data = self.body.read(length)
    if len(data) > self.MEMFILE_MAX:
        raise HTTPError(413, 'Request entity too large')
    return data
@property
def body(self):
    """The HTTP request body as a seek-able file-like object.

    Depending on :attr:`MEMFILE_MAX` this is either a temporary file or an
    :class:`io.BytesIO` instance (buffered lazily by ``_body``). Every
    access rewinds the buffer to the start.
    """
    self._body.seek(0)
    return self._body
@property
def chunked(self):
    """True if the request used ``chunked`` transfer encoding."""
    transfer_encoding = self.environ.get('HTTP_TRANSFER_ENCODING', '')
    return 'chunked' in transfer_encoding.lower()
@DictProperty('environ', 'bottle.request.post', read_only=True)
def POST(self):
    """The values of :attr:`forms` and :attr:`files` combined into a single
    :class:`FormsDict`. Values are either strings (form values) or
    instances of :class:`cgi.FieldStorage` (file uploads).
    """
    post = FormsDict()
    # Non-multipart bodies are parsed as url-encoded key/value pairs.
    if (not self.content_type.startswith('multipart/')):
        pairs = _parse_qsl(tonat(self._get_body_string(), 'latin1'))
        for (key, value) in pairs:
            post[key] = value
        return post

    # Build a minimal environ for cgi.FieldStorage so it cannot fall back
    # to parsing QUERY_STRING.
    safe_env = {'QUERY_STRING': ''}
    for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'):
        if (key in self.environ):
            safe_env[key] = self.environ[key]
    args = dict(fp=self.body, environ=safe_env, keep_blank_values=True)
    if py3k:
        args['encoding'] = 'utf8'
    data = cgi.FieldStorage(**args)
    # NOTE(review): presumably stored to keep the FieldStorage (and its
    # temporary files) alive for the request's lifetime -- confirm.
    self['_cgi.FieldStorage'] = data
    data = (data.list or [])
    for item in data:
        if item.filename:
            post[item.name] = FileUpload(item.file, item.name,
                                         item.filename, item.headers)
        else:
            post[item.name] = item.value
    return post
@property
def url(self):
    """The full request URI including hostname and scheme.

    Behind a reverse proxy or load balancer, correct results depend on the
    ``X-Forwarded-Host`` header being set.
    """
    return self.urlparts.geturl()
@DictProperty('environ', 'bottle.request.urlparts', read_only=True)
def urlparts(self):
    """The :attr:`url` string as an :class:`urlparse.SplitResult` tuple.

    The tuple contains (scheme, host, path, query_string, fragment); the
    fragment is always empty because it is not visible to the server.
    """
    env = self.environ
    http = (env.get('HTTP_X_FORWARDED_PROTO')
            or env.get('wsgi.url_scheme', 'http'))
    host = (env.get('HTTP_X_FORWARDED_HOST') or env.get('HTTP_HOST'))
    if (not host):
        # Client did not send a Host header: fall back to the server
        # address and append the port unless it is the scheme default.
        host = env.get('SERVER_NAME', '127.0.0.1')
        port = env.get('SERVER_PORT')
        if (port and (port != ('80' if (http == 'http') else '443'))):
            host += (':' + port)
    path = urlquote(self.fullpath)
    return UrlSplitResult(http, host, path, env.get('QUERY_STRING'), '')
@property
def fullpath(self):
    """Request path including :attr:`script_name` (if present)."""
    return urljoin(self.script_name, self.path.lstrip('/'))
@property
def query_string(self):
    """The raw :attr:`query` part of the URL (everything in between ``?``
    and ``#``) as a string.
    """
    return self.environ.get('QUERY_STRING', '')
@property
def script_name(self):
    """The initial portion of the URL's path that was removed by a higher
    level (server or routing middleware) before the application was
    called. Always returned with leading and trailing slashes.
    """
    name = self.environ.get('SCRIPT_NAME', '').strip('/')
    return ('/' + name + '/') if name else '/'
def path_shift(self, shift=1):
    """Shift path segments from :attr:`path` to :attr:`script_name` and
    vice versa.

    :param shift: The number of path segments to shift. May be negative
                  to change the shift direction. (default: 1)
    """
    script, path = path_shift(self.environ.get('SCRIPT_NAME', '/'),
                              self.path, shift)
    self['SCRIPT_NAME'], self['PATH_INFO'] = script, path
@property
def content_length(self):
    """The request body length as an integer.

    The client is responsible for setting this header; if it is missing,
    -1 is returned and :attr:`body` will be empty.
    """
    return int(self.environ.get('CONTENT_LENGTH') or -1)
@property
def content_type(self):
    """The Content-Type header as a lowercase string (default: empty)."""
    return self.environ.get('CONTENT_TYPE', '').lower()
@property
def is_xhr(self):
    """True if the request was triggered by an XMLHttpRequest.

    Relies on the ``X-Requested-With`` header that most popular JavaScript
    libraries set.
    """
    return self.environ.get('HTTP_X_REQUESTED_WITH', '').lower() == 'xmlhttprequest'
@property
def is_ajax(self):
    """Alias for :attr:`is_xhr`. "Ajax" is not the right term."""
    return self.is_xhr
@property
def auth(self):
    """HTTP authentication data as a (user, password) tuple.

    Only basic (not digest) authentication is supported. If authentication
    happened at a higher level (front web-server or middleware), the
    password field is None and the user is taken from ``REMOTE_USER``.
    Returns None on any error.
    """
    credentials = parse_auth(self.environ.get('HTTP_AUTHORIZATION', ''))
    if credentials:
        return credentials
    remote_user = self.environ.get('REMOTE_USER')
    if remote_user:
        return (remote_user, None)
    return None
@property
def remote_route(self):
    """A list of all IPs involved in this request, starting with the
    client IP followed by zero or more proxies.

    Only works if every proxy sets the ``X-Forwarded-For`` header. Note
    that this information can be forged by malicious clients.
    """
    forwarded = self.environ.get('HTTP_X_FORWARDED_FOR')
    if forwarded:
        return [addr.strip() for addr in forwarded.split(',')]
    remote = self.environ.get('REMOTE_ADDR')
    return [remote] if remote else []
@property
def remote_addr(self):
    """The client IP as a string (forgeable by malicious clients)."""
    route = self.remote_route
    return route[0] if route else None
def copy(self):
    """Return a new :class:`Request` with a shallow :attr:`environ` copy."""
    return Request(self.environ.copy())
def __setitem__(self, key, value):
    """Change an environ value and clear all caches that depend on it.

    Raises KeyError if the environ is marked read-only.
    """
    if self.environ.get('bottle.request.readonly'):
        raise KeyError('The environ dictionary is read-only.')

    self.environ[key] = value
    todelete = ()

    if (key == 'wsgi.input'):
        todelete = ('body', 'forms', 'files', 'params', 'post', 'json')
    elif (key == 'QUERY_STRING'):
        todelete = ('query', 'params')
    elif key.startswith('HTTP_'):
        todelete = ('headers', 'cookies')

    # Drop cached values derived from the key that just changed.
    for key in todelete:
        self.environ.pop(('bottle.request.' + key), None)
def __getattr__(self, name):
    """Search in self.environ for additional user defined attributes."""
    try:
        var = self.environ['bottle.request.ext.%s' % name]
        # Descriptors stored as extensions are bound to this request.
        return var.__get__(self) if hasattr(var, '__get__') else var
    except KeyError:
        raise AttributeError('Attribute %r not defined.' % name)
def copy(self, cls=None):
    """Returns a copy of self.

    :param cls: Optional response class for the copy
                (default: :class:`BaseResponse`).
    """
    cls = cls or BaseResponse
    assert issubclass(cls, BaseResponse)
    clone = cls()
    clone.status = self.status
    # Copy each value list so the clone cannot mutate our headers.
    clone._headers = dict((k, v[:]) for (k, v) in self._headers.items())
    if self._cookies:
        clone._cookies = SimpleCookie()
        clone._cookies.load(self._cookies.output(header=''))
    return clone
@property
def status_line(self):
    """The HTTP status line as a string (e.g. ``404 Not Found``)."""
    return self._status_line
@property
def status_code(self):
    """The HTTP status code as an integer (e.g. 404)."""
    return self._status_code
@property
def headers(self):
    """An instance of :class:`HeaderDict`, a case-insensitive dict-like
    view on the response headers.
    """
    view = HeaderDict()
    view.dict = self._headers
    return view
def get_header(self, name, default=None):
    """Return the most recent value of a previously defined header, or
    *default* if no header with that name exists.
    """
    return self._headers.get(_hkey(name), [default])[-1]
def set_header(self, name, value):
    """Create a new response header, replacing any previously defined
    headers with the same name.
    """
    self._headers[_hkey(name)] = [_hval(value)]
def add_header(self, name, value):
    """Add an additional response header, not removing duplicates."""
    self._headers.setdefault(_hkey(name), []).append(_hval(value))
def iter_headers(self):
    """Yield (header, value) tuples, skipping headers that are not allowed
    with the current response status code.
    """
    return self.headerlist
@property
def headerlist(self):
    """WSGI conform list of (header, value) tuples."""
    out = []
    headers = list(self._headers.items())
    if ('Content-Type' not in self._headers):
        headers.append(('Content-Type', [self.default_content_type]))
    # Some status codes (e.g. 204, 304) forbid certain headers entirely.
    if (self._status_code in self.bad_headers):
        bad_headers = self.bad_headers[self._status_code]
        headers = [h for h in headers if (h[0] not in bad_headers)]
    out += [(name, val) for (name, vals) in headers for val in vals]
    if self._cookies:
        for c in self._cookies.values():
            out.append(('Set-Cookie', _hval(c.OutputString())))
    # WSGI on Python 3 wants native strings restricted to latin1.
    if py3k:
        out = [(k, v.encode('utf8').decode('latin1')) for (k, v) in out]
    return out
@property
def charset(self, default='UTF-8'):
    """Return the charset specified in the content-type header
    (default: utf8).
    """
    content_type = self.content_type
    if 'charset=' in content_type:
        return content_type.split('charset=')[-1].split(';')[0].strip()
    return default
def set_cookie(self, name, value, secret=None, digestmod=hashlib.sha256, **options):
    """Create a new cookie or replace an old one.

    If *secret* is set, create a signed cookie: the value is pickled,
    base64-encoded and signed with an HMAC so the client cannot forge it
    (it is NOT encrypted -- the client can still read it).

    :param name: the name of the cookie.
    :param value: the value of the cookie.
    :param secret: a signature key required for signed cookies.
    :param digestmod: hash algorithm used for the HMAC signature
                      (default: sha256).

    Additionally accepts the RFC 2109 attributes supported by
    :class:`cookie.Morsel`: ``max_age`` (seconds or timedelta),
    ``expires`` (datetime or UNIX timestamp), ``domain``, ``path``,
    ``secure``, ``httponly`` and ``same_site`` ('lax' or 'strict').

    Warning: pickle is a dangerous format; support for pickled cookie
    values is deprecated and will be removed. Cookies are limited to
    about 4kb in most browsers.
    """
    if (not self._cookies):
        self._cookies = SimpleCookie()

    # Register the non-standard attribute so Morsel accepts it.
    Morsel._reserved['same-site'] = 'SameSite'

    if secret:
        if (not isinstance(value, basestring)):
            depr(0, 13, 'Pickling of arbitrary objects into cookies is deprecated.',
                 'Only store strings in cookies. JSON strings are fine, too.')
        encoded = base64.b64encode(pickle.dumps([name, value], (-1)))
        sig = base64.b64encode(hmac.new(tob(secret), encoded,
                                        digestmod=digestmod).digest())
        value = touni((((tob('!') + sig) + tob('?')) + encoded))
    elif (not isinstance(value, basestring)):
        raise TypeError('Secret key required for non-string cookies.')

    # Stay safely below the ~4kb browser limit.
    if ((len(name) + len(value)) > 3800):
        raise ValueError('Content does not fit into a cookie.')

    self._cookies[name] = value

    for (key, value) in options.items():
        if (key == 'max_age'):
            if isinstance(value, timedelta):
                value = (value.seconds + ((value.days * 24) * 3600))
        if (key == 'expires'):
            if isinstance(value, (datedate, datetime)):
                value = value.timetuple()
            elif isinstance(value, (int, float)):
                value = time.gmtime(value)
            value = time.strftime('%a, %d %b %Y %H:%M:%S GMT', value)
        if ((key == 'same_site') and (value.lower() not in ('lax', 'strict'))):
            raise CookieError(('Invalid attribute %r' % (key,)))
        # Boolean flags are only emitted when truthy.
        if ((key in ('secure', 'httponly')) and (not value)):
            continue
        self._cookies[name][key.replace('_', '-')] = value
def delete_cookie(self, key, **kwargs):
    """Delete a cookie. Be sure to use the same `domain` and `path`
    settings as used to create the cookie.
    """
    # Expiring the cookie in the past makes the client drop it.
    kwargs['max_age'] = -1
    kwargs['expires'] = 0
    self.set_cookie(key, '', **kwargs)
def __init__(self, name, impmask):
    """Create a virtual package that redirects imports (see PEP 302)."""
    self.name = name
    self.impmask = impmask
    self.module = sys.modules.setdefault(name, imp.new_module(name))
    self.module.__dict__.update({
        '__file__': __file__,
        '__path__': [],
        '__all__': [],
        '__loader__': self,
    })
    # Register ourselves so the import machinery consults this redirect.
    sys.meta_path.append(self)
def get(self, key, default=None, index=-1, type=None):
    """Return the most recent value for a key.

    :param default: Returned if the key is missing or the type
                    conversion fails.
    :param index: An index into the list of available values.
    :param type: Optional callable used to cast the value; exceptions
                 are suppressed and result in *default*.
    """
    try:
        value = self.dict[key][index]
        return type(value) if type else value
    except Exception:
        # Deliberate best-effort: missing keys, bad indices and failed
        # casts all fall back to the default.
        pass
    return default
def append(self, key, value):
    """Add a new value to the list of values for this key."""
    self.dict.setdefault(key, []).append(value)
def replace(self, key, value):
    """Replace the list of values with a single value."""
    self.dict[key] = [value]
def getall(self, key):
    """Return a (possibly empty) list of values for a key."""
    return self.dict.get(key) or []
def decode(self, encoding=None):
    """Returns a copy with all keys and values de- or recoded to match
    :attr:`input_encoding`. Some libraries (e.g. WTForms) want a unicode
    dictionary.
    """
    clone = FormsDict()
    enc = clone.input_encoding = encoding or self.input_encoding
    clone.recode_unicode = False
    for key, value in self.allitems():
        clone.append(self._fix(key, enc), self._fix(value, enc))
    return clone
def getunicode(self, name, default=None, encoding=None):
    """Return the value as a unicode string, or the default."""
    try:
        return self._fix(self[name], encoding)
    except (UnicodeError, KeyError):
        return default
def _ekey(self, key):
    """Translate header field name to CGI/WSGI environ key."""
    key = key.replace('-', '_').upper()
    if key in self.cgikeys:
        return key
    return 'HTTP_' + key
def raw(self, key, default=None):
    """Return the header value as is (may be bytes or unicode)."""
    return self.environ.get(self._ekey(key), default)
def load_module(self, path, squash=True):
    """Load values from a Python module.

    Only uppercase module attributes become configuration values.

    >>> c = ConfigDict()
    >>> c.load_module('config')

    :param squash: If true (default), dictionary values are assumed to
                   represent namespaces (see :meth:`load_dict`).
    """
    config_obj = load(path)
    obj = {key: getattr(config_obj, key)
           for key in dir(config_obj) if key.isupper()}
    if squash:
        self.load_dict(obj)
    else:
        self.update(obj)
    return self
def load_config(self, filename, **options):
    """Load values from an ``*.ini`` style config file.

    Section and key names are joined with a dot to build the config key;
    the special sections ``bottle`` and ``ROOT`` refer to the root
    namespace and ``DEFAULT`` provides defaults for all sections. All
    keyword parameters are passed to the underlying
    :class:`python:configparser.ConfigParser` constructor.

    :param filename: The path of a config file, or a list of paths.
    """
    options.setdefault('allow_no_value', True)
    if py3k:
        options.setdefault('interpolation',
                           configparser.ExtendedInterpolation())
    conf = configparser.ConfigParser(**options)
    conf.read(filename)
    for section in conf.sections():
        for option in conf.options(section):
            value = conf.get(section, option)
            if section not in ['bottle', 'ROOT']:
                option = section + '.' + option
            self[option.lower()] = value
    return self
def load_dict(self, source, namespace=''):
    """Load values from a dictionary structure. Nesting can be used to
    represent namespaces.

    >>> c = ConfigDict()
    >>> c.load_dict({'some': {'namespace': {'key': 'value'} } })
    {'some.namespace.key': 'value'}
    """
    for key, value in source.items():
        if not isinstance(key, basestring):
            raise TypeError('Key has type %r (not a string)' % type(key))
        nskey = (namespace + '.' + key).strip('.')
        if isinstance(value, dict):
            # Nested dicts extend the namespace recursively.
            self.load_dict(value, namespace=nskey)
        else:
            self[nskey] = value
    return self
def update(self, *a, **ka):
    """If the first parameter is a string, all keys are prefixed with this
    namespace. Apart from that it works just as the usual dict.update().

    >>> c = ConfigDict()
    >>> c.update('some.namespace', key='value')
    """
    prefix = ''
    if a and isinstance(a[0], basestring):
        prefix = a[0].strip('.') + '.'
        a = a[1:]
    for key, value in dict(*a, **ka).items():
        self[prefix + key] = value
def _set_virtual(self, key, value):
    """Recursively set or update virtual keys. Do nothing if a
    non-virtual value is present.
    """
    if key in self and key not in self._virtual_keys:
        return  # Do not overwrite non-virtual (own) values.

    self._virtual_keys.add(key)
    if key in self and self[key] is not value:
        self._on_change(key, value)
    dict.__setitem__(self, key, value)

    # Propagate the virtual value depth-first into child overlays.
    for overlay in self._iter_overlays():
        overlay._set_virtual(key, value)
def _delete_virtual(self, key):
    """Recursively delete virtual entry. Do nothing if key is not
    virtual.
    """
    if key not in self._virtual_keys:
        return  # Do not touch non-virtual (own) values.

    if key in self:
        self._on_change(key, None)
    dict.__delitem__(self, key)
    self._virtual_keys.discard(key)

    # Propagate the deletion depth-first into child overlays.
    for overlay in self._iter_overlays():
        overlay._delete_virtual(key)
def meta_get(self, key, metafield, default=None):
    """Return the value of a meta field for a key."""
    return self._meta.get(key, {}).get(metafield, default)
def meta_set(self, key, metafield, value):
    """Set the meta field for a key to a new value."""
    self._meta.setdefault(key, {})[metafield] = value
def meta_list(self, key):
    """Return an iterable of meta field names defined for a key."""
    return self._meta.get(key, {}).keys()
def _define(self, key, default=_UNSET, help=_UNSET, validate=_UNSET):
    """(Unstable) Shortcut for plugins to define own config parameters."""
    if default is not _UNSET:
        self.setdefault(key, default)
    if help is not _UNSET:
        self.meta_set(key, 'help', help)
    if validate is not _UNSET:
        self.meta_set(key, 'validate', validate)
def _make_overlay(self):
    """(Unstable) Create a new overlay that acts like a chained map.

    Values missing in the overlay are actively copied from this source map
    and tracked as 'virtual' keys; both maps share the same meta entries.
    Changes propagate recursively, depth-first, to child overlays.
    Used by Route.config.
    """
    # Prune weak references to overlays that have been garbage collected.
    self._overlays[:] = [ref for ref in self._overlays if ref() is not None]

    overlay = ConfigDict()
    overlay._meta = self._meta
    overlay._source = self
    self._overlays.append(weakref.ref(overlay))
    for key in self:
        overlay._set_virtual(key, self[key])
    return overlay
def __call__(self):
    """Return the current default application."""
    return self.default
def push(self, value=None):
    """Add a new :class:`Bottle` instance to the stack."""
    if not isinstance(value, Bottle):
        value = Bottle()
    self.append(value)
    return value
def add_path(self, path, base=None, index=None, create=False):
    """Add a new path to the list of search paths. Return False if the
    path does not exist.

    :param path: The new search path. Relative paths are turned into an
                 absolute and normalized form; if the path looks like a
                 file (not ending in `/`), the filename is stripped off.
    :param base: Path used to absolutize relative search paths. Defaults
                 to :attr:`base` which defaults to ``os.getcwd()``.
    :param index: Position within the list of search paths
                  (default: append at the end).
    :param create: Create the directory if it is missing.
    """
    base = os.path.abspath(os.path.dirname(base or self.base))
    path = os.path.abspath(os.path.join(base, os.path.dirname(path)))
    path += os.sep
    if path in self.path:
        self.path.remove(path)
    if create and not os.path.isdir(path):
        os.makedirs(path)
    if index is None:
        self.path.append(path)
    else:
        self.path.insert(index, path)
    # Any cached lookup result may now be stale.
    self.cache.clear()
    return os.path.exists(path)
def __iter__(self):
    """Iterate over all existing files in all registered paths."""
    pending = self.path[:]
    while pending:
        current = pending.pop()
        if not os.path.isdir(current):
            continue
        for entry in os.listdir(current):
            full = os.path.join(current, entry)
            if os.path.isdir(full):
                pending.append(full)  # descend into subdirectories
            else:
                yield full
def lookup(self, name):
    """Search for a resource and return an absolute file path, or `None`.

    The :attr:`path` list is searched in order; the first match is
    returned (symlinks are followed). Results are cached according to
    :attr:`cachemode` unless DEBUG is set.
    """
    if name not in self.cache or DEBUG:
        for directory in self.path:
            fpath = os.path.join(directory, name)
            if os.path.isfile(fpath):
                if self.cachemode in ('all', 'found'):
                    self.cache[name] = fpath
                return fpath
        # Negative results are only cached in 'all' mode.
        if self.cachemode == 'all':
            self.cache[name] = None
    return self.cache[name]
def open(self, name, mode='r', *args, **kwargs):
    """Find a resource and return a file object, or raise IOError."""
    fname = self.lookup(name)
    if not fname:
        raise IOError('Resource %r not found.' % name)
    return self.opener(fname, mode=mode, *args, **kwargs)
def __init__(self, fileobj, name, filename, headers=None):
    """Wrapper for file uploads."""
    self.file = fileobj
    self.name = name
    self.raw_filename = filename
    self.headers = HeaderDict(headers) if headers else HeaderDict()
def get_header(self, name, default=None):
    """Return the value of a header within the multipart part."""
    return self.headers.get(name, default)
@cached_property
def filename(self):
    """Name of the file on the client file system, normalized for file
    system compatibility.

    Only ASCII letters, digits, dashes, underscores and dots are kept;
    accents are removed where possible, whitespace collapses to a single
    dash, leading/trailing dots and dashes are stripped and the result is
    limited to 255 characters. An empty result becomes 'empty'.
    """
    fname = self.raw_filename
    if not isinstance(fname, unicode):
        fname = fname.decode('utf8', 'ignore')
    # Strip accents, then drop anything outside ASCII.
    fname = normalize('NFKD', fname)
    fname = fname.encode('ASCII', 'ignore').decode('ASCII')
    # Drop any client-side directory components (both separator styles).
    fname = os.path.basename(fname.replace('\\', os.path.sep))
    fname = re.sub(r'[^a-zA-Z0-9-_.\s]', '', fname).strip()
    fname = re.sub(r'[-\s]+', '-', fname).strip('.-')
    return fname[:255] or 'empty'
def save(self, destination, overwrite=False, chunk_size=2 ** 16):
    """Save file to disk or copy its content to an open file(-like)
    object.

    :param destination: File path, directory or file(-like) object. If a
                        directory, :attr:`filename` is appended.
    :param overwrite: If True, replace existing files.
                      (default: False, raises IOError)
    :param chunk_size: Bytes to read at a time. (default: 64kb)
    """
    if not isinstance(destination, basestring):
        # Open file-like object: stream straight into it.
        self._copy_file(destination, chunk_size)
        return
    if os.path.isdir(destination):
        destination = os.path.join(destination, self.filename)
    if not overwrite and os.path.exists(destination):
        raise IOError('File exists.')
    with open(destination, 'wb') as fp:
        self._copy_file(fp, chunk_size)
def __init__(self, source=None, name=None, lookup=None, encoding='utf8', **settings):
    """Create a new template.

    If the *source* parameter (str or buffer) is missing, the *name*
    argument is used to guess a template filename. Subclasses can assume
    that self.source and/or self.filename are set (both strings).

    :param lookup: a list of directory paths to search for templates.
    :param encoding: used to decode byte strings or files.
    :param settings: engine-specific settings, stored as a dict.
    """
    self.name = name
    self.source = (source.read() if hasattr(source, 'read') else source)
    self.filename = (source.filename if hasattr(source, 'filename') else None)
    self.lookup = ([os.path.abspath(x) for x in lookup] if lookup else [])
    self.encoding = encoding
    # Class-level defaults, overridden by per-instance settings.
    self.settings = self.settings.copy()
    self.settings.update(settings)
    if ((not self.source) and self.name):
        self.filename = self.search(self.name, self.lookup)
        if (not self.filename):
            raise TemplateError(('Template %s not found.' % repr(name)))
    if ((not self.source) and (not self.filename)):
        raise TemplateError('No template specified.')
    self.prepare(**self.settings)
@classmethod
def search(cls, name, lookup=None):
    """Search name in all directories specified in lookup. First without,
    then with common extensions. Return first hit.
    """
    if (not lookup):
        raise depr(0, 12, 'Empty template lookup path.',
                   'Configure a template lookup path.')

    if os.path.isabs(name):
        raise depr(0, 12, 'Use of absolute path for template name.',
                   'Refer to templates with names or paths relative to the lookup path.')

    for spath in lookup:
        spath = (os.path.abspath(spath) + os.sep)
        fname = os.path.abspath(os.path.join(spath, name))
        # Reject names that escape the lookup directory (e.g. '../x').
        if (not fname.startswith(spath)):
            continue
        if os.path.isfile(fname):
            return fname
        for ext in cls.extensions:
            if os.path.isfile(('%s.%s' % (fname, ext))):
                return ('%s.%s' % (fname, ext))
@classmethod
def global_config(cls, key, *args):
    """This reads or sets the global settings stored in class.settings."""
    if args:
        # Copy-on-write so subclasses do not mutate their parent's dict.
        cls.settings = cls.settings.copy()
        cls.settings[key] = args[0]
    else:
        return cls.settings[key]
def prepare(self, **options):
    """Run preparations (parsing, caching, ...).

    It should be possible to call this again to refresh a template or to
    update settings.
    """
    raise NotImplementedError
def render(self, *args, **kwargs):
    """Render the template with the specified local variables and return
    a single byte or unicode string.

    If it is a byte string, the encoding must match self.encoding. This
    method must be thread-safe! Local variables may be provided in
    dictionaries (args) or directly, as keywords (kwargs).
    """
    raise NotImplementedError
def render(self, *args, **kwargs):
    """Render the template using keyword arguments as local variables."""
    env = {}
    stdout = []
    # Positional dicts first, keyword arguments override them.
    for dictarg in args:
        env.update(dictarg)
    env.update(kwargs)
    self.execute(stdout, env)
    return ''.join(stdout)
def get_syntax(self):
    """Tokens as a space separated string (default: <% %> % {{ }})"""
    return self._syntax
def render_compressed(self, package, package_name, package_type):
    """Render HTML for the package.

    If ``PIPELINE_ENABLED`` is True, render the package's output file
    (via :py:meth:`render_compressed_output`); otherwise render its
    source files (via :py:meth:`render_compressed_sources`). Subclasses
    can override this to customize what gets rendered.
    """
    if settings.PIPELINE_ENABLED:
        return self.render_compressed_output(package, package_name,
                                             package_type)
    return self.render_compressed_sources(package, package_name,
                                          package_type)
def render_compressed_output(self, package, package_name, package_type):
    """Render HTML for using the package's output file.

    Dispatches to the type-specific ``render_<package_type>`` method;
    subclasses can override for custom output-file rendering.
    """
    renderer = getattr(self, u'render_{0}'.format(package_type))
    return renderer(package, package.output_filename)
def render_compressed_sources(self, package, package_name, package_type):
    """Render HTML for using the package's list of source files.

    Each source file will first be collected, if
    ``PIPELINE_COLLECTOR_ENABLED`` is True. If compiling any source file
    fails and ``SHOW_ERRORS_INLINE`` is True, the errors are rendered
    inline instead of raising. Subclasses can override this method to
    provide custom behavior for rendering the source files.
    """
    if settings.PIPELINE_COLLECTOR_ENABLED:
        default_collector.collect(self.request)

    packager = Packager()
    method = getattr(self, u'render_individual_{0}'.format(package_type))

    try:
        paths = packager.compile(package.paths)
    except CompilerError as e:
        if settings.SHOW_ERRORS_INLINE:
            # Swap to the error renderer and show the failure on-page.
            method = getattr(self, u'render_error_{0}'.format(package_type))
            return method(package_name, e)
        else:
            raise

    templates = packager.pack_templates(package)
    return method(package, paths, templates=templates)
def compress_js(self, paths, templates=None, **kwargs):
    """Concatenate and compress JS files"""
    js = self.concatenate(paths)
    if templates:
        js = js + self.compile_templates(templates)
    if not settings.DISABLE_WRAPPER:
        js = settings.JS_WRAPPER % js
    compressor = self.js_compressor
    if compressor:
        js = getattr(compressor(verbose=self.verbose), u'compress_js')(js)
    return js
def compress_css(self, paths, output_filename, variant=None, **kwargs):
    """Concatenate and compress CSS files"""
    css = self.concatenate_and_rewrite(paths, output_filename, variant)
    compressor = self.css_compressor
    if compressor:
        css = getattr(compressor(verbose=self.verbose), u'compress_css')(css)
    if not variant:
        return css
    if variant == u'datauri':
        return self.with_data_uri(css)
    raise CompressorError(u'"%s" is not a valid variant' % variant)
def template_name(self, path, base):
    """Find out the name of a JS template"""
    if not base:
        path = os.path.basename(path)
    if path == base:
        base = os.path.dirname(path)
    # Strip the base prefix and the template extension, then replace
    # path separators with the configured template separator.
    name = re.sub(u'^%s[\\/\\\\]?(.*)%s$' % (re.escape(base),
                                             re.escape(settings.TEMPLATE_EXT)),
                  u'\\1', path)
    return re.sub(u'[\\/\\\\]', settings.TEMPLATE_SEPARATOR, name)
def concatenate_and_rewrite(self, paths, output_filename, variant=None):
    """Concatenate together files and rewrite urls."""
    rewritten = []
    for path in paths:
        def rewrite_url(match):
            # Preserve the original quoting style of the url() argument.
            quote = match.group(1) or u''
            target = match.group(2)
            # Absolute / data / protocol-relative URLs are left untouched.
            if NON_REWRITABLE_URL.match(target):
                return u'url(%s%s%s)' % (quote, target, quote)
            return u'url(%s)' % self.construct_asset_path(target, path, output_filename, variant)
        rewritten.append(re.sub(URL_DETECTOR, rewrite_url, self.read_text(path)))
    return u'\n'.join(rewritten)
def concatenate(self, paths):
    """Concatenate together a list of files.

    Files are joined with ``\\n;`` so a missing trailing semicolon in one
    file cannot corrupt the statement that follows it.
    """
    contents = (self.read_text(path) for path in paths)
    return u'\n;'.join(contents)
def construct_asset_path(self, asset_path, css_path, output_filename, variant=None):
    """Return a rewritten asset URL for a stylesheet."""
    # Resolve the asset relative to the directory of the referencing CSS
    # file (normalized to forward slashes).
    css_dir = os.path.dirname(css_path).replace(u'\\', u'/')
    public_path = self.absolute_path(asset_path, css_dir)
    if self.embeddable(public_path, variant):
        return u'__EMBED__%s' % public_path
    # Absolute references are kept verbatim; relative ones are rewritten
    # relative to the output stylesheet.
    if posixpath.isabs(asset_path):
        return asset_path
    return self.relative_path(public_path, output_filename)
def embeddable(self, path, variant):
    """Is the asset embeddable ?"""
    ext = os.path.splitext(path)[1]
    is_font = (ext in FONT_EXTS)
    # Embedding is opt-in via a variant; without one, never embed.
    if not variant:
        return False
    # Only assets under the configured embed path that actually exist.
    if not (re.search(settings.EMBED_PATH, path.replace(u'\\', u'/')) and self.storage.exists(path)):
        return False
    if ext not in EMBED_EXTS:
        return False
    # Fonts are always embedded; other assets only below the size limit.
    if is_font:
        return True
    return len(self.encoded_content(path)) < settings.EMBED_MAX_IMAGE_SIZE
def encoded_content(self, path):
    """Return the base64 encoded contents.

    Results are memoized on the class-level ``asset_contents`` dict so each
    asset is read and encoded at most once per process.
    """
    cache = self.__class__.asset_contents
    if path not in cache:
        cache[path] = force_text(base64.b64encode(self.read_bytes(path)))
    return cache[path]
def mime_type(self, path):
    """Get mime-type from filename.

    Raises KeyError for extensions missing from MIME_TYPES.
    """
    extension = os.path.splitext(path)[1]
    return MIME_TYPES[extension]
def absolute_path(self, path, start):
    """Return the absolute public path for an asset, given the path of
    the stylesheet that contains it.

    Absolute paths are anchored at the staticfiles storage location;
    relative paths are resolved against *start*.
    """
    root = staticfiles_storage.location if posixpath.isabs(path) else start
    return posixpath.normpath(posixpath.join(root, path))
def relative_path(self, absolute_path, output_filename):
    """Rewrite paths relative to the output stylesheet path."""
    # Anchor both the asset and the output stylesheet at PIPELINE_ROOT so
    # relpath compares them in the same coordinate space.
    full_path = posixpath.join(settings.PIPELINE_ROOT, absolute_path)
    output_dir = posixpath.join(settings.PIPELINE_ROOT, posixpath.dirname(output_filename))
    return relpath(full_path, output_dir)
def read_bytes(self, path):
    """Read file content in binary mode.

    Opens *path* through the staticfiles storage backend and returns its
    raw bytes.

    The handle is closed in a ``finally`` clause so it is released even if
    ``read()`` raises (the original code leaked the handle in that case).
    """
    file = staticfiles_storage.open(path)
    try:
        return file.read()
    finally:
        file.close()
def find(self, path, all=False):
    """Looks for files in PIPELINE.STYLESHEETS and PIPELINE.JAVASCRIPT"""
    target = normpath(path)
    results = []
    for group in chain(settings.STYLESHEETS.values(), settings.JAVASCRIPT.values()):
        if normpath(group['output_filename']) != target:
            continue
        found = safe_join(settings.PIPELINE_ROOT, path)
        # Short-circuit on the first match unless all matches were asked for.
        if not all:
            return found
        results.append(found)
    return results
def find(self, path, all=False):
    """Work out the uncached name of the file and look that up instead.

    A cached name looks like ``name.<hash>.ext``; dropping the middle
    component recovers the original ``name.ext``.
    """
    try:
        stem, _, extension = path.rsplit('.', 2)
    except ValueError:
        # Fewer than two dots: not a cached name, nothing to find.
        return []
    uncached = '.'.join((stem, extension))
    return find(uncached, all=all) or []
def __init__(self, get_media_files_func, media_cls, extra_files):
    """Initialize the property.

    Args:
        get_media_files_func (callable):
            The function to call to generate the media files.

        media_cls (type):
            The Media class owning the property.

        extra_files (object):
            Files listed in the original ``css`` or ``js`` attribute on
            the Media class.
    """
    self._media_cls = media_cls
    self._extra_files = extra_files
    self._get_media_files_func = get_media_files_func
@cached_property
def _media_files(self):
    """The media files represented by the property (computed once)."""
    build = self._get_media_files_func
    return build(self._media_cls, self._extra_files)
def __get__(self, *args, **kwargs):
    """Return the media files when accessed as an attribute.

    This is called when accessing the attribute directly through the
    Media class (for example, ``Media.css``). It returns the media files
    directly.

    Args:
        *args (tuple, unused):
            Unused positional arguments.

        **kwargs (dict, unused):
            Unused keyword arguments.

    Returns:
        object:
        The list or dictionary containing the media files definition.
    """
    files = self._media_files
    return files
def __getattr__(self, attr_name):
    """Forward attribute access to the media files definition.

    This is called when accessing an attribute that doesn't otherwise
    exist in the property's dictionary. The call is forwarded onto the
    media files definition.

    Args:
        attr_name (unicode):
            The attribute name.

    Returns:
        object:
        The attribute value.

    Raises:
        AttributeError:
            An attribute with this name could not be found.
    """
    target = self._media_files
    return getattr(target, attr_name)
def __iter__(self):
    """Iterate through the media files definition.

    This is called when attempting to iterate over this property. It
    iterates over the media files definition instead.

    Yields:
        object:
        Each entry in the media files definition.
    """
    files = self._media_files
    return iter(files)
def __new__(cls, name, bases, attrs):
    """Construct the class.

    When the class declares ``css_packages`` or ``js_packages``, the
    corresponding ``css``/``js`` attribute is replaced with a
    :py:class:`PipelineFormMediaProperty` that lazily resolves the
    packaged media files, folding in any explicitly listed files.

    Args:
        name (bytes):
            The name of the class.

        bases (tuple):
            The base classes for the class.

        attrs (dict):
            The attributes going into the class.

    Returns:
        type:
        The new class.
    """
    media_cls = super(PipelineFormMediaMetaClass, cls).__new__(cls, name, bases, attrs)

    if u'css_packages' in attrs:
        extra_css = attrs.get(u'css') or {}
        media_cls.css = PipelineFormMediaProperty(cls._get_css_files, media_cls, extra_css)

    if u'js_packages' in attrs:
        extra_js = attrs.get(u'js') or []
        media_cls.js = PipelineFormMediaProperty(cls._get_js_files, media_cls, extra_js)

    return media_cls
def _get_css_files(cls, extra_files):
    """Return all CSS files from the Media class.

    Args:
        extra_files (dict):
            The contents of the Media class's original :py:attr:`css`
            attribute, if one was provided.

    Returns:
        dict:
        The CSS media types and files to return for the :py:attr:`css`
        attribute.
    """
    packager = Packager()
    css_packages = getattr(cls, u'css_packages', {})

    return {
        media_target: cls._get_media_files(
            packager=packager,
            media_packages=media_packages,
            media_type=u'css',
            extra_files=extra_files.get(media_target, []))
        for media_target, media_packages in six.iteritems(css_packages)
    }
def _get_js_files(cls, extra_files):
    """Return all JavaScript files from the Media class.

    Args:
        extra_files (list):
            The contents of the Media class's original :py:attr:`js`
            attribute, if one was provided.

    Returns:
        list:
        The JavaScript files to return for the :py:attr:`js` attribute.
    """
    js_packages = getattr(cls, u'js_packages', {})
    return cls._get_media_files(packager=Packager(),
                                media_packages=js_packages,
                                media_type=u'js',
                                extra_files=extra_files)
def _get_media_files(cls, packager, media_packages, media_type, extra_files):
    """Return source or output media files for a list of packages.

    This will go through the media files belonging to the provided list
    of packages referenced in a Media class and return the output files
    (if Pipeline is enabled) or the source files (if not enabled).

    Args:
        packager (pipeline.packager.Packager):
            The packager responsible for media compilation for this type
            of package.

        media_packages (list of unicode):
            The list of media packages referenced in Media to compile or
            return.

        media_type (unicode):
            The type of media being processed (``css`` or ``js``).

        extra_files (list of unicode):
            The list of extra files to include in the result. This would
            be the list stored in the Media class's original
            :py:attr:`css` or :py:attr:`js` attributes.

    Returns:
        list:
        The list of media files for the given packages.
    """
    pipeline_enabled = settings.PIPELINE_ENABLED
    media_files = list(extra_files)

    # In source mode, make sure assets have been collected first.
    if not pipeline_enabled and settings.PIPELINE_COLLECTOR_ENABLED:
        default_collector.collect()

    for media_package in media_packages:
        package = packager.package_for(media_type, media_package)

        if pipeline_enabled:
            media_files.append(staticfiles_storage.url(package.output_filename))
        else:
            media_files += packager.compile(package.paths)

    return media_files
def execute_command(self, command, cwd=None, stdout_captured=None):
    """Execute a command at cwd, saving its normal output at stdout_captured.

    Errors, defined as nonzero return code or a failure to start execution,
    will raise a CompilerError exception with a description of the cause.
    They do not write output.

    This is file-system safe (any valid file names are allowed, even with
    spaces or crazy characters) and OS agnostic (existing and future OSes
    that Python supports should already work).

    The only thing weird here is that any incoming command arg item may
    itself be a tuple. This allows compiler implementations to look clean
    while supporting historical string config settings and maintaining
    backwards compatibility. Thus, we flatten one layer deep:

        ((env, foocomp), infile, (-arg,)) -> (env, foocomp, infile, -arg)
    """
    # Flatten one layer: each command element is either a string or an
    # iterable of strings (see docstring).
    argument_list = []
    for flattening_arg in command:
        if isinstance(flattening_arg, string_types):
            argument_list.append(flattening_arg)
        else:
            argument_list.extend(flattening_arg)

    # Drop empty/None entries left over from optional config settings.
    # NOTE(review): on Python 3 `filter` returns an iterator, which the
    # error-formatting below would render as `<filter object ...>`; this
    # code path appears to target Python 2 (see the `print` statements).
    argument_list = filter(None, argument_list)

    stdout = None
    try:
        # Capture stdout in a temp file created next to the final
        # destination (or cwd), so the move in `finally` stays on the
        # same filesystem where possible.
        temp_file_container = (cwd or os.path.dirname((stdout_captured or u'')) or os.getcwd())
        with NamedTemporaryFile(delete=False, dir=temp_file_container) as stdout:
            compiling = subprocess.Popen(argument_list, cwd=cwd, stdout=stdout, stderr=subprocess.PIPE)
            (_, stderr) = compiling.communicate()
            set_std_streams_blocking()
        if (compiling.returncode != 0):
            # Clear the capture target so `finally` deletes the temp file
            # instead of saving an erroneous result.
            stdout_captured = None
            raise CompilerError(u'{0!r} exit code {1}\n{2}'.format(argument_list, compiling.returncode, stderr), command=argument_list, error_output=stderr)
        # User wants to see everything that happened.
        if self.verbose:
            with open(stdout.name) as out:
                print out.read()
                print stderr
    except OSError as e:
        # The command could not be started at all (e.g. missing binary).
        stdout_captured = None
        raise CompilerError(e, command=argument_list, error_output=text_type(e))
    finally:
        # Either promote the temp file to the requested capture path, or
        # clean it up; `delete=False` above makes us responsible for it.
        if stdout:
            if stdout_captured:
                shutil.move(stdout.name, os.path.join((cwd or os.curdir), stdout_captured))
            else:
                os.remove(stdout.name)
@override_settings(STATICFILES_STORAGE=u'tests.tests.test_storage.PipelineNoPathStorage')
@pipeline_settings(JS_COMPRESSOR=None, CSS_COMPRESSOR=None, COMPILERS=[u'tests.tests.test_storage.DummyCSSCompiler'])
def test_post_process_no_path(self):
    """Test post_process with a storage that doesn't implement the path method."""
    # Re-initialize the lazy storage wrapper so the overridden
    # STATICFILES_STORAGE setting takes effect for this test.
    staticfiles_storage._setup()
    try:
        call_command(u'collectstatic', verbosity=0, interactive=False)
    except NotImplementedError:
        # NotImplementedError would mean some code path called
        # Storage.path(), which this storage deliberately lacks.
        self.fail(u'Received an error running collectstatic')
def _write_content(self, content=u'abc123'):
    """Write sample content to the test static file."""
    handle = self.storage.open(self.filename, u'w')
    with handle as f:
        f.write(content)