desc | decl | bodies |
---|---|---|
'Return the bytecode as a string.'
| def bytecode_to_string(self):
| out = StringIO()
self.write_bytecode(out)
return out.getvalue()
|
'Subclasses have to override this method to load bytecode into a
bucket. If they are not able to find code in the cache for the
bucket, they must not do anything.'
| def load_bytecode(self, bucket):
| raise NotImplementedError()
|
'Subclasses have to override this method to write the bytecode
from a bucket back to the cache. If it is unable to do so it must not
fail silently but raise an exception.'
| def dump_bytecode(self, bucket):
| raise NotImplementedError()
|
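These two hooks are all a concrete bytecode cache has to supply. Below is a minimal sketch of an in-memory variant, assuming the `Bucket` API used elsewhere in these rows (`bucket.key`, `bucket.code`); the names `MemoryBytecodeCache` and `BytecodeCache` are illustrative, and a real cache would normally serialize the bytecode rather than keep code objects around:

```python
class MemoryBytecodeCache(BytecodeCache):
    """Illustrative cache that keeps compiled code objects in a plain dict."""

    def __init__(self):
        self.storage = {}

    def load_bytecode(self, bucket):
        # Nothing cached for this key: per the contract above, do nothing.
        code = self.storage.get(bucket.key)
        if code is not None:
            bucket.code = code

    def dump_bytecode(self, bucket):
        self.storage[bucket.key] = bucket.code
```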
'Returns the unique hash key for this template name.'
| def get_cache_key(self, name, filename=None):
| hash = sha1(name.encode('utf-8'))
if (filename is not None):
if isinstance(filename, unicode):
filename = filename.encode('utf-8')
hash.update(('|' + filename))
return hash.hexdigest()
|
'Returns a checksum for the source.'
| def get_source_checksum(self, source):
| return sha1(source.encode('utf-8')).hexdigest()
|
'Return a cache bucket for the given template. All arguments are
mandatory but filename may be `None`.'
| def get_bucket(self, environment, name, filename, source):
| key = self.get_cache_key(name, filename)
checksum = self.get_source_checksum(source)
bucket = Bucket(environment, key, checksum)
self.load_bytecode(bucket)
return bucket
|
'Put the bucket into the cache.'
| def set_bucket(self, bucket):
| self.dump_bytecode(bucket)
|
'Get the template source, filename and reload helper for a template.
It\'s passed the environment and template name and has to return a
tuple in the form ``(source, filename, uptodate)`` or raise a
`TemplateNotFound` error if it can\'t locate the template.
The source part of the returned tuple must be the source of the
template as a unicode string or an ASCII bytestring. The filename should
be the name of the file on the filesystem if it was loaded from there,
otherwise `None`. The filename is used by python for the tracebacks
if no loader extension is used.
The last item in the tuple is the `uptodate` function. If auto
reloading is enabled it\'s always called to check if the template
changed. No arguments are passed so the function must store the
old state somewhere (for example in a closure). If it returns `False`
the template will be reloaded.'
| def get_source(self, environment, template):
| raise TemplateNotFound(template)
|
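To make the `(source, filename, uptodate)` contract concrete, here is a hedged sketch of a loader backed by a plain dict; `DictLikeLoader`, `BaseLoader` and `mapping` are illustrative names rather than identifiers taken from the rows above:

```python
class DictLikeLoader(BaseLoader):
    def __init__(self, mapping):
        self.mapping = mapping

    def get_source(self, environment, template):
        if template not in self.mapping:
            raise TemplateNotFound(template)
        source = self.mapping[template]
        # Not loaded from a file, so filename is None.  The uptodate callable
        # closes over the source and reports False once the stored text changes.
        return source, None, lambda: self.mapping.get(template) == source
```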
'Loads a template. This method looks up the template in the cache
or loads one by calling :meth:`get_source`. Subclasses should not
override this method as loaders working on collections of other
loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`)
will not call this method but `get_source` directly.'
| @internalcode
def load(self, environment, name, globals=None):
| code = None
if (globals is None):
globals = {}
(source, filename, uptodate) = self.get_source(environment, name)
bcc = environment.bytecode_cache
if (bcc is not None):
bucket = bcc.get_bucket(environment, name, filename, source)
code = bucket.code
if (code is None):
code = environment.compile(source, name, filename)
if ((bcc is not None) and (bucket.code is None)):
bucket.code = code
bcc.set_bucket(bucket)
return environment.template_class.from_code(environment, code, globals, uptodate)
|
'Add the items to the instance of the environment if they do not exist
yet. This is used by :ref:`extensions <writing-extensions>` to register
callbacks and configuration values without breaking inheritance.'
| def extend(self, **attributes):
| for (key, value) in attributes.iteritems():
if (not hasattr(self, key)):
setattr(self, key, value)
|
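A hedged usage sketch: an extension would typically call this from its constructor to register a default it can rely on later (the attribute name below is invented):

```python
# inside an Extension subclass __init__ (attribute name is illustrative)
environment.extend(my_extension_cache_prefix='')
```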
'Create a new overlay environment that shares all the data with the
current environment except the cache and the overridden attributes.
Extensions cannot be removed from an overlayed environment. An overlayed
environment automatically gets all the extensions of the environment it
is linked to plus optional extra extensions.
Creating overlays should happen after the initial environment was set
up completely. Not all attributes are truly linked, some are just
copied over so modifications on the original environment may not shine
through.'
| def overlay(self, block_start_string=missing, block_end_string=missing, variable_start_string=missing, variable_end_string=missing, comment_start_string=missing, comment_end_string=missing, line_statement_prefix=missing, line_comment_prefix=missing, trim_blocks=missing, extensions=missing, optimized=missing, undefined=missing, finalize=missing, autoescape=missing, loader=missing, cache_size=missing, auto_reload=missing, bytecode_cache=missing):
| args = dict(locals())
del args['self'], args['cache_size'], args['extensions']
rv = object.__new__(self.__class__)
rv.__dict__.update(self.__dict__)
rv.overlayed = True
rv.linked_to = self
for (key, value) in args.iteritems():
if (value is not missing):
setattr(rv, key, value)
if (cache_size is not missing):
rv.cache = create_cache(cache_size)
else:
rv.cache = copy_cache(self.cache)
rv.extensions = {}
for (key, value) in self.extensions.iteritems():
rv.extensions[key] = value.bind(rv)
if (extensions is not missing):
rv.extensions.update(load_extensions(extensions))
return _environment_sanity_check(rv)
|
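Usage sketch, assuming an already configured `env`; the overlay keeps everything from `env` except the swapped delimiters, and `linked_to` points back at the original as set in the body above:

```python
env = Environment()
alt = env.overlay(variable_start_string='${', variable_end_string='}')
assert alt.linked_to is env and alt.overlayed
```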
'Get an item or attribute of an object but prefer the item.'
| def getitem(self, obj, argument):
| try:
return obj[argument]
except (TypeError, LookupError):
if isinstance(argument, basestring):
try:
attr = str(argument)
except:
pass
else:
try:
return getattr(obj, attr)
except AttributeError:
pass
return self.undefined(obj=obj, name=argument)
|
'Get an item or attribute of an object but prefer the attribute.
Unlike :meth:`getitem` the attribute *must* be a bytestring.'
| def getattr(self, obj, attribute):
| try:
return getattr(obj, attribute)
except AttributeError:
pass
try:
return obj[attribute]
except (TypeError, LookupError, AttributeError):
return self.undefined(obj=obj, name=attribute)
|
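The asymmetry between the two lookups matters when a mapping key collides with a method name; a small hedged example, with `env` standing in for an Environment instance:

```python
data = {'items': [1, 2, 3]}
env.getitem(data, 'items')   # subscription wins: [1, 2, 3]
env.getattr(data, 'items')   # the attribute wins: the bound dict.items method
```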
'Parse the sourcecode and return the abstract syntax tree. This
tree of nodes is used by the compiler to convert the template into
executable source- or bytecode. This is useful for debugging or to
extract information from templates.
If you are :ref:`developing Jinja2 extensions <writing-extensions>`
this gives you a good overview of the node tree generated.'
| @internalcode
def parse(self, source, name=None, filename=None):
| try:
return self._parse(source, name, filename)
except TemplateSyntaxError:
exc_info = sys.exc_info()
self.handle_exception(exc_info, source_hint=source)
|
'Internal parsing function used by `parse` and `compile`.'
| def _parse(self, source, name, filename):
| if isinstance(filename, unicode):
filename = filename.encode('utf-8')
return Parser(self, source, name, filename).parse()
|
'Lex the given sourcecode and return a generator that yields
tokens as tuples in the form ``(lineno, token_type, value)``.
This can be useful for :ref:`extension development <writing-extensions>`
and debugging templates.
This does not perform preprocessing. If you want the preprocessing
of the extensions to be applied you have to filter source through
the :meth:`preprocess` method.'
| def lex(self, source, name=None, filename=None):
| source = unicode(source)
try:
return self.lexer.tokeniter(source, name, filename)
except TemplateSyntaxError:
exc_info = sys.exc_info()
self.handle_exception(exc_info, source_hint=source)
|
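Hedged usage sketch of the `(lineno, token_type, value)` stream described above; the exact token type names are indicative only:

```python
for lineno, token_type, value in env.lex(u'Hello {{ name }}!'):
    print lineno, token_type, value
# roughly: 1 data u'Hello ', 1 variable_begin u'{{', 1 name u'name', ...
```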
'Preprocesses the source with all extensions. This is automatically
called for all parsing and compiling methods but *not* for :meth:`lex`
because there you usually only want the actual source tokenized.'
| def preprocess(self, source, name=None, filename=None):
| return reduce((lambda s, e: e.preprocess(s, name, filename)), self.extensions.itervalues(), unicode(source))
|
'Called by the parser to do the preprocessing and filtering
for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`.'
| def _tokenize(self, source, name, filename=None, state=None):
| source = self.preprocess(source, name, filename)
stream = self.lexer.tokenize(source, name, filename, state)
for ext in self.extensions.itervalues():
stream = ext.filter_stream(stream)
if (not isinstance(stream, TokenStream)):
stream = TokenStream(stream, name, filename)
return stream
|
'Compile a node or template source code. The `name` parameter is
the load name of the template after it was joined using
:meth:`join_path` if necessary, not the filename on the file system.
The `filename` parameter is the estimated filename of the template on
the file system. If the template came from a database or memory this
can be omitted.
The return value of this method is a python code object. If the `raw`
parameter is `True` the return value will be a string with python
code equivalent to the bytecode returned otherwise. This method is
mainly used internally.'
| @internalcode
def compile(self, source, name=None, filename=None, raw=False):
| source_hint = None
try:
if isinstance(source, basestring):
source_hint = source
source = self._parse(source, name, filename)
if self.optimized:
source = optimize(source, self)
source = generate(source, self, name, filename)
if raw:
return source
if (filename is None):
filename = '<template>'
elif isinstance(filename, unicode):
filename = filename.encode('utf-8')
return compile(source, filename, 'exec')
except TemplateSyntaxError:
exc_info = sys.exc_info()
self.handle_exception(exc_info, source_hint=source)
|
'A handy helper method that returns a callable that accepts keyword
arguments that appear as variables in the expression. If called it
returns the result of the expression.
This is useful if applications want to use the same rules as Jinja
in template "configuration files" or similar situations.
Example usage:
>>> env = Environment()
>>> expr = env.compile_expression(\'foo == 42\')
>>> expr(foo=23)
False
>>> expr(foo=42)
True
Per default the return value is converted to `None` if the
expression returns an undefined value. This can be changed
by setting `undefined_to_none` to `False`.
>>> env.compile_expression(\'var\')() is None
True
>>> env.compile_expression(\'var\', undefined_to_none=False)()
Undefined
**new in Jinja 2.1**'
| def compile_expression(self, source, undefined_to_none=True):
| parser = Parser(self, source, state='variable')
exc_info = None
try:
expr = parser.parse_expression()
if (not parser.stream.eos):
raise TemplateSyntaxError('chunk after expression', parser.stream.current.lineno, None, None)
except TemplateSyntaxError:
exc_info = sys.exc_info()
if (exc_info is not None):
self.handle_exception(exc_info, source_hint=source)
body = [nodes.Assign(nodes.Name('result', 'store'), expr, lineno=1)]
template = self.from_string(nodes.Template(body, lineno=1))
return TemplateExpression(template, undefined_to_none)
|
'Exception handling helper. This is used internally to either raise
rewritten exceptions or return a rendered traceback for the template.'
| def handle_exception(self, exc_info=None, rendered=False, source_hint=None):
| global _make_traceback
if (exc_info is None):
exc_info = sys.exc_info()
if (_make_traceback is None):
from jinja2.debug import make_traceback as _make_traceback
traceback = _make_traceback(exc_info, source_hint)
if (rendered and (self.exception_formatter is not None)):
return self.exception_formatter(traceback)
if (self.exception_handler is not None):
self.exception_handler(traceback)
(exc_type, exc_value, tb) = traceback.standard_exc_info
raise exc_type, exc_value, tb
|
'Join a template with the parent. By default all the lookups are
relative to the loader root so this method returns the `template`
parameter unchanged, but if the paths should be relative to the
parent template, this function can be used to calculate the real
template name.
Subclasses may override this method and implement template path
joining here.'
| def join_path(self, template, parent):
| return template
|
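A sketch of the kind of override the docstring invites, resolving template names relative to the parent template; `RelativeEnvironment` is a made-up name and `posixpath` is used because template names are slash-separated:

```python
import posixpath

class RelativeEnvironment(Environment):
    def join_path(self, template, parent):
        return posixpath.normpath(
            posixpath.join(posixpath.dirname(parent), template))
```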
'Load a template from the loader. If a loader is configured this
method asks the loader for the template and returns a :class:`Template`.
If the `parent` parameter is not `None`, :meth:`join_path` is called
to get the real template name before loading.
The `globals` parameter can be used to provide template wide globals.
These variables are available in the context at render time.
If the template does not exist a :exc:`TemplateNotFound` exception is
raised.'
| @internalcode
def get_template(self, name, parent=None, globals=None):
| if (self.loader is None):
raise TypeError('no loader for this environment specified')
if (parent is not None):
name = self.join_path(name, parent)
if (self.cache is not None):
template = self.cache.get(name)
if ((template is not None) and ((not self.auto_reload) or template.is_up_to_date)):
return template
template = self.loader.load(self, name, self.make_globals(globals))
if (self.cache is not None):
self.cache[name] = template
return template
|
'Load a template from a string. This parses the source given and
returns a :class:`Template` object.'
| def from_string(self, source, globals=None, template_class=None):
| globals = self.make_globals(globals)
cls = (template_class or self.template_class)
return cls.from_code(self, self.compile(source), globals, None)
|
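Typical usage of the two entry points as a hedged sketch; the loader is the `DictLikeLoader` sketched earlier and the template name is invented:

```python
env = Environment(loader=DictLikeLoader({'index.html': u'Hello {{ name }}!'}))
tmpl = env.get_template('index.html')      # goes through the loader and the cache
print tmpl.render(name='World')            # -> Hello World!

inline = env.from_string(u'{{ 40 + 2 }}')  # no loader involved
print inline.render()                      # -> 42
```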
'Return a dict for the globals.'
| def make_globals(self, d):
| if (not d):
return self.globals
return dict(self.globals, **d)
|
'Creates a template object from compiled code and the globals. This
is used by the loaders and environment to create a template object.'
| @classmethod
def from_code(cls, environment, code, globals, uptodate=None):
| t = object.__new__(cls)
namespace = {'environment': environment, '__jinja_template__': t}
exec code in namespace
t.environment = environment
t.globals = globals
t.name = namespace['name']
t.filename = code.co_filename
t.blocks = namespace['blocks']
t.root_render_func = namespace['root']
t._module = None
t._debug_info = namespace['debug_info']
t._uptodate = uptodate
return t
|
'This method accepts the same arguments as the `dict` constructor:
A dict, a dict subclass or some keyword arguments. If no arguments
are given the context will be empty. These two calls do the same::
template.render(knights=\'that say nih\')
template.render({\'knights\': \'that say nih\'})
This will return the rendered template as unicode string.'
| def render(self, *args, **kwargs):
| vars = dict(*args, **kwargs)
try:
return concat(self.root_render_func(self.new_context(vars)))
except:
exc_info = sys.exc_info()
return self.environment.handle_exception(exc_info, True)
|
'Works exactly like :meth:`generate` but returns a
:class:`TemplateStream`.'
| def stream(self, *args, **kwargs):
| return TemplateStream(self.generate(*args, **kwargs))
|
'For very large templates it can be useful to not render the whole
template at once but to evaluate the statements one by one and yield
the output piece by piece. This method does exactly that and returns
a generator that yields one item after another as unicode strings.
It accepts the same arguments as :meth:`render`.'
| def generate(self, *args, **kwargs):
| vars = dict(*args, **kwargs)
try:
for event in self.root_render_func(self.new_context(vars)):
(yield event)
except:
exc_info = sys.exc_info()
else:
return
(yield self.environment.handle_exception(exc_info, True))
|
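Hedged sketch of streaming a large render to disk instead of building one big string; `tmpl` and `big_row_iterator` are placeholders:

```python
with open('report.html', 'w') as fp:
    for piece in tmpl.generate(rows=big_row_iterator):  # same arguments as render()
        fp.write(piece.encode('utf-8'))
```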
'Create a new :class:`Context` for this template. The vars
provided will be passed to the template. Per default the globals
are added to the context. If shared is set to `True` the data
is passed as-is to the context without adding the globals.
`locals` can be a dict of local variables for internal usage.'
| def new_context(self, vars=None, shared=False, locals=None):
| return new_context(self.environment, self.name, self.blocks, vars, shared, self.globals, locals)
|
'This method works like the :attr:`module` attribute when called
without arguments but it will evaluate the template on every call
rather than caching it. It\'s also possible to provide
a dict which is then used as context. The arguments are the same
as for the :meth:`new_context` method.'
| def make_module(self, vars=None, shared=False, locals=None):
| return TemplateModule(self, self.new_context(vars, shared, locals))
|
'The template as module. This is used for imports in the
template runtime but is also useful if one wants to access
exported template variables from the Python layer:
>>> t = Template(\'{% macro foo() %}42{% endmacro %}23\')
>>> unicode(t.module)
u\'23\'
>>> t.module.foo()
u\'42\''
| @property
def module(self):
| if (self._module is not None):
return self._module
self._module = rv = self.make_module()
return rv
|
'Return the source line number of a line number in the
generated bytecode as they are not in sync.'
| def get_corresponding_lineno(self, lineno):
| for (template_line, code_line) in reversed(self.debug_info):
if (code_line <= lineno):
return template_line
return 1
|
'If this variable is `False` there is a newer version available.'
| @property
def is_up_to_date(self):
| if (self._uptodate is None):
return True
return self._uptodate()
|
'The debug info mapping.'
| @property
def debug_info(self):
| return [tuple(map(int, x.split('='))) for x in self._debug_info.split('&')]
|
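The `_debug_info` string is the compact `'template_line=code_line'` mapping written by the code generator; for illustration, a value like `'1=8&3=11'` decodes to `[(1, 8), (3, 11)]`:

```python
[tuple(map(int, x.split('='))) for x in '1=8&3=11'.split('&')]   # [(1, 8), (3, 11)]
```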
'Dump the complete stream into a file or file-like object.
Per default unicode strings are written; if you want to encode
before writing, specify an `encoding`.
Example usage::
Template(\'Hello {{ name }}!\').stream(name=\'foo\').dump(\'hello.html\')'
| def dump(self, fp, encoding=None, errors='strict'):
| close = False
if isinstance(fp, basestring):
fp = file(fp, 'w')
close = True
try:
if (encoding is not None):
iterable = (x.encode(encoding, errors) for x in self)
else:
iterable = self
if hasattr(fp, 'writelines'):
fp.writelines(iterable)
else:
for item in iterable:
fp.write(item)
finally:
if close:
fp.close()
|
'Disable the output buffering.'
| def disable_buffering(self):
| self._next = self._gen.next
self.buffered = False
|
'Enable buffering. Buffer `size` items before yielding them.'
| def enable_buffering(self, size=5):
| if (size <= 1):
raise ValueError('buffer size too small')
def generator(next):
buf = []
c_size = 0
push = buf.append
while 1:
try:
while (c_size < size):
c = next()
push(c)
if c:
c_size += 1
except StopIteration:
if (not c_size):
return
(yield concat(buf))
del buf[:]
c_size = 0
self.buffered = True
self._next = generator(self._gen.next).next
|
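Hedged usage sketch combining :meth:`stream` with buffering; `tmpl` and `fp` are placeholders:

```python
stream = tmpl.stream(name='World')
stream.enable_buffering(size=10)   # concatenate roughly ten events per yielded chunk
for chunk in stream:
    fp.write(chunk)
```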
'Eliminate dead code.'
| def visit_If(self, node):
| if (node.find(nodes.Block) is not None):
return self.generic_visit(node)
try:
val = self.visit(node.test).as_const()
except nodes.Impossible:
return self.generic_visit(node)
if val:
body = node.body
else:
body = node.else_
result = []
for node in body:
result.extend(self.visit_list(node))
return result
|
'Do constant folding.'
| def fold(self, node):
| node = self.generic_visit(node)
try:
return nodes.Const.from_untrusted(node.as_const(), lineno=node.lineno, environment=self.environment)
except nodes.Impossible:
return node
|
'Chains the frames. Requires ctypes or the speedups extension.'
| def chain_frames(self):
| prev_tb = None
for tb in self.frames:
if (prev_tb is not None):
prev_tb.tb_next = tb
prev_tb = tb
prev_tb.tb_next = None
|
'Return a string with the traceback.'
| def render_as_text(self, limit=None):
| lines = traceback.format_exception(self.exc_type, self.exc_value, self.frames[0], limit=limit)
return ''.join(lines).rstrip()
|
'Return a unicode string with the traceback as rendered HTML.'
| def render_as_html(self, full=False):
| from jinja2.debugrenderer import render_traceback
return (u'%s\n\n<!--\n%s\n-->' % (render_traceback(self, full=full), self.render_as_text().decode('utf-8', 'replace')))
|
'`True` if this is a template syntax error.'
| @property
def is_template_syntax_error(self):
| return isinstance(self.exc_value, TemplateSyntaxError)
|
'Exception info tuple with a proxy around the frame objects.'
| @property
def exc_info(self):
| return (self.exc_type, self.exc_value, self.frames[0])
|
'Standard python exc_info for re-raising.'
| @property
def standard_exc_info(self):
| return (self.exc_type, self.exc_value, self.frames[0].tb)
|
'Register a special name like `loop`.'
| def add_special(self, name):
| self.undeclared.discard(name)
self.declared.add(name)
|
'Check if a name is declared in this or an outer scope.'
| def is_declared(self, name, local_only=False):
| if ((name in self.declared_locally) or (name in self.declared_parameter)):
return True
if local_only:
return False
return (name in self.declared)
|
'Create a copy of the current frame.'
| def copy(self):
| rv = object.__new__(self.__class__)
rv.__dict__.update(self.__dict__)
rv.identifiers = object.__new__(self.identifiers.__class__)
rv.identifiers.__dict__.update(self.identifiers.__dict__)
return rv
|
'Walk the node and check for identifiers. If the scope is hard (e.g.
enforced on a Python level) overrides from outer scopes are tracked
differently.'
| def inspect(self, nodes, hard_scope=False):
| visitor = FrameIdentifierVisitor(self.identifiers, hard_scope)
for node in nodes:
visitor.visit(node)
|
'Find all the shadowed names. extra is an iterable of variables
that may be defined with `add_special` which may occur scoped.'
| def find_shadowed(self, extra=()):
| i = self.identifiers
return (((i.declared | i.outer_undeclared) & (i.declared_locally | i.declared_parameter)) | set((x for x in extra if i.is_declared(x))))
|
'Return an inner frame.'
| def inner(self):
| return Frame(self)
|
'Return a soft frame. A soft frame may not be modified as a
standalone thing as it shares its resources with the frame it
was created from, but it\'s not a rootlevel frame any longer.'
| def soft(self):
| rv = self.copy()
rv.rootlevel = False
return rv
|
'All assignments to names go through this function.'
| def visit_Name(self, node):
| if (node.ctx == 'store'):
self.identifiers.declared_locally.add(node.name)
elif (node.ctx == 'param'):
self.identifiers.declared_parameter.add(node.name)
elif ((node.ctx == 'load') and (not self.identifiers.is_declared(node.name, self.hard_scope))):
self.identifiers.undeclared.add(node.name)
|
'Visit assignments in the correct order.'
| def visit_Assign(self, node):
| self.visit(node.node)
self.visit(node.target)
|
'Visiting stops at for blocks. However the block sequence
is visited as part of the outer scope.'
| def visit_For(self, node):
| self.visit(node.iter)
|
'Fail with a `TemplateAssertionError`.'
| def fail(self, msg, lineno):
| raise TemplateAssertionError(msg, lineno, self.name, self.filename)
|
'Get a new unique identifier.'
| def temporary_identifier(self):
| self._last_identifier += 1
return ('t_%d' % self._last_identifier)
|
'Enable buffering for the frame from that point onwards.'
| def buffer(self, frame):
| frame.buffer = self.temporary_identifier()
self.writeline(('%s = []' % frame.buffer))
|
'Return the buffer contents of the frame.'
| def return_buffer_contents(self, frame):
| if self.environment.autoescape:
self.writeline(('return Markup(concat(%s))' % frame.buffer))
else:
self.writeline(('return concat(%s)' % frame.buffer))
|
'Indent by one.'
| def indent(self):
| self._indentation += 1
|
'Outdent by step.'
| def outdent(self, step=1):
| self._indentation -= step
|
'Yield or write into the frame buffer.'
| def start_write(self, frame, node=None):
| if (frame.buffer is None):
self.writeline('yield ', node)
else:
self.writeline(('%s.append(' % frame.buffer), node)
|
'End the writing process started by `start_write`.'
| def end_write(self, frame):
| if (frame.buffer is not None):
self.write(')')
|
'Simple shortcut for start_write + write + end_write.'
| def simple_write(self, s, frame, node=None):
| self.start_write(frame, node)
self.write(s)
self.end_write(frame)
|
'Visit a list of nodes as a block in a frame. If the current frame
has no buffer a dummy ``if 0: yield None`` is written automatically
unless the force_generator parameter is set to False.'
| def blockvisit(self, nodes, frame):
| if (frame.buffer is None):
self.writeline('if 0: yield None')
else:
self.writeline('pass')
try:
for node in nodes:
self.visit(node, frame)
except CompilerExit:
pass
|
'Write a string into the output stream.'
| def write(self, x):
| if self._new_lines:
if (not self._first_write):
self.stream.write(('\n' * self._new_lines))
self.code_lineno += self._new_lines
if (self._write_debug_info is not None):
self.debug_info.append((self._write_debug_info, self.code_lineno))
self._write_debug_info = None
self._first_write = False
self.stream.write((' ' * self._indentation))
self._new_lines = 0
self.stream.write(x)
|
'Combination of newline and write.'
| def writeline(self, x, node=None, extra=0):
| self.newline(node, extra)
self.write(x)
|
'Add one or more newlines before the next write.'
| def newline(self, node=None, extra=0):
| self._new_lines = max(self._new_lines, (1 + extra))
if ((node is not None) and (node.lineno != self._last_line)):
self._write_debug_info = node.lineno
self._last_line = node.lineno
|
'Writes a function call to the stream for the current node.
A leading comma is added automatically. The extra keyword
arguments may not include python keywords, otherwise a syntax
error could occur. The extra keyword arguments should be given
as a python dict.'
| def signature(self, node, frame, extra_kwargs=None):
| kwarg_workaround = False
for kwarg in chain((x.key for x in node.kwargs), (extra_kwargs or ())):
if is_python_keyword(kwarg):
kwarg_workaround = True
break
for arg in node.args:
self.write(', ')
self.visit(arg, frame)
if (not kwarg_workaround):
for kwarg in node.kwargs:
self.write(', ')
self.visit(kwarg, frame)
if (extra_kwargs is not None):
for (key, value) in extra_kwargs.iteritems():
self.write((', %s=%s' % (key, value)))
if node.dyn_args:
self.write(', *')
self.visit(node.dyn_args, frame)
if kwarg_workaround:
if (node.dyn_kwargs is not None):
self.write(', **dict({')
else:
self.write(', **{')
for kwarg in node.kwargs:
self.write(('%r: ' % kwarg.key))
self.visit(kwarg.value, frame)
self.write(', ')
if (extra_kwargs is not None):
for (key, value) in extra_kwargs.iteritems():
self.write(('%r: %s, ' % (key, value)))
if (node.dyn_kwargs is not None):
self.write('}, **')
self.visit(node.dyn_kwargs, frame)
self.write(')')
else:
self.write('}')
elif (node.dyn_kwargs is not None):
self.write(', **')
self.visit(node.dyn_kwargs, frame)
|
'Pull all the referenced identifiers into the local scope.'
| def pull_locals(self, frame):
| for name in frame.identifiers.undeclared:
self.writeline(('l_%s = context.resolve(%r)' % (name, name)))
|
'Pull all the dependencies.'
| def pull_dependencies(self, nodes):
| visitor = DependencyFinderVisitor()
for node in nodes:
visitor.visit(node)
for dependency in ('filters', 'tests'):
mapping = getattr(self, dependency)
for name in getattr(visitor, dependency):
if (name not in mapping):
mapping[name] = self.temporary_identifier()
self.writeline(('%s = environment.%s[%r]' % (mapping[name], dependency, name)))
|
'Disable Python optimizations for the frame.'
| def unoptimize_scope(self, frame):
| if frame.identifiers.declared:
self.writeline(('if 0: dummy(%s)' % ', '.join((('l_' + name) for name in frame.identifiers.declared))))
|
'This function returns all the shadowed variables in a dict
in the form name: alias and will write the required assignments
into the current scope. No indentation takes place.
This also predefines locally declared variables from the loop
body because under some circumstances it may be the case that
`extra_vars` is passed to `Frame.find_shadowed`.'
| def push_scope(self, frame, extra_vars=()):
| aliases = {}
for name in frame.find_shadowed(extra_vars):
aliases[name] = ident = self.temporary_identifier()
self.writeline(('%s = l_%s' % (ident, name)))
to_declare = set()
for name in frame.identifiers.declared_locally:
if (name not in aliases):
to_declare.add(('l_' + name))
if to_declare:
self.writeline((' = '.join(to_declare) + ' = missing'))
return aliases
|
'Restore all aliases and delete unused variables.'
| def pop_scope(self, aliases, frame):
| for (name, alias) in aliases.iteritems():
self.writeline(('l_%s = %s' % (name, alias)))
to_delete = set()
for name in frame.identifiers.declared_locally:
if (name not in aliases):
to_delete.add(('l_' + name))
if to_delete:
self.writeline((' = '.join(to_delete) + ' = missing'))
|
'In Jinja a few statements require the help of anonymous
functions. Those are currently macros and call blocks and in
the future also recursive loops. As there is currently a
technical limitation that doesn\'t allow reading and writing a
variable in a scope where the initial value is coming from an
outer scope, this function tries to fall back with a common
error message. Additionally the frame passed is modified so
that the arguments are collected and callers are looked up.
This will return the modified frame.'
| def function_scoping(self, node, frame, children=None, find_special=True):
| if (children is None):
children = node.iter_child_nodes()
children = list(children)
func_frame = frame.inner()
func_frame.inspect(children, hard_scope=True)
overriden_closure_vars = ((func_frame.identifiers.undeclared & func_frame.identifiers.declared) & (func_frame.identifiers.declared_locally | func_frame.identifiers.declared_parameter))
if overriden_closure_vars:
self.fail(("It's not possible to set and access variables derived from an outer scope! (affects: %s)" % ', '.join(sorted(overriden_closure_vars))), node.lineno)
func_frame.identifiers.undeclared -= (func_frame.identifiers.undeclared & func_frame.identifiers.declared)
if (not find_special):
return func_frame
func_frame.accesses_kwargs = False
func_frame.accesses_varargs = False
func_frame.accesses_caller = False
func_frame.arguments = args = [('l_' + x.name) for x in node.args]
undeclared = find_undeclared(children, ('caller', 'kwargs', 'varargs'))
if ('caller' in undeclared):
func_frame.accesses_caller = True
func_frame.identifiers.add_special('caller')
args.append('l_caller')
if ('kwargs' in undeclared):
func_frame.accesses_kwargs = True
func_frame.identifiers.add_special('kwargs')
args.append('l_kwargs')
if ('varargs' in undeclared):
func_frame.accesses_varargs = True
func_frame.identifiers.add_special('varargs')
args.append('l_varargs')
return func_frame
|
'Dump the function def of a macro or call block.'
| def macro_body(self, node, frame, children=None):
| frame = self.function_scoping(node, frame, children)
frame.require_output_check = False
args = frame.arguments
if ('loop' in frame.identifiers.declared):
args = (args + ['l_loop=l_loop'])
self.writeline(('def macro(%s):' % ', '.join(args)), node)
self.indent()
self.buffer(frame)
self.pull_locals(frame)
self.blockvisit(node.body, frame)
self.return_buffer_contents(frame)
self.outdent()
return frame
|
'Dump the macro definition for the def created by macro_body.'
| def macro_def(self, node, frame):
| arg_tuple = ', '.join((repr(x.name) for x in node.args))
name = getattr(node, 'name', None)
if (len(node.args) == 1):
arg_tuple += ','
self.write(('Macro(environment, macro, %r, (%s), (' % (name, arg_tuple)))
for arg in node.defaults:
self.visit(arg, frame)
self.write(', ')
self.write(('), %r, %r, %r)' % (bool(frame.accesses_kwargs), bool(frame.accesses_varargs), bool(frame.accesses_caller))))
|
'Return a human-readable position for the node.'
| def position(self, node):
| rv = ('line %d' % node.lineno)
if (self.name is not None):
rv += (' in ' + repr(self.name))
return rv
|
'Call a block and register it for the template.'
| def visit_Block(self, node, frame):
| level = 1
if frame.toplevel:
if self.has_known_extends:
return
if (self.extends_so_far > 0):
self.writeline('if parent_template is None:')
self.indent()
level += 1
if node.scoped:
context = 'context.derived(locals())'
else:
context = 'context'
self.writeline(('for event in context.blocks[%r][0](%s):' % (node.name, context)), node)
self.indent()
self.simple_write('event', frame)
self.outdent(level)
|
'Calls the extender.'
| def visit_Extends(self, node, frame):
| if (not frame.toplevel):
self.fail('cannot use extend from a non top-level scope', node.lineno)
if (self.extends_so_far > 0):
if (not self.has_known_extends):
self.writeline('if parent_template is not None:')
self.indent()
self.writeline(('raise TemplateRuntimeError(%r)' % 'extended multiple times'))
self.outdent()
if self.has_known_extends:
raise CompilerExit()
self.writeline('parent_template = environment.get_template(', node)
self.visit(node.template, frame)
self.write((', %r)' % self.name))
self.writeline('for name, parent_block in parent_template.blocks.iteritems():')
self.indent()
self.writeline('context.blocks.setdefault(name, []).append(parent_block)')
self.outdent()
if frame.rootlevel:
self.has_known_extends = True
self.extends_so_far += 1
|
'Handles includes.'
| def visit_Include(self, node, frame):
| if node.with_context:
self.unoptimize_scope(frame)
if node.ignore_missing:
self.writeline('try:')
self.indent()
self.writeline('template = environment.get_template(', node)
self.visit(node.template, frame)
self.write((', %r)' % self.name))
if node.ignore_missing:
self.outdent()
self.writeline('except TemplateNotFound:')
self.indent()
self.writeline('pass')
self.outdent()
self.writeline('else:')
self.indent()
if node.with_context:
self.writeline('for event in template.root_render_func(template.new_context(context.parent, True, locals())):')
else:
self.writeline('for event in template.module._body_stream:')
self.indent()
self.simple_write('event', frame)
self.outdent()
if node.ignore_missing:
self.outdent()
|
'Visit regular imports.'
| def visit_Import(self, node, frame):
| if node.with_context:
self.unoptimize_scope(frame)
self.writeline(('l_%s = ' % node.target), node)
if frame.toplevel:
self.write(('context.vars[%r] = ' % node.target))
self.write('environment.get_template(')
self.visit(node.template, frame)
self.write((', %r).' % self.name))
if node.with_context:
self.write('make_module(context.parent, True, locals())')
else:
self.write('module')
if (frame.toplevel and (not node.target.startswith('_'))):
self.writeline(('context.exported_vars.discard(%r)' % node.target))
frame.assigned_names.add(node.target)
|
'Visit named imports.'
| def visit_FromImport(self, node, frame):
| self.newline(node)
self.write('included_template = environment.get_template(')
self.visit(node.template, frame)
self.write((', %r).' % self.name))
if node.with_context:
self.write('make_module(context.parent, True)')
else:
self.write('module')
var_names = []
discarded_names = []
for name in node.names:
if isinstance(name, tuple):
(name, alias) = name
else:
alias = name
self.writeline(('l_%s = getattr(included_template, %r, missing)' % (alias, name)))
self.writeline(('if l_%s is missing:' % alias))
self.indent()
self.writeline(('l_%s = environment.undefined(%r %% included_template.__name__, name=%r)' % (alias, ('the template %%r (imported on %s) does not export the requested name %s' % (self.position(node), repr(name))), name)))
self.outdent()
if frame.toplevel:
var_names.append(alias)
if (not alias.startswith('_')):
discarded_names.append(alias)
frame.assigned_names.add(alias)
if var_names:
if (len(var_names) == 1):
name = var_names[0]
self.writeline(('context.vars[%r] = l_%s' % (name, name)))
else:
self.writeline(('context.vars.update({%s})' % ', '.join((('%r: l_%s' % (name, name)) for name in var_names))))
if discarded_names:
if (len(discarded_names) == 1):
self.writeline(('context.exported_vars.discard(%r)' % discarded_names[0]))
else:
self.writeline(('context.exported_vars.difference_update((%s))' % ', '.join(map(repr, discarded_names))))
|
'A rule for ignoring issues.'
| def __init__(self, docname, lineno, issue, line):
| self.docname = docname
self.lineno = lineno
self.issue = issue
self.line = line
|
'Determine whether this issue should be ignored.'
| def is_ignored(self, line, lineno, issue):
| docname = self.docname
for rule in self.rules:
if (rule.docname != docname):
continue
if (rule.issue != issue):
continue
if (rule.line not in line):
continue
if ((rule.lineno is not None) and (abs((rule.lineno - lineno)) > 5)):
continue
return True
return False
|
'Load database of previously ignored issues.
A CSV file with exactly the same format as suspicious.csv.
Fields: document name (normalized), line number, issue, surrounding text.'
| def load_rules(self, filename):
| self.info('loading ignore rules... ', nonl=1)
self.rules = rules = []
try:
f = open(filename, 'rb')
except IOError:
return
for (i, row) in enumerate(csv.reader(f)):
if (len(row) != 4):
raise ValueError(('wrong format in %s, line %d: %s' % (filename, (i + 1), row)))
(docname, lineno, issue, text) = row
docname = docname.decode('utf-8')
if lineno:
lineno = int(lineno)
else:
lineno = None
issue = issue.decode('utf-8')
text = text.decode('utf-8')
rule = Rule(docname, lineno, issue, text)
rules.append(rule)
f.close()
self.info(('done, %d rules loaded' % len(self.rules)))
|
'Reconstruct from frozen data.'
| def load(self, stream, format):
| if isinstance(format, basestring):
format = self.formats[format]
frozen = format.load(stream)
if (not isinstance(frozen, dict)):
raise ValueError('old format')
index2fn = frozen['filenames']
self._titles = dict(zip(index2fn, frozen['titles']))
self._mapping = {}
for (k, v) in frozen['terms'].iteritems():
if isinstance(v, int):
self._mapping[k] = set([index2fn[v]])
else:
self._mapping[k] = set((index2fn[i] for i in v))
|
'Dump the frozen index to a stream.'
| def dump(self, stream, format):
| if isinstance(format, basestring):
format = self.formats[format]
format.dump(self.freeze(), stream)
|
'Create a usable data structure for serializing.'
| def freeze(self):
| filenames = self._titles.keys()
titles = self._titles.values()
fn2index = dict(((f, i) for (i, f) in enumerate(filenames)))
return dict(filenames=filenames, titles=titles, terms=self.get_terms(fn2index), descrefs=self.get_descrefs(fn2index), modules=self.get_modules(fn2index), desctypes=dict(((v, k) for (k, v) in self._desctypes.items())))
|
'Remove data for all filenames not in the list.'
| def prune(self, filenames):
| new_titles = {}
for filename in filenames:
if (filename in self._titles):
new_titles[filename] = self._titles[filename]
self._titles = new_titles
for wordnames in self._mapping.itervalues():
wordnames.intersection_update(filenames)
|
'Feed a doctree to the index.'
| def feed(self, filename, title, doctree):
| self._titles[filename] = title
visitor = WordCollector(doctree)
doctree.walk(visitor)
def add_term(word, prefix='', stem=self._stemmer.stem):
word = stem(word)
if ((len(word) < 3) or (word in stopwords) or word.isdigit()):
return
self._mapping.setdefault((prefix + word), set()).add(filename)
for word in word_re.findall(title):
add_term(word)
for word in visitor.found_words:
add_term(word)
|
'Convert field lists with known keys inside the description content into
better-looking equivalents.'
| def handle_doc_fields(self, node):
| for child in node.children:
if (not isinstance(child, nodes.field_list)):
continue
params = []
pfield = None
param_nodes = {}
param_types = {}
new_list = nodes.field_list()
for field in child:
(fname, fbody) = field
try:
(typ, obj) = fname.astext().split(None, 1)
typdesc = _(self.doc_fields_with_arg[typ])
if _is_only_paragraph(fbody):
children = fbody.children[0].children
else:
children = fbody.children
if (typdesc == '%param'):
if (not params):
pfield = nodes.field()
new_list += pfield
dlitem = nodes.list_item()
dlpar = nodes.paragraph()
dlpar += nodes.emphasis(obj, obj)
dlpar += nodes.Text(' -- ', ' -- ')
dlpar += children
param_nodes[obj] = dlpar
dlitem += dlpar
params.append(dlitem)
elif (typdesc == '%type'):
typenodes = fbody.children
if _is_only_paragraph(fbody):
typenodes = (([nodes.Text(' (')] + typenodes[0].children) + [nodes.Text(')')])
param_types[obj] = typenodes
else:
fieldname = (typdesc + ' ')
nfield = nodes.field()
nfieldname = nodes.field_name(fieldname, fieldname)
nfield += nfieldname
node = nfieldname
if (typ in self.doc_fields_with_linked_arg):
node = addnodes.pending_xref(obj, reftype='obj', refcaption=False, reftarget=obj, modname=self.env.currmodule, classname=self.env.currclass)
nfieldname += node
node += nodes.Text(obj, obj)
nfield += nodes.field_body()
nfield[1] += fbody.children
new_list += nfield
except (KeyError, ValueError):
fnametext = fname.astext()
try:
typ = _(self.doc_fields_without_arg[fnametext])
except KeyError:
typ = fnametext.capitalize()
fname[0] = nodes.Text(typ)
new_list += field
if params:
if (len(params) == 1):
pfield += nodes.field_name('', _('Parameter'))
pfield += nodes.field_body()
pfield[1] += params[0][0]
else:
pfield += nodes.field_name('', _('Parameters'))
pfield += nodes.field_body()
pfield[1] += nodes.bullet_list()
pfield[1][0].extend(params)
for (param, type) in param_types.iteritems():
if (param in param_nodes):
param_nodes[param][1:1] = type
child.replace_self(new_list)
|
'Retrieve the signatures to document from the directive arguments.'
| def get_signatures(self):
| return [strip_backslash_re.sub('', sig.strip()) for sig in self.arguments[0].split('\n')]
|
'Parse the signature *sig* into individual nodes and append them to
*signode*. If ValueError is raised, parsing is aborted and the whole
*sig* is put into a single desc_name node.'
| def parse_signature(self, sig, signode):
| raise ValueError
|
'Add cross-reference IDs and entries to self.indexnode, if applicable.'
| def add_target_and_index(self, name, sig, signode):
| return
|
'Called before parsing content. Used to set information about the current
directive context on the build environment.'
| def before_content(self):
| pass
|
'Called after parsing content. Used to reset information about the
current directive context on the build environment.'
| def after_content(self):
| pass
|
'May return a prefix to put before the object name in the signature.'
| def get_signature_prefix(self, sig):
| return ''
|
'May return true if an empty argument list is to be generated even if
the document contains none.'
| def needs_arglist(self):
| return False
|
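A hedged sketch of how these directive hooks fit together for a simple object type; the class name and base class are invented, while `addnodes.desc_name` follows the usual Sphinx pattern:

```python
from sphinx import addnodes

class SimpleCommandDirective(ObjectDescriptionBase):   # hypothetical base class
    def get_signature_prefix(self, sig):
        return 'command '

    def needs_arglist(self):
        return False

    def parse_signature(self, sig, signode):
        # The whole signature is treated as the name; raising ValueError here
        # would make the caller fall back to a single desc_name node.
        name = sig.strip()
        signode += addnodes.desc_name(name, name)
        return name
```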