author     IRIS YANG <irisykyang@google.com>    2020-07-17 04:30:05 +0000
committer  IRIS YANG <irisykyang@google.com>    2020-07-17 04:30:05 +0000
commit     81aec74062b5c629b3408f7f3d18343ec0bbcab8 (patch)
tree       4b825dc642cb6eb9a060e54bf8d69288fbee4904 /src
parent     e868444bb65b7ae2a025b1c8c7854a8c4f2f58c1 (diff)
download   jinja-81aec74062b5c629b3408f7f3d18343ec0bbcab8.tar.gz
Revert "Import external/python/jinja into master"
This reverts commit e868444bb65b7ae2a025b1c8c7854a8c4f2f58c1.

Reason for revert: the build will fail. We may need to wait for b/160731429 to be fixed and then submit this again.

Change-Id: I56449de779d11c13cdfe1243b9a9726f94e55b33
Diffstat (limited to 'src')
-rw-r--r--  src/jinja2/__init__.py        43
-rw-r--r--  src/jinja2/_identifier.py      6
-rw-r--r--  src/jinja2/asyncfilters.py   157
-rw-r--r--  src/jinja2/asyncsupport.py   249
-rw-r--r--  src/jinja2/bccache.py        345
-rw-r--r--  src/jinja2/compiler.py      1754
-rw-r--r--  src/jinja2/constants.py       20
-rw-r--r--  src/jinja2/debug.py          261
-rw-r--r--  src/jinja2/defaults.py        42
-rw-r--r--  src/jinja2/environment.py   1331
-rw-r--r--  src/jinja2/exceptions.py     147
-rw-r--r--  src/jinja2/ext.py            700
-rw-r--r--  src/jinja2/filters.py       1361
-rw-r--r--  src/jinja2/idtracking.py     289
-rw-r--r--  src/jinja2/lexer.py          801
-rw-r--r--  src/jinja2/loaders.py        566
-rw-r--r--  src/jinja2/meta.py            98
-rw-r--r--  src/jinja2/nativetypes.py     93
-rw-r--r--  src/jinja2/nodes.py         1052
-rw-r--r--  src/jinja2/optimizer.py       40
-rw-r--r--  src/jinja2/parser.py         934
-rw-r--r--  src/jinja2/runtime.py        919
-rw-r--r--  src/jinja2/sandbox.py        419
-rw-r--r--  src/jinja2/tests.py          211
-rw-r--r--  src/jinja2/utils.py          666
-rw-r--r--  src/jinja2/visitor.py         79
26 files changed, 0 insertions, 12583 deletions
diff --git a/src/jinja2/__init__.py b/src/jinja2/__init__.py
deleted file mode 100644
index 8fa05183..00000000
--- a/src/jinja2/__init__.py
+++ /dev/null
@@ -1,43 +0,0 @@
-"""Jinja is a template engine written in pure Python. It provides a
-non-XML syntax that supports inline expressions and an optional
-sandboxed environment.
-"""
-from markupsafe import escape
-from markupsafe import Markup
-
-from .bccache import BytecodeCache
-from .bccache import FileSystemBytecodeCache
-from .bccache import MemcachedBytecodeCache
-from .environment import Environment
-from .environment import Template
-from .exceptions import TemplateAssertionError
-from .exceptions import TemplateError
-from .exceptions import TemplateNotFound
-from .exceptions import TemplateRuntimeError
-from .exceptions import TemplatesNotFound
-from .exceptions import TemplateSyntaxError
-from .exceptions import UndefinedError
-from .filters import contextfilter
-from .filters import environmentfilter
-from .filters import evalcontextfilter
-from .loaders import BaseLoader
-from .loaders import ChoiceLoader
-from .loaders import DictLoader
-from .loaders import FileSystemLoader
-from .loaders import FunctionLoader
-from .loaders import ModuleLoader
-from .loaders import PackageLoader
-from .loaders import PrefixLoader
-from .runtime import ChainableUndefined
-from .runtime import DebugUndefined
-from .runtime import make_logging_undefined
-from .runtime import StrictUndefined
-from .runtime import Undefined
-from .utils import clear_caches
-from .utils import contextfunction
-from .utils import environmentfunction
-from .utils import evalcontextfunction
-from .utils import is_undefined
-from .utils import select_autoescape
-
-__version__ = "3.0.0a1"
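
The deleted __init__.py above only re-exports the public API (Environment, the loaders, the exception and Undefined types, and helpers such as select_autoescape). A minimal sketch of that public surface, assuming an illustrative "templates" directory and template text that are not part of this change:

    from jinja2 import Environment, FileSystemLoader, select_autoescape

    # Build an environment from the names re-exported above; loader
    # directory and template text are illustrative only.
    env = Environment(
        loader=FileSystemLoader("templates"),
        autoescape=select_autoescape(["html", "xml"]),
    )
    print(env.from_string("Hello {{ name }}!").render(name="Jinja"))
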
diff --git a/src/jinja2/_identifier.py b/src/jinja2/_identifier.py
deleted file mode 100644
index 224d5449..00000000
--- a/src/jinja2/_identifier.py
+++ /dev/null
@@ -1,6 +0,0 @@
-import re
-
-# generated by scripts/generate_identifier_pattern.py
-pattern = re.compile(
- r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛ࣔ-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఃా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഁ-ഃാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳸᳹᷀-᷵᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑅳𑄴𑆀-𑆂𑆳-𑇊𑇀-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+" # noqa: B950
-)
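
The generated pattern above lists identifier continuation characters so the lexer can accept the same Unicode identifiers Python itself allows. A small sketch of the effect (the variable name is illustrative):

    from jinja2 import Environment

    # Accented names lex like any other identifier in a template.
    env = Environment()
    print(env.from_string("{{ café }}").render(**{"café": "ok"}))  # -> ok
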
diff --git a/src/jinja2/asyncfilters.py b/src/jinja2/asyncfilters.py
deleted file mode 100644
index 0aad12c8..00000000
--- a/src/jinja2/asyncfilters.py
+++ /dev/null
@@ -1,157 +0,0 @@
-from functools import wraps
-
-from . import filters
-from .asyncsupport import auto_aiter
-from .asyncsupport import auto_await
-
-
-async def auto_to_seq(value):
- seq = []
- if hasattr(value, "__aiter__"):
- async for item in value:
- seq.append(item)
- else:
- for item in value:
- seq.append(item)
- return seq
-
-
-async def async_select_or_reject(args, kwargs, modfunc, lookup_attr):
- seq, func = filters.prepare_select_or_reject(args, kwargs, modfunc, lookup_attr)
- if seq:
- async for item in auto_aiter(seq):
- if func(item):
- yield item
-
-
-def dualfilter(normal_filter, async_filter):
- wrap_evalctx = False
- if getattr(normal_filter, "environmentfilter", False) is True:
-
- def is_async(args):
- return args[0].is_async
-
- wrap_evalctx = False
- else:
- has_evalctxfilter = getattr(normal_filter, "evalcontextfilter", False) is True
- has_ctxfilter = getattr(normal_filter, "contextfilter", False) is True
- wrap_evalctx = not has_evalctxfilter and not has_ctxfilter
-
- def is_async(args):
- return args[0].environment.is_async
-
- @wraps(normal_filter)
- def wrapper(*args, **kwargs):
- b = is_async(args)
- if wrap_evalctx:
- args = args[1:]
- if b:
- return async_filter(*args, **kwargs)
- return normal_filter(*args, **kwargs)
-
- if wrap_evalctx:
- wrapper.evalcontextfilter = True
-
- wrapper.asyncfiltervariant = True
-
- return wrapper
-
-
-def asyncfiltervariant(original):
- def decorator(f):
- return dualfilter(original, f)
-
- return decorator
-
-
-@asyncfiltervariant(filters.do_first)
-async def do_first(environment, seq):
- try:
- return await auto_aiter(seq).__anext__()
- except StopAsyncIteration:
- return environment.undefined("No first item, sequence was empty.")
-
-
-@asyncfiltervariant(filters.do_groupby)
-async def do_groupby(environment, value, attribute):
- expr = filters.make_attrgetter(environment, attribute)
- return [
- filters._GroupTuple(key, await auto_to_seq(values))
- for key, values in filters.groupby(
- sorted(await auto_to_seq(value), key=expr), expr
- )
- ]
-
-
-@asyncfiltervariant(filters.do_join)
-async def do_join(eval_ctx, value, d="", attribute=None):
- return filters.do_join(eval_ctx, await auto_to_seq(value), d, attribute)
-
-
-@asyncfiltervariant(filters.do_list)
-async def do_list(value):
- return await auto_to_seq(value)
-
-
-@asyncfiltervariant(filters.do_reject)
-async def do_reject(*args, **kwargs):
- return async_select_or_reject(args, kwargs, lambda x: not x, False)
-
-
-@asyncfiltervariant(filters.do_rejectattr)
-async def do_rejectattr(*args, **kwargs):
- return async_select_or_reject(args, kwargs, lambda x: not x, True)
-
-
-@asyncfiltervariant(filters.do_select)
-async def do_select(*args, **kwargs):
- return async_select_or_reject(args, kwargs, lambda x: x, False)
-
-
-@asyncfiltervariant(filters.do_selectattr)
-async def do_selectattr(*args, **kwargs):
- return async_select_or_reject(args, kwargs, lambda x: x, True)
-
-
-@asyncfiltervariant(filters.do_map)
-async def do_map(*args, **kwargs):
- seq, func = filters.prepare_map(args, kwargs)
- if seq:
- async for item in auto_aiter(seq):
- yield await auto_await(func(item))
-
-
-@asyncfiltervariant(filters.do_sum)
-async def do_sum(environment, iterable, attribute=None, start=0):
- rv = start
- if attribute is not None:
- func = filters.make_attrgetter(environment, attribute)
- else:
-
- def func(x):
- return x
-
- async for item in auto_aiter(iterable):
- rv += func(item)
- return rv
-
-
-@asyncfiltervariant(filters.do_slice)
-async def do_slice(value, slices, fill_with=None):
- return filters.do_slice(await auto_to_seq(value), slices, fill_with)
-
-
-ASYNC_FILTERS = {
- "first": do_first,
- "groupby": do_groupby,
- "join": do_join,
- "list": do_list,
- # we intentionally do not support do_last because it may not be safe in async
- "reject": do_reject,
- "rejectattr": do_rejectattr,
- "map": do_map,
- "select": do_select,
- "selectattr": do_selectattr,
- "sum": do_sum,
- "slice": do_slice,
-}
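
asyncfilters.py above defines async-aware variants of the built-in filters; dualfilter picks the async variant at call time when the environment is async, so async iterables can flow through filters such as list and join. A sketch of how that is exercised, assuming an illustrative async generator named fetch_rows:

    import asyncio

    from jinja2 import Environment

    async def fetch_rows():
        # Illustrative async data source; any async iterable works here.
        for row in ("a", "b", "c"):
            yield row

    async def main():
        env = Environment(enable_async=True)  # pulls in the async variants
        tmpl = env.from_string("{{ rows | list | join(', ') }}")
        print(await tmpl.render_async(rows=fetch_rows()))  # -> a, b, c

    asyncio.run(main())
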
diff --git a/src/jinja2/asyncsupport.py b/src/jinja2/asyncsupport.py
deleted file mode 100644
index e46a85a3..00000000
--- a/src/jinja2/asyncsupport.py
+++ /dev/null
@@ -1,249 +0,0 @@
-"""The code for async support. Importing this patches Jinja."""
-import asyncio
-import inspect
-from functools import update_wrapper
-
-from markupsafe import Markup
-
-from .environment import TemplateModule
-from .runtime import LoopContext
-from .utils import concat
-from .utils import internalcode
-from .utils import missing
-
-
-async def concat_async(async_gen):
- rv = []
-
- async def collect():
- async for event in async_gen:
- rv.append(event)
-
- await collect()
- return concat(rv)
-
-
-async def generate_async(self, *args, **kwargs):
- vars = dict(*args, **kwargs)
- try:
- async for event in self.root_render_func(self.new_context(vars)):
- yield event
- except Exception:
- yield self.environment.handle_exception()
-
-
-def wrap_generate_func(original_generate):
- def _convert_generator(self, loop, args, kwargs):
- async_gen = self.generate_async(*args, **kwargs)
- try:
- while 1:
- yield loop.run_until_complete(async_gen.__anext__())
- except StopAsyncIteration:
- pass
-
- def generate(self, *args, **kwargs):
- if not self.environment.is_async:
- return original_generate(self, *args, **kwargs)
- return _convert_generator(self, asyncio.get_event_loop(), args, kwargs)
-
- return update_wrapper(generate, original_generate)
-
-
-async def render_async(self, *args, **kwargs):
- if not self.environment.is_async:
- raise RuntimeError("The environment was not created with async mode enabled.")
-
- vars = dict(*args, **kwargs)
- ctx = self.new_context(vars)
-
- try:
- return await concat_async(self.root_render_func(ctx))
- except Exception:
- return self.environment.handle_exception()
-
-
-def wrap_render_func(original_render):
- def render(self, *args, **kwargs):
- if not self.environment.is_async:
- return original_render(self, *args, **kwargs)
- loop = asyncio.get_event_loop()
- return loop.run_until_complete(self.render_async(*args, **kwargs))
-
- return update_wrapper(render, original_render)
-
-
-def wrap_block_reference_call(original_call):
- @internalcode
- async def async_call(self):
- rv = await concat_async(self._stack[self._depth](self._context))
- if self._context.eval_ctx.autoescape:
- rv = Markup(rv)
- return rv
-
- @internalcode
- def __call__(self):
- if not self._context.environment.is_async:
- return original_call(self)
- return async_call(self)
-
- return update_wrapper(__call__, original_call)
-
-
-def wrap_macro_invoke(original_invoke):
- @internalcode
- async def async_invoke(self, arguments, autoescape):
- rv = await self._func(*arguments)
- if autoescape:
- rv = Markup(rv)
- return rv
-
- @internalcode
- def _invoke(self, arguments, autoescape):
- if not self._environment.is_async:
- return original_invoke(self, arguments, autoescape)
- return async_invoke(self, arguments, autoescape)
-
- return update_wrapper(_invoke, original_invoke)
-
-
-@internalcode
-async def get_default_module_async(self):
- if self._module is not None:
- return self._module
- self._module = rv = await self.make_module_async()
- return rv
-
-
-def wrap_default_module(original_default_module):
- @internalcode
- def _get_default_module(self, ctx=None):
- if self.environment.is_async:
- raise RuntimeError("Template module attribute is unavailable in async mode")
- return original_default_module(self, ctx)
-
- return _get_default_module
-
-
-async def make_module_async(self, vars=None, shared=False, locals=None):
- context = self.new_context(vars, shared, locals)
- body_stream = []
- async for item in self.root_render_func(context):
- body_stream.append(item)
- return TemplateModule(self, context, body_stream)
-
-
-def patch_template():
- from . import Template
-
- Template.generate = wrap_generate_func(Template.generate)
- Template.generate_async = update_wrapper(generate_async, Template.generate_async)
- Template.render_async = update_wrapper(render_async, Template.render_async)
- Template.render = wrap_render_func(Template.render)
- Template._get_default_module = wrap_default_module(Template._get_default_module)
- Template._get_default_module_async = get_default_module_async
- Template.make_module_async = update_wrapper(
- make_module_async, Template.make_module_async
- )
-
-
-def patch_runtime():
- from .runtime import BlockReference, Macro
-
- BlockReference.__call__ = wrap_block_reference_call(BlockReference.__call__)
- Macro._invoke = wrap_macro_invoke(Macro._invoke)
-
-
-def patch_filters():
- from .filters import FILTERS
- from .asyncfilters import ASYNC_FILTERS
-
- FILTERS.update(ASYNC_FILTERS)
-
-
-def patch_all():
- patch_template()
- patch_runtime()
- patch_filters()
-
-
-async def auto_await(value):
- if inspect.isawaitable(value):
- return await value
- return value
-
-
-async def auto_aiter(iterable):
- if hasattr(iterable, "__aiter__"):
- async for item in iterable:
- yield item
- return
- for item in iterable:
- yield item
-
-
-class AsyncLoopContext(LoopContext):
- _to_iterator = staticmethod(auto_aiter)
-
- @property
- async def length(self):
- if self._length is not None:
- return self._length
-
- try:
- self._length = len(self._iterable)
- except TypeError:
- iterable = [x async for x in self._iterator]
- self._iterator = self._to_iterator(iterable)
- self._length = len(iterable) + self.index + (self._after is not missing)
-
- return self._length
-
- @property
- async def revindex0(self):
- return await self.length - self.index
-
- @property
- async def revindex(self):
- return await self.length - self.index0
-
- async def _peek_next(self):
- if self._after is not missing:
- return self._after
-
- try:
- self._after = await self._iterator.__anext__()
- except StopAsyncIteration:
- self._after = missing
-
- return self._after
-
- @property
- async def last(self):
- return await self._peek_next() is missing
-
- @property
- async def nextitem(self):
- rv = await self._peek_next()
-
- if rv is missing:
- return self._undefined("there is no next item")
-
- return rv
-
- def __aiter__(self):
- return self
-
- async def __anext__(self):
- if self._after is not missing:
- rv = self._after
- self._after = missing
- else:
- rv = await self._iterator.__anext__()
-
- self.index0 += 1
- self._before = self._current
- self._current = rv
- return rv, self
-
-
-patch_all()
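
As its docstring says, importing asyncsupport.py patches Jinja: patch_all() wraps Template.render/generate so ordinary environments keep the original synchronous path, while async environments get render_async/generate_async and an AsyncLoopContext. A minimal sketch of the two entry points (template text is illustrative):

    import asyncio

    from jinja2 import Environment

    # Synchronous environment: the original render path is used unchanged.
    print(Environment().from_string("{{ 1 + 1 }}").render())

    # Async environment: render_async is the coroutine entry point; the
    # wrapped render() drives the same coroutine on an event loop.
    async def main():
        env = Environment(enable_async=True)
        return await env.from_string("{{ 1 + 1 }}").render_async()

    print(asyncio.run(main()))
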

diff --git a/src/jinja2/bccache.py b/src/jinja2/bccache.py
deleted file mode 100644
index 7ddcf405..00000000
--- a/src/jinja2/bccache.py
+++ /dev/null
@@ -1,345 +0,0 @@
-"""The optional bytecode cache system. This is useful if you have very
-complex template situations and the compilation of all those templates
-slows down your application too much.
-
-Situations where this is useful are often forking web applications that
-are initialized on the first request.
-"""
-import errno
-import fnmatch
-import marshal
-import os
-import pickle
-import stat
-import sys
-import tempfile
-from hashlib import sha1
-from io import BytesIO
-
-from .utils import open_if_exists
-
-bc_version = 5
-# Magic bytes to identify Jinja bytecode cache files. Contains the
-# Python major and minor version to avoid loading incompatible bytecode
-# if a project upgrades its Python version.
-bc_magic = (
- b"j2"
- + pickle.dumps(bc_version, 2)
- + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2)
-)
-
-
-class Bucket:
- """Buckets are used to store the bytecode for one template. It's created
- and initialized by the bytecode cache and passed to the loading functions.
-
-    The buckets get an internal checksum assigned by the cache and use this
- to automatically reject outdated cache material. Individual bytecode
- cache subclasses don't have to care about cache invalidation.
- """
-
- def __init__(self, environment, key, checksum):
- self.environment = environment
- self.key = key
- self.checksum = checksum
- self.reset()
-
- def reset(self):
- """Resets the bucket (unloads the bytecode)."""
- self.code = None
-
- def load_bytecode(self, f):
- """Loads bytecode from a file or file like object."""
- # make sure the magic header is correct
- magic = f.read(len(bc_magic))
- if magic != bc_magic:
- self.reset()
- return
- # the source code of the file changed, we need to reload
- checksum = pickle.load(f)
- if self.checksum != checksum:
- self.reset()
- return
- # if marshal_load fails then we need to reload
- try:
- self.code = marshal.load(f)
- except (EOFError, ValueError, TypeError):
- self.reset()
- return
-
- def write_bytecode(self, f):
- """Dump the bytecode into the file or file like object passed."""
- if self.code is None:
- raise TypeError("can't write empty bucket")
- f.write(bc_magic)
- pickle.dump(self.checksum, f, 2)
- marshal.dump(self.code, f)
-
- def bytecode_from_string(self, string):
- """Load bytecode from a string."""
- self.load_bytecode(BytesIO(string))
-
- def bytecode_to_string(self):
- """Return the bytecode as string."""
- out = BytesIO()
- self.write_bytecode(out)
- return out.getvalue()
-
-
-class BytecodeCache:
- """To implement your own bytecode cache you have to subclass this class
- and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of
- these methods are passed a :class:`~jinja2.bccache.Bucket`.
-
- A very basic bytecode cache that saves the bytecode on the file system::
-
- from os import path
-
- class MyCache(BytecodeCache):
-
- def __init__(self, directory):
- self.directory = directory
-
- def load_bytecode(self, bucket):
- filename = path.join(self.directory, bucket.key)
- if path.exists(filename):
- with open(filename, 'rb') as f:
- bucket.load_bytecode(f)
-
- def dump_bytecode(self, bucket):
- filename = path.join(self.directory, bucket.key)
- with open(filename, 'wb') as f:
- bucket.write_bytecode(f)
-
- A more advanced version of a filesystem based bytecode cache is part of
- Jinja.
- """
-
- def load_bytecode(self, bucket):
- """Subclasses have to override this method to load bytecode into a
- bucket. If they are not able to find code in the cache for the
-        bucket, they must not do anything.
- """
- raise NotImplementedError()
-
- def dump_bytecode(self, bucket):
- """Subclasses have to override this method to write the bytecode
-        from a bucket back to the cache. If it is unable to do so it must not
- fail silently but raise an exception.
- """
- raise NotImplementedError()
-
- def clear(self):
- """Clears the cache. This method is not used by Jinja but should be
- implemented to allow applications to clear the bytecode cache used
- by a particular environment.
- """
-
- def get_cache_key(self, name, filename=None):
- """Returns the unique hash key for this template name."""
- hash = sha1(name.encode("utf-8"))
- if filename is not None:
- filename = "|" + filename
- if isinstance(filename, str):
- filename = filename.encode("utf-8")
- hash.update(filename)
- return hash.hexdigest()
-
- def get_source_checksum(self, source):
- """Returns a checksum for the source."""
- return sha1(source.encode("utf-8")).hexdigest()
-
- def get_bucket(self, environment, name, filename, source):
- """Return a cache bucket for the given template. All arguments are
- mandatory but filename may be `None`.
- """
- key = self.get_cache_key(name, filename)
- checksum = self.get_source_checksum(source)
- bucket = Bucket(environment, key, checksum)
- self.load_bytecode(bucket)
- return bucket
-
- def set_bucket(self, bucket):
- """Put the bucket into the cache."""
- self.dump_bytecode(bucket)
-
-
-class FileSystemBytecodeCache(BytecodeCache):
- """A bytecode cache that stores bytecode on the filesystem. It accepts
- two arguments: The directory where the cache items are stored and a
- pattern string that is used to build the filename.
-
- If no directory is specified a default cache directory is selected. On
- Windows the user's temp directory is used, on UNIX systems a directory
- is created for the user in the system temp directory.
-
- The pattern can be used to have multiple separate caches operate on the
- same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s``
- is replaced with the cache key.
-
- >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache')
-
- This bytecode cache supports clearing of the cache using the clear method.
- """
-
- def __init__(self, directory=None, pattern="__jinja2_%s.cache"):
- if directory is None:
- directory = self._get_default_cache_dir()
- self.directory = directory
- self.pattern = pattern
-
- def _get_default_cache_dir(self):
- def _unsafe_dir():
- raise RuntimeError(
- "Cannot determine safe temp directory. You "
- "need to explicitly provide one."
- )
-
- tmpdir = tempfile.gettempdir()
-
-        # On Windows the temporary directory is user-specific unless
-        # explicitly forced otherwise. We can just use that.
- if os.name == "nt":
- return tmpdir
- if not hasattr(os, "getuid"):
- _unsafe_dir()
-
- dirname = f"_jinja2-cache-{os.getuid()}"
- actual_dir = os.path.join(tmpdir, dirname)
-
- try:
- os.mkdir(actual_dir, stat.S_IRWXU)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
- try:
- os.chmod(actual_dir, stat.S_IRWXU)
- actual_dir_stat = os.lstat(actual_dir)
- if (
- actual_dir_stat.st_uid != os.getuid()
- or not stat.S_ISDIR(actual_dir_stat.st_mode)
- or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU
- ):
- _unsafe_dir()
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
-
- actual_dir_stat = os.lstat(actual_dir)
- if (
- actual_dir_stat.st_uid != os.getuid()
- or not stat.S_ISDIR(actual_dir_stat.st_mode)
- or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU
- ):
- _unsafe_dir()
-
- return actual_dir
-
- def _get_cache_filename(self, bucket):
- return os.path.join(self.directory, self.pattern % (bucket.key,))
-
- def load_bytecode(self, bucket):
- f = open_if_exists(self._get_cache_filename(bucket), "rb")
- if f is not None:
- try:
- bucket.load_bytecode(f)
- finally:
- f.close()
-
- def dump_bytecode(self, bucket):
- f = open(self._get_cache_filename(bucket), "wb")
- try:
- bucket.write_bytecode(f)
- finally:
- f.close()
-
- def clear(self):
- # imported lazily here because google app-engine doesn't support
- # write access on the file system and the function does not exist
- # normally.
- from os import remove
-
- files = fnmatch.filter(os.listdir(self.directory), self.pattern % ("*",))
- for filename in files:
- try:
- remove(os.path.join(self.directory, filename))
- except OSError:
- pass
-
-
-class MemcachedBytecodeCache(BytecodeCache):
- """This class implements a bytecode cache that uses a memcache cache for
- storing the information. It does not enforce a specific memcache library
- (tummy's memcache or cmemcache) but will accept any class that provides
- the minimal interface required.
-
- Libraries compatible with this class:
-
- - `cachelib <https://github.com/pallets/cachelib>`_
- - `python-memcached <https://pypi.org/project/python-memcached/>`_
-
- (Unfortunately the django cache interface is not compatible because it
- does not support storing binary data, only text. You can however pass
- the underlying cache client to the bytecode cache which is available
- as `django.core.cache.cache._client`.)
-
- The minimal interface for the client passed to the constructor is this:
-
- .. class:: MinimalClientInterface
-
- .. method:: set(key, value[, timeout])
-
- Stores the bytecode in the cache. `value` is a string and
- `timeout` the timeout of the key. If timeout is not provided
-            a default timeout or no timeout should be assumed; if it's
-            provided, it's an integer giving the number of seconds the cache
-            item should exist.
-
- .. method:: get(key)
-
- Returns the value for the cache key. If the item does not
- exist in the cache the return value must be `None`.
-
- The other arguments to the constructor are the prefix for all keys that
- is added before the actual cache key and the timeout for the bytecode in
- the cache system. We recommend a high (or no) timeout.
-
- This bytecode cache does not support clearing of used items in the cache.
- The clear method is a no-operation function.
-
- .. versionadded:: 2.7
- Added support for ignoring memcache errors through the
- `ignore_memcache_errors` parameter.
- """
-
- def __init__(
- self,
- client,
- prefix="jinja2/bytecode/",
- timeout=None,
- ignore_memcache_errors=True,
- ):
- self.client = client
- self.prefix = prefix
- self.timeout = timeout
- self.ignore_memcache_errors = ignore_memcache_errors
-
- def load_bytecode(self, bucket):
- try:
- code = self.client.get(self.prefix + bucket.key)
- except Exception:
- if not self.ignore_memcache_errors:
- raise
- code = None
- if code is not None:
- bucket.bytecode_from_string(code)
-
- def dump_bytecode(self, bucket):
- args = (self.prefix + bucket.key, bucket.bytecode_to_string())
- if self.timeout is not None:
- args += (self.timeout,)
- try:
- self.client.set(*args)
- except Exception:
- if not self.ignore_memcache_errors:
- raise
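
bccache.py above is the whole bytecode-cache layer; attaching a FileSystemBytecodeCache to an environment is enough to persist compiled template bytecode between processes. A sketch with an illustrative cache directory, loader, and template:

    from jinja2 import DictLoader, Environment, FileSystemBytecodeCache

    env = Environment(
        loader=DictLoader({"hello.txt": "Hello {{ name }}!"}),
        # Cache directory and filename pattern are illustrative.
        bytecode_cache=FileSystemBytecodeCache("/tmp/jinja_cache", "%s.cache"),
    )
    # Loading through the loader consults a Bucket before recompiling.
    print(env.get_template("hello.txt").render(name="cache"))
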
diff --git a/src/jinja2/compiler.py b/src/jinja2/compiler.py
deleted file mode 100644
index abdbe6da..00000000
--- a/src/jinja2/compiler.py
+++ /dev/null
@@ -1,1754 +0,0 @@
-"""Compiles nodes from the parser into Python code."""
-from collections import namedtuple
-from functools import update_wrapper
-from io import StringIO
-from itertools import chain
-from keyword import iskeyword as is_python_keyword
-
-from markupsafe import escape
-from markupsafe import Markup
-
-from . import nodes
-from .exceptions import TemplateAssertionError
-from .idtracking import Symbols
-from .idtracking import VAR_LOAD_ALIAS
-from .idtracking import VAR_LOAD_PARAMETER
-from .idtracking import VAR_LOAD_RESOLVE
-from .idtracking import VAR_LOAD_UNDEFINED
-from .nodes import EvalContext
-from .optimizer import Optimizer
-from .utils import concat
-from .visitor import NodeVisitor
-
-operators = {
- "eq": "==",
- "ne": "!=",
- "gt": ">",
- "gteq": ">=",
- "lt": "<",
- "lteq": "<=",
- "in": "in",
- "notin": "not in",
-}
-
-
-def optimizeconst(f):
- def new_func(self, node, frame, **kwargs):
- # Only optimize if the frame is not volatile
- if self.optimized and not frame.eval_ctx.volatile:
- new_node = self.optimizer.visit(node, frame.eval_ctx)
- if new_node != node:
- return self.visit(new_node, frame)
- return f(self, node, frame, **kwargs)
-
- return update_wrapper(new_func, f)
-
-
-def generate(
- node, environment, name, filename, stream=None, defer_init=False, optimized=True
-):
- """Generate the python source for a node tree."""
- if not isinstance(node, nodes.Template):
- raise TypeError("Can't compile non template nodes")
- generator = environment.code_generator_class(
- environment, name, filename, stream, defer_init, optimized
- )
- generator.visit(node)
- if stream is None:
- return generator.stream.getvalue()
-
-
-def has_safe_repr(value):
- """Does the node have a safe representation?"""
- if value is None or value is NotImplemented or value is Ellipsis:
- return True
-
- if type(value) in {bool, int, float, complex, range, str, Markup}:
- return True
-
- if type(value) in {tuple, list, set, frozenset}:
- return all(has_safe_repr(v) for v in value)
-
- if type(value) is dict:
- return all(has_safe_repr(k) and has_safe_repr(v) for k, v in value.items())
-
- return False
-
-
-def find_undeclared(nodes, names):
- """Check if the names passed are accessed undeclared. The return value
- is a set of all the undeclared names from the sequence of names found.
- """
- visitor = UndeclaredNameVisitor(names)
- try:
- for node in nodes:
- visitor.visit(node)
- except VisitorExit:
- pass
- return visitor.undeclared
-
-
-class MacroRef:
- def __init__(self, node):
- self.node = node
- self.accesses_caller = False
- self.accesses_kwargs = False
- self.accesses_varargs = False
-
-
-class Frame:
- """Holds compile time information for us."""
-
- def __init__(self, eval_ctx, parent=None, level=None):
- self.eval_ctx = eval_ctx
- self.symbols = Symbols(parent.symbols if parent else None, level=level)
-
- # a toplevel frame is the root + soft frames such as if conditions.
- self.toplevel = False
-
- # the root frame is basically just the outermost frame, so no if
- # conditions. This information is used to optimize inheritance
- # situations.
- self.rootlevel = False
-
- # in some dynamic inheritance situations the compiler needs to add
- # write tests around output statements.
- self.require_output_check = parent and parent.require_output_check
-
- # inside some tags we are using a buffer rather than yield statements.
- # this for example affects {% filter %} or {% macro %}. If a frame
- # is buffered this variable points to the name of the list used as
- # buffer.
- self.buffer = None
-
- # the name of the block we're in, otherwise None.
- self.block = parent.block if parent else None
-
- # the parent of this frame
- self.parent = parent
-
- if parent is not None:
- self.buffer = parent.buffer
-
- def copy(self):
- """Create a copy of the current one."""
- rv = object.__new__(self.__class__)
- rv.__dict__.update(self.__dict__)
- rv.symbols = self.symbols.copy()
- return rv
-
- def inner(self, isolated=False):
- """Return an inner frame."""
- if isolated:
- return Frame(self.eval_ctx, level=self.symbols.level + 1)
- return Frame(self.eval_ctx, self)
-
- def soft(self):
- """Return a soft frame. A soft frame may not be modified as
- standalone thing as it shares the resources with the frame it
- was created of, but it's not a rootlevel frame any longer.
-
- This is only used to implement if-statements.
- """
- rv = self.copy()
- rv.rootlevel = False
- return rv
-
- __copy__ = copy
-
-
-class VisitorExit(RuntimeError):
- """Exception used by the `UndeclaredNameVisitor` to signal a stop."""
-
-
-class DependencyFinderVisitor(NodeVisitor):
- """A visitor that collects filter and test calls."""
-
- def __init__(self):
- self.filters = set()
- self.tests = set()
-
- def visit_Filter(self, node):
- self.generic_visit(node)
- self.filters.add(node.name)
-
- def visit_Test(self, node):
- self.generic_visit(node)
- self.tests.add(node.name)
-
- def visit_Block(self, node):
- """Stop visiting at blocks."""
-
-
-class UndeclaredNameVisitor(NodeVisitor):
- """A visitor that checks if a name is accessed without being
- declared. This is different from the frame visitor as it will
- not stop at closure frames.
- """
-
- def __init__(self, names):
- self.names = set(names)
- self.undeclared = set()
-
- def visit_Name(self, node):
- if node.ctx == "load" and node.name in self.names:
- self.undeclared.add(node.name)
- if self.undeclared == self.names:
- raise VisitorExit()
- else:
- self.names.discard(node.name)
-
- def visit_Block(self, node):
- """Stop visiting a blocks."""
-
-
-class CompilerExit(Exception):
- """Raised if the compiler encountered a situation where it just
- doesn't make sense to further process the code. Any block that
- raises such an exception is not further processed.
- """
-
-
-class CodeGenerator(NodeVisitor):
- def __init__(
- self, environment, name, filename, stream=None, defer_init=False, optimized=True
- ):
- if stream is None:
- stream = StringIO()
- self.environment = environment
- self.name = name
- self.filename = filename
- self.stream = stream
- self.created_block_context = False
- self.defer_init = defer_init
- self.optimized = optimized
- if optimized:
- self.optimizer = Optimizer(environment)
-
- # aliases for imports
- self.import_aliases = {}
-
- # a registry for all blocks. Because blocks are moved out
- # into the global python scope they are registered here
- self.blocks = {}
-
- # the number of extends statements so far
- self.extends_so_far = 0
-
- # some templates have a rootlevel extends. In this case we
- # can safely assume that we're a child template and do some
- # more optimizations.
- self.has_known_extends = False
-
- # the current line number
- self.code_lineno = 1
-
- # registry of all filters and tests (global, not block local)
- self.tests = {}
- self.filters = {}
-
- # the debug information
- self.debug_info = []
- self._write_debug_info = None
-
- # the number of new lines before the next write()
- self._new_lines = 0
-
- # the line number of the last written statement
- self._last_line = 0
-
- # true if nothing was written so far.
- self._first_write = True
-
- # used by the `temporary_identifier` method to get new
- # unique, temporary identifier
- self._last_identifier = 0
-
- # the current indentation
- self._indentation = 0
-
- # Tracks toplevel assignments
- self._assign_stack = []
-
- # Tracks parameter definition blocks
- self._param_def_block = []
-
- # Tracks the current context.
- self._context_reference_stack = ["context"]
-
- # -- Various compilation helpers
-
- def fail(self, msg, lineno):
- """Fail with a :exc:`TemplateAssertionError`."""
- raise TemplateAssertionError(msg, lineno, self.name, self.filename)
-
- def temporary_identifier(self):
- """Get a new unique identifier."""
- self._last_identifier += 1
- return f"t_{self._last_identifier}"
-
- def buffer(self, frame):
- """Enable buffering for the frame from that point onwards."""
- frame.buffer = self.temporary_identifier()
- self.writeline(f"{frame.buffer} = []")
-
- def return_buffer_contents(self, frame, force_unescaped=False):
- """Return the buffer contents of the frame."""
- if not force_unescaped:
- if frame.eval_ctx.volatile:
- self.writeline("if context.eval_ctx.autoescape:")
- self.indent()
- self.writeline(f"return Markup(concat({frame.buffer}))")
- self.outdent()
- self.writeline("else:")
- self.indent()
- self.writeline(f"return concat({frame.buffer})")
- self.outdent()
- return
- elif frame.eval_ctx.autoescape:
- self.writeline(f"return Markup(concat({frame.buffer}))")
- return
- self.writeline(f"return concat({frame.buffer})")
-
- def indent(self):
- """Indent by one."""
- self._indentation += 1
-
- def outdent(self, step=1):
- """Outdent by step."""
- self._indentation -= step
-
- def start_write(self, frame, node=None):
- """Yield or write into the frame buffer."""
- if frame.buffer is None:
- self.writeline("yield ", node)
- else:
- self.writeline(f"{frame.buffer}.append(", node)
-
- def end_write(self, frame):
- """End the writing process started by `start_write`."""
- if frame.buffer is not None:
- self.write(")")
-
- def simple_write(self, s, frame, node=None):
- """Simple shortcut for start_write + write + end_write."""
- self.start_write(frame, node)
- self.write(s)
- self.end_write(frame)
-
- def blockvisit(self, nodes, frame):
- """Visit a list of nodes as block in a frame. If the current frame
- is no buffer a dummy ``if 0: yield None`` is written automatically.
- """
- try:
- self.writeline("pass")
- for node in nodes:
- self.visit(node, frame)
- except CompilerExit:
- pass
-
- def write(self, x):
- """Write a string into the output stream."""
- if self._new_lines:
- if not self._first_write:
- self.stream.write("\n" * self._new_lines)
- self.code_lineno += self._new_lines
- if self._write_debug_info is not None:
- self.debug_info.append((self._write_debug_info, self.code_lineno))
- self._write_debug_info = None
- self._first_write = False
- self.stream.write(" " * self._indentation)
- self._new_lines = 0
- self.stream.write(x)
-
- def writeline(self, x, node=None, extra=0):
- """Combination of newline and write."""
- self.newline(node, extra)
- self.write(x)
-
- def newline(self, node=None, extra=0):
- """Add one or more newlines before the next write."""
- self._new_lines = max(self._new_lines, 1 + extra)
- if node is not None and node.lineno != self._last_line:
- self._write_debug_info = node.lineno
- self._last_line = node.lineno
-
- def signature(self, node, frame, extra_kwargs=None):
- """Writes a function call to the stream for the current node.
- A leading comma is added automatically. The extra keyword
-        arguments may not include python keywords, otherwise a syntax
-        error could occur. The extra keyword arguments should be given
-        as a python dict.
- """
- # if any of the given keyword arguments is a python keyword
- # we have to make sure that no invalid call is created.
- kwarg_workaround = False
- for kwarg in chain((x.key for x in node.kwargs), extra_kwargs or ()):
- if is_python_keyword(kwarg):
- kwarg_workaround = True
- break
-
- for arg in node.args:
- self.write(", ")
- self.visit(arg, frame)
-
- if not kwarg_workaround:
- for kwarg in node.kwargs:
- self.write(", ")
- self.visit(kwarg, frame)
- if extra_kwargs is not None:
- for key, value in extra_kwargs.items():
- self.write(f", {key}={value}")
- if node.dyn_args:
- self.write(", *")
- self.visit(node.dyn_args, frame)
-
- if kwarg_workaround:
- if node.dyn_kwargs is not None:
- self.write(", **dict({")
- else:
- self.write(", **{")
- for kwarg in node.kwargs:
- self.write(f"{kwarg.key!r}: ")
- self.visit(kwarg.value, frame)
- self.write(", ")
- if extra_kwargs is not None:
- for key, value in extra_kwargs.items():
- self.write(f"{key!r}: {value}, ")
- if node.dyn_kwargs is not None:
- self.write("}, **")
- self.visit(node.dyn_kwargs, frame)
- self.write(")")
- else:
- self.write("}")
-
- elif node.dyn_kwargs is not None:
- self.write(", **")
- self.visit(node.dyn_kwargs, frame)
-
- def pull_dependencies(self, nodes):
- """Pull all the dependencies."""
- visitor = DependencyFinderVisitor()
- for node in nodes:
- visitor.visit(node)
- for dependency in "filters", "tests":
- mapping = getattr(self, dependency)
- for name in getattr(visitor, dependency):
- if name not in mapping:
- mapping[name] = self.temporary_identifier()
- self.writeline(f"{mapping[name]} = environment.{dependency}[{name!r}]")
-
- def enter_frame(self, frame):
- undefs = []
- for target, (action, param) in frame.symbols.loads.items():
- if action == VAR_LOAD_PARAMETER:
- pass
- elif action == VAR_LOAD_RESOLVE:
- self.writeline(f"{target} = {self.get_resolve_func()}({param!r})")
- elif action == VAR_LOAD_ALIAS:
- self.writeline(f"{target} = {param}")
- elif action == VAR_LOAD_UNDEFINED:
- undefs.append(target)
- else:
- raise NotImplementedError("unknown load instruction")
- if undefs:
- self.writeline(f"{' = '.join(undefs)} = missing")
-
- def leave_frame(self, frame, with_python_scope=False):
- if not with_python_scope:
- undefs = []
- for target in frame.symbols.loads:
- undefs.append(target)
- if undefs:
- self.writeline(f"{' = '.join(undefs)} = missing")
-
- def func(self, name):
- if self.environment.is_async:
- return f"async def {name}"
- return f"def {name}"
-
- def macro_body(self, node, frame):
- """Dump the function def of a macro or call block."""
- frame = frame.inner()
- frame.symbols.analyze_node(node)
- macro_ref = MacroRef(node)
-
- explicit_caller = None
- skip_special_params = set()
- args = []
- for idx, arg in enumerate(node.args):
- if arg.name == "caller":
- explicit_caller = idx
- if arg.name in ("kwargs", "varargs"):
- skip_special_params.add(arg.name)
- args.append(frame.symbols.ref(arg.name))
-
- undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs"))
-
- if "caller" in undeclared:
- # In older Jinja versions there was a bug that allowed caller
- # to retain the special behavior even if it was mentioned in
- # the argument list. However thankfully this was only really
- # working if it was the last argument. So we are explicitly
- # checking this now and error out if it is anywhere else in
- # the argument list.
- if explicit_caller is not None:
- try:
- node.defaults[explicit_caller - len(node.args)]
- except IndexError:
- self.fail(
- "When defining macros or call blocks the "
- 'special "caller" argument must be omitted '
- "or be given a default.",
- node.lineno,
- )
- else:
- args.append(frame.symbols.declare_parameter("caller"))
- macro_ref.accesses_caller = True
- if "kwargs" in undeclared and "kwargs" not in skip_special_params:
- args.append(frame.symbols.declare_parameter("kwargs"))
- macro_ref.accesses_kwargs = True
- if "varargs" in undeclared and "varargs" not in skip_special_params:
- args.append(frame.symbols.declare_parameter("varargs"))
- macro_ref.accesses_varargs = True
-
- # macros are delayed, they never require output checks
- frame.require_output_check = False
- frame.symbols.analyze_node(node)
- self.writeline(f"{self.func('macro')}({', '.join(args)}):", node)
- self.indent()
-
- self.buffer(frame)
- self.enter_frame(frame)
-
- self.push_parameter_definitions(frame)
- for idx, arg in enumerate(node.args):
- ref = frame.symbols.ref(arg.name)
- self.writeline(f"if {ref} is missing:")
- self.indent()
- try:
- default = node.defaults[idx - len(node.args)]
- except IndexError:
- self.writeline(
- f'{ref} = undefined("parameter {arg.name!r} was not provided",'
- f" name={arg.name!r})"
- )
- else:
- self.writeline(f"{ref} = ")
- self.visit(default, frame)
- self.mark_parameter_stored(ref)
- self.outdent()
- self.pop_parameter_definitions()
-
- self.blockvisit(node.body, frame)
- self.return_buffer_contents(frame, force_unescaped=True)
- self.leave_frame(frame, with_python_scope=True)
- self.outdent()
-
- return frame, macro_ref
-
- def macro_def(self, macro_ref, frame):
- """Dump the macro definition for the def created by macro_body."""
- arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args)
- name = getattr(macro_ref.node, "name", None)
- if len(macro_ref.node.args) == 1:
- arg_tuple += ","
- self.write(
- f"Macro(environment, macro, {name!r}, ({arg_tuple}),"
- f" {macro_ref.accesses_kwargs!r}, {macro_ref.accesses_varargs!r},"
- f" {macro_ref.accesses_caller!r}, context.eval_ctx.autoescape)"
- )
-
- def position(self, node):
- """Return a human readable position for the node."""
- rv = f"line {node.lineno}"
- if self.name is not None:
- rv = f"{rv} in {self.name!r}"
- return rv
-
- def dump_local_context(self, frame):
- items_kv = ", ".join(
- f"{name!r}: {target}"
- for name, target in frame.symbols.dump_stores().items()
- )
- return f"{{{items_kv}}}"
-
- def write_commons(self):
- """Writes a common preamble that is used by root and block functions.
- Primarily this sets up common local helpers and enforces a generator
- through a dead branch.
- """
- self.writeline("resolve = context.resolve_or_missing")
- self.writeline("undefined = environment.undefined")
- # always use the standard Undefined class for the implicit else of
- # conditional expressions
- self.writeline("cond_expr_undefined = Undefined")
- self.writeline("if 0: yield None")
-
- def push_parameter_definitions(self, frame):
- """Pushes all parameter targets from the given frame into a local
- stack that permits tracking of yet to be assigned parameters. In
- particular this enables the optimization from `visit_Name` to skip
- undefined expressions for parameters in macros as macros can reference
- otherwise unbound parameters.
- """
- self._param_def_block.append(frame.symbols.dump_param_targets())
-
- def pop_parameter_definitions(self):
- """Pops the current parameter definitions set."""
- self._param_def_block.pop()
-
- def mark_parameter_stored(self, target):
- """Marks a parameter in the current parameter definitions as stored.
- This will skip the enforced undefined checks.
- """
- if self._param_def_block:
- self._param_def_block[-1].discard(target)
-
- def push_context_reference(self, target):
- self._context_reference_stack.append(target)
-
- def pop_context_reference(self):
- self._context_reference_stack.pop()
-
- def get_context_ref(self):
- return self._context_reference_stack[-1]
-
- def get_resolve_func(self):
- target = self._context_reference_stack[-1]
- if target == "context":
- return "resolve"
- return f"{target}.resolve"
-
- def derive_context(self, frame):
- return f"{self.get_context_ref()}.derived({self.dump_local_context(frame)})"
-
- def parameter_is_undeclared(self, target):
- """Checks if a given target is an undeclared parameter."""
- if not self._param_def_block:
- return False
- return target in self._param_def_block[-1]
-
- def push_assign_tracking(self):
- """Pushes a new layer for assignment tracking."""
- self._assign_stack.append(set())
-
- def pop_assign_tracking(self, frame):
- """Pops the topmost level for assignment tracking and updates the
- context variables if necessary.
- """
- vars = self._assign_stack.pop()
- if not frame.toplevel or not vars:
- return
- public_names = [x for x in vars if x[:1] != "_"]
- if len(vars) == 1:
- name = next(iter(vars))
- ref = frame.symbols.ref(name)
- self.writeline(f"context.vars[{name!r}] = {ref}")
- else:
- self.writeline("context.vars.update({")
- for idx, name in enumerate(vars):
- if idx:
- self.write(", ")
- ref = frame.symbols.ref(name)
- self.write(f"{name!r}: {ref}")
- self.write("})")
- if public_names:
- if len(public_names) == 1:
- self.writeline(f"context.exported_vars.add({public_names[0]!r})")
- else:
- names_str = ", ".join(map(repr, public_names))
- self.writeline(f"context.exported_vars.update(({names_str}))")
-
- # -- Statement Visitors
-
- def visit_Template(self, node, frame=None):
- assert frame is None, "no root frame allowed"
- eval_ctx = EvalContext(self.environment, self.name)
-
- from .runtime import exported
-
- self.writeline("from __future__ import generator_stop") # Python < 3.7
- self.writeline("from jinja2.runtime import " + ", ".join(exported))
-
- if self.environment.is_async:
- self.writeline(
- "from jinja2.asyncsupport import auto_await, "
- "auto_aiter, AsyncLoopContext"
- )
-
- # if we want a deferred initialization we cannot move the
- # environment into a local name
- envenv = "" if self.defer_init else ", environment=environment"
-
- # do we have an extends tag at all? If not, we can save some
- # overhead by just not processing any inheritance code.
- have_extends = node.find(nodes.Extends) is not None
-
- # find all blocks
- for block in node.find_all(nodes.Block):
- if block.name in self.blocks:
- self.fail(f"block {block.name!r} defined twice", block.lineno)
- self.blocks[block.name] = block
-
- # find all imports and import them
- for import_ in node.find_all(nodes.ImportedName):
- if import_.importname not in self.import_aliases:
- imp = import_.importname
- self.import_aliases[imp] = alias = self.temporary_identifier()
- if "." in imp:
- module, obj = imp.rsplit(".", 1)
- self.writeline(f"from {module} import {obj} as {alias}")
- else:
- self.writeline(f"import {imp} as {alias}")
-
- # add the load name
- self.writeline(f"name = {self.name!r}")
-
- # generate the root render function.
- self.writeline(
- f"{self.func('root')}(context, missing=missing{envenv}):", extra=1
- )
- self.indent()
- self.write_commons()
-
- # process the root
- frame = Frame(eval_ctx)
- if "self" in find_undeclared(node.body, ("self",)):
- ref = frame.symbols.declare_parameter("self")
- self.writeline(f"{ref} = TemplateReference(context)")
- frame.symbols.analyze_node(node)
- frame.toplevel = frame.rootlevel = True
- frame.require_output_check = have_extends and not self.has_known_extends
- if have_extends:
- self.writeline("parent_template = None")
- self.enter_frame(frame)
- self.pull_dependencies(node.body)
- self.blockvisit(node.body, frame)
- self.leave_frame(frame, with_python_scope=True)
- self.outdent()
-
- # make sure that the parent root is called.
- if have_extends:
- if not self.has_known_extends:
- self.indent()
- self.writeline("if parent_template is not None:")
- self.indent()
- if not self.environment.is_async:
- self.writeline("yield from parent_template.root_render_func(context)")
- else:
- loop = "async for" if self.environment.is_async else "for"
- self.writeline(
- f"{loop} event in parent_template.root_render_func(context):"
- )
- self.indent()
- self.writeline("yield event")
- self.outdent()
- self.outdent(1 + (not self.has_known_extends))
-
- # at this point we now have the blocks collected and can visit them too.
- for name, block in self.blocks.items():
- self.writeline(
- f"{self.func('block_' + name)}(context, missing=missing{envenv}):",
- block,
- 1,
- )
- self.indent()
- self.write_commons()
- # It's important that we do not make this frame a child of the
- # toplevel template. This would cause a variety of
- # interesting issues with identifier tracking.
- block_frame = Frame(eval_ctx)
- undeclared = find_undeclared(block.body, ("self", "super"))
- if "self" in undeclared:
- ref = block_frame.symbols.declare_parameter("self")
- self.writeline(f"{ref} = TemplateReference(context)")
- if "super" in undeclared:
- ref = block_frame.symbols.declare_parameter("super")
- self.writeline(f"{ref} = context.super({name!r}, block_{name})")
- block_frame.symbols.analyze_node(block)
- block_frame.block = name
- self.enter_frame(block_frame)
- self.pull_dependencies(block.body)
- self.blockvisit(block.body, block_frame)
- self.leave_frame(block_frame, with_python_scope=True)
- self.outdent()
-
- blocks_kv_str = ", ".join(f"{x!r}: block_{x}" for x in self.blocks)
- self.writeline(f"blocks = {{{blocks_kv_str}}}", extra=1)
- debug_kv_str = "&".join(f"{k}={v}" for k, v in self.debug_info)
- self.writeline(f"debug_info = {debug_kv_str!r}")
-
- def visit_Block(self, node, frame):
- """Call a block and register it for the template."""
- level = 0
- if frame.toplevel:
- # if we know that we are a child template, there is no need to
- # check if we are one
- if self.has_known_extends:
- return
- if self.extends_so_far > 0:
- self.writeline("if parent_template is None:")
- self.indent()
- level += 1
-
- if node.scoped:
- context = self.derive_context(frame)
- else:
- context = self.get_context_ref()
-
- if not self.environment.is_async and frame.buffer is None:
- self.writeline(
- f"yield from context.blocks[{node.name!r}][0]({context})", node
- )
- else:
- loop = "async for" if self.environment.is_async else "for"
- self.writeline(
- f"{loop} event in context.blocks[{node.name!r}][0]({context}):", node
- )
- self.indent()
- self.simple_write("event", frame)
- self.outdent()
-
- self.outdent(level)
-
- def visit_Extends(self, node, frame):
- """Calls the extender."""
- if not frame.toplevel:
- self.fail("cannot use extend from a non top-level scope", node.lineno)
-
- # if the number of extends statements in general is zero so
- # far, we don't have to add a check if something extended
- # the template before this one.
- if self.extends_so_far > 0:
-
- # if we have a known extends we just add a template runtime
- # error into the generated code. We could catch that at compile
-            # time too, but I prefer not to confuse users by throwing the
- # same error at different times just "because we can".
- if not self.has_known_extends:
- self.writeline("if parent_template is not None:")
- self.indent()
- self.writeline('raise TemplateRuntimeError("extended multiple times")')
-
- # if we have a known extends already we don't need that code here
- # as we know that the template execution will end here.
- if self.has_known_extends:
- raise CompilerExit()
- else:
- self.outdent()
-
- self.writeline("parent_template = environment.get_template(", node)
- self.visit(node.template, frame)
- self.write(f", {self.name!r})")
- self.writeline("for name, parent_block in parent_template.blocks.items():")
- self.indent()
- self.writeline("context.blocks.setdefault(name, []).append(parent_block)")
- self.outdent()
-
- # if this extends statement was in the root level we can take
- # advantage of that information and simplify the generated code
- # in the top level from this point onwards
- if frame.rootlevel:
- self.has_known_extends = True
-
- # and now we have one more
- self.extends_so_far += 1
-
- def visit_Include(self, node, frame):
- """Handles includes."""
- if node.ignore_missing:
- self.writeline("try:")
- self.indent()
-
- func_name = "get_or_select_template"
- if isinstance(node.template, nodes.Const):
- if isinstance(node.template.value, str):
- func_name = "get_template"
- elif isinstance(node.template.value, (tuple, list)):
- func_name = "select_template"
- elif isinstance(node.template, (nodes.Tuple, nodes.List)):
- func_name = "select_template"
-
- self.writeline(f"template = environment.{func_name}(", node)
- self.visit(node.template, frame)
- self.write(f", {self.name!r})")
- if node.ignore_missing:
- self.outdent()
- self.writeline("except TemplateNotFound:")
- self.indent()
- self.writeline("pass")
- self.outdent()
- self.writeline("else:")
- self.indent()
-
- skip_event_yield = False
- if node.with_context:
- loop = "async for" if self.environment.is_async else "for"
- self.writeline(
- f"{loop} event in template.root_render_func("
- "template.new_context(context.get_all(), True,"
- f" {self.dump_local_context(frame)})):"
- )
- elif self.environment.is_async:
- self.writeline(
- "for event in (await template._get_default_module_async())"
- "._body_stream:"
- )
- else:
- self.writeline("yield from template._get_default_module()._body_stream")
- skip_event_yield = True
-
- if not skip_event_yield:
- self.indent()
- self.simple_write("event", frame)
- self.outdent()
-
- if node.ignore_missing:
- self.outdent()
-
- def visit_Import(self, node, frame):
- """Visit regular imports."""
- self.writeline(f"{frame.symbols.ref(node.target)} = ", node)
- if frame.toplevel:
- self.write(f"context.vars[{node.target!r}] = ")
- if self.environment.is_async:
- self.write("await ")
- self.write("environment.get_template(")
- self.visit(node.template, frame)
- self.write(f", {self.name!r}).")
- if node.with_context:
- func = "make_module" + ("_async" if self.environment.is_async else "")
- self.write(
- f"{func}(context.get_all(), True, {self.dump_local_context(frame)})"
- )
- elif self.environment.is_async:
- self.write("_get_default_module_async()")
- else:
- self.write("_get_default_module(context)")
- if frame.toplevel and not node.target.startswith("_"):
- self.writeline(f"context.exported_vars.discard({node.target!r})")
-
- def visit_FromImport(self, node, frame):
- """Visit named imports."""
- self.newline(node)
- prefix = "await " if self.environment.is_async else ""
- self.write(f"included_template = {prefix}environment.get_template(")
- self.visit(node.template, frame)
- self.write(f", {self.name!r}).")
- if node.with_context:
- func = "make_module" + ("_async" if self.environment.is_async else "")
- self.write(
- f"{func}(context.get_all(), True, {self.dump_local_context(frame)})"
- )
- elif self.environment.is_async:
- self.write("_get_default_module_async()")
- else:
- self.write("_get_default_module(context)")
-
- var_names = []
- discarded_names = []
- for name in node.names:
- if isinstance(name, tuple):
- name, alias = name
- else:
- alias = name
- self.writeline(
- f"{frame.symbols.ref(alias)} ="
- f" getattr(included_template, {name!r}, missing)"
- )
- self.writeline(f"if {frame.symbols.ref(alias)} is missing:")
- self.indent()
- message = (
- "the template {included_template.__name__!r}"
- f" (imported on {self.position(node)})"
- f" does not export the requested name {name!r}"
- )
- self.writeline(
- f"{frame.symbols.ref(alias)} = undefined(f{message!r}, name={name!r})"
- )
- self.outdent()
- if frame.toplevel:
- var_names.append(alias)
- if not alias.startswith("_"):
- discarded_names.append(alias)
-
- if var_names:
- if len(var_names) == 1:
- name = var_names[0]
- self.writeline(f"context.vars[{name!r}] = {frame.symbols.ref(name)}")
- else:
- names_kv = ", ".join(
- f"{name!r}: {frame.symbols.ref(name)}" for name in var_names
- )
- self.writeline(f"context.vars.update({{{names_kv}}})")
- if discarded_names:
- if len(discarded_names) == 1:
- self.writeline(f"context.exported_vars.discard({discarded_names[0]!r})")
- else:
- names_str = ", ".join(map(repr, discarded_names))
- self.writeline(
- f"context.exported_vars.difference_update(({names_str}))"
- )
-
- def visit_For(self, node, frame):
- loop_frame = frame.inner()
- test_frame = frame.inner()
- else_frame = frame.inner()
-
- # try to figure out if we have an extended loop. An extended loop
-        # is necessary if the loop is in recursive mode or if the special loop
- # variable is accessed in the body.
- extended_loop = node.recursive or "loop" in find_undeclared(
- node.iter_child_nodes(only=("body",)), ("loop",)
- )
-
- loop_ref = None
- if extended_loop:
- loop_ref = loop_frame.symbols.declare_parameter("loop")
-
- loop_frame.symbols.analyze_node(node, for_branch="body")
- if node.else_:
- else_frame.symbols.analyze_node(node, for_branch="else")
-
- if node.test:
- loop_filter_func = self.temporary_identifier()
- test_frame.symbols.analyze_node(node, for_branch="test")
- self.writeline(f"{self.func(loop_filter_func)}(fiter):", node.test)
- self.indent()
- self.enter_frame(test_frame)
- self.writeline("async for " if self.environment.is_async else "for ")
- self.visit(node.target, loop_frame)
- self.write(" in ")
- self.write("auto_aiter(fiter)" if self.environment.is_async else "fiter")
- self.write(":")
- self.indent()
- self.writeline("if ", node.test)
- self.visit(node.test, test_frame)
- self.write(":")
- self.indent()
- self.writeline("yield ")
- self.visit(node.target, loop_frame)
- self.outdent(3)
- self.leave_frame(test_frame, with_python_scope=True)
-
- # if we don't have a recursive loop we have to find the shadowed
- # variables at that point. Because loops can be nested but the loop
- # variable is a special one we have to enforce aliasing for it.
- if node.recursive:
- self.writeline(
- f"{self.func('loop')}(reciter, loop_render_func, depth=0):", node
- )
- self.indent()
- self.buffer(loop_frame)
-
- # Use the same buffer for the else frame
- else_frame.buffer = loop_frame.buffer
-
- # make sure the loop variable is a special one and raise a template
- # assertion error if a loop tries to write to loop
- if extended_loop:
- self.writeline(f"{loop_ref} = missing")
-
- for name in node.find_all(nodes.Name):
- if name.ctx == "store" and name.name == "loop":
- self.fail(
- "Can't assign to special loop variable in for-loop target",
- name.lineno,
- )
-
- if node.else_:
- iteration_indicator = self.temporary_identifier()
- self.writeline(f"{iteration_indicator} = 1")
-
- self.writeline("async for " if self.environment.is_async else "for ", node)
- self.visit(node.target, loop_frame)
- if extended_loop:
- prefix = "Async" if self.environment.is_async else ""
- self.write(f", {loop_ref} in {prefix}LoopContext(")
- else:
- self.write(" in ")
-
- if node.test:
- self.write(f"{loop_filter_func}(")
- if node.recursive:
- self.write("reciter")
- else:
- if self.environment.is_async and not extended_loop:
- self.write("auto_aiter(")
- self.visit(node.iter, frame)
- if self.environment.is_async and not extended_loop:
- self.write(")")
- if node.test:
- self.write(")")
-
- if node.recursive:
- self.write(", undefined, loop_render_func, depth):")
- else:
- self.write(", undefined):" if extended_loop else ":")
-
- self.indent()
- self.enter_frame(loop_frame)
-
- self.blockvisit(node.body, loop_frame)
- if node.else_:
- self.writeline(f"{iteration_indicator} = 0")
- self.outdent()
- self.leave_frame(
- loop_frame, with_python_scope=node.recursive and not node.else_
- )
-
- if node.else_:
- self.writeline(f"if {iteration_indicator}:")
- self.indent()
- self.enter_frame(else_frame)
- self.blockvisit(node.else_, else_frame)
- self.leave_frame(else_frame)
- self.outdent()
-
- # if the node was recursive we have to return the buffer contents
- # and start the iteration code
- if node.recursive:
- self.return_buffer_contents(loop_frame)
- self.outdent()
- self.start_write(frame, node)
- if self.environment.is_async:
- self.write("await ")
- self.write("loop(")
- if self.environment.is_async:
- self.write("auto_aiter(")
- self.visit(node.iter, frame)
- if self.environment.is_async:
- self.write(")")
- self.write(", loop)")
- self.end_write(frame)
-
- def visit_If(self, node, frame):
- if_frame = frame.soft()
- self.writeline("if ", node)
- self.visit(node.test, if_frame)
- self.write(":")
- self.indent()
- self.blockvisit(node.body, if_frame)
- self.outdent()
- for elif_ in node.elif_:
- self.writeline("elif ", elif_)
- self.visit(elif_.test, if_frame)
- self.write(":")
- self.indent()
- self.blockvisit(elif_.body, if_frame)
- self.outdent()
- if node.else_:
- self.writeline("else:")
- self.indent()
- self.blockvisit(node.else_, if_frame)
- self.outdent()
-
- def visit_Macro(self, node, frame):
- macro_frame, macro_ref = self.macro_body(node, frame)
- self.newline()
- if frame.toplevel:
- if not node.name.startswith("_"):
- self.write(f"context.exported_vars.add({node.name!r})")
- self.writeline(f"context.vars[{node.name!r}] = ")
- self.write(f"{frame.symbols.ref(node.name)} = ")
- self.macro_def(macro_ref, macro_frame)
-
- def visit_CallBlock(self, node, frame):
- call_frame, macro_ref = self.macro_body(node, frame)
- self.writeline("caller = ")
- self.macro_def(macro_ref, call_frame)
- self.start_write(frame, node)
- self.visit_Call(node.call, frame, forward_caller=True)
- self.end_write(frame)
-
- def visit_FilterBlock(self, node, frame):
- filter_frame = frame.inner()
- filter_frame.symbols.analyze_node(node)
- self.enter_frame(filter_frame)
- self.buffer(filter_frame)
- self.blockvisit(node.body, filter_frame)
- self.start_write(frame, node)
- self.visit_Filter(node.filter, filter_frame)
- self.end_write(frame)
- self.leave_frame(filter_frame)
-
- def visit_With(self, node, frame):
- with_frame = frame.inner()
- with_frame.symbols.analyze_node(node)
- self.enter_frame(with_frame)
- for target, expr in zip(node.targets, node.values):
- self.newline()
- self.visit(target, with_frame)
- self.write(" = ")
- self.visit(expr, frame)
- self.blockvisit(node.body, with_frame)
- self.leave_frame(with_frame)
-
- def visit_ExprStmt(self, node, frame):
- self.newline(node)
- self.visit(node.node, frame)
-
- _FinalizeInfo = namedtuple("_FinalizeInfo", ("const", "src"))
- #: The default finalize function if the environment isn't configured
- #: with one. Or if the environment has one, this is called on that
- #: function's output for constants.
- _default_finalize = str
- _finalize = None
-
- def _make_finalize(self):
- """Build the finalize function to be used on constants and at
- runtime. Cached so it's only created once for all output nodes.
-
- Returns a ``namedtuple`` with the following attributes:
-
- ``const``
- A function to finalize constant data at compile time.
-
- ``src``
- Source code to output around nodes to be evaluated at
- runtime.
- """
- if self._finalize is not None:
- return self._finalize
-
- finalize = default = self._default_finalize
- src = None
-
- if self.environment.finalize:
- src = "environment.finalize("
- env_finalize = self.environment.finalize
-
- def finalize(value):
- return default(env_finalize(value))
-
- if getattr(env_finalize, "contextfunction", False) is True:
- src += "context, "
- finalize = None # noqa: F811
- elif getattr(env_finalize, "evalcontextfunction", False) is True:
- src += "context.eval_ctx, "
- finalize = None
- elif getattr(env_finalize, "environmentfunction", False) is True:
- src += "environment, "
-
- def finalize(value):
- return default(env_finalize(self.environment, value))
-
- self._finalize = self._FinalizeInfo(finalize, src)
- return self._finalize
-
- def _output_const_repr(self, group):
- """Given a group of constant values converted from ``Output``
- child nodes, produce a string to write to the template module
- source.
- """
- return repr(concat(group))
-
- def _output_child_to_const(self, node, frame, finalize):
- """Try to optimize a child of an ``Output`` node by trying to
- convert it to constant, finalized data at compile time.
-
- If :exc:`Impossible` is raised, the node is not constant and
- will be evaluated at runtime. Any other exception also causes the
- node to be evaluated at runtime, where the error is easier to debug.
- """
- const = node.as_const(frame.eval_ctx)
-
- if frame.eval_ctx.autoescape:
- const = escape(const)
-
- # Template data doesn't go through finalize.
- if isinstance(node, nodes.TemplateData):
- return str(const)
-
- return finalize.const(const)
-
- def _output_child_pre(self, node, frame, finalize):
- """Output extra source code before visiting a child of an
- ``Output`` node.
- """
- if frame.eval_ctx.volatile:
- self.write("(escape if context.eval_ctx.autoescape else str)(")
- elif frame.eval_ctx.autoescape:
- self.write("escape(")
- else:
- self.write("str(")
-
- if finalize.src is not None:
- self.write(finalize.src)
-
- def _output_child_post(self, node, frame, finalize):
- """Output extra source code after visiting a child of an
- ``Output`` node.
- """
- self.write(")")
-
- if finalize.src is not None:
- self.write(")")
-
- def visit_Output(self, node, frame):
- # If an extends is active, don't render outside a block.
- if frame.require_output_check:
- # A top-level extends is known to exist at compile time.
- if self.has_known_extends:
- return
-
- self.writeline("if parent_template is None:")
- self.indent()
-
- finalize = self._make_finalize()
- body = []
-
- # Evaluate constants at compile time if possible. Each item in
- # body will be either a list of static data or a node to be
- # evaluated at runtime.
- for child in node.nodes:
- try:
- if not (
- # If the finalize function requires runtime context,
- # constants can't be evaluated at compile time.
- finalize.const
- # Unless it's basic template data that won't be
- # finalized anyway.
- or isinstance(child, nodes.TemplateData)
- ):
- raise nodes.Impossible()
-
- const = self._output_child_to_const(child, frame, finalize)
- except (nodes.Impossible, Exception):
- # The node was not constant and needs to be evaluated at
- # runtime. Or another error was raised, which is easier
- # to debug at runtime.
- body.append(child)
- continue
-
- if body and isinstance(body[-1], list):
- body[-1].append(const)
- else:
- body.append([const])
-
- if frame.buffer is not None:
- if len(body) == 1:
- self.writeline(f"{frame.buffer}.append(")
- else:
- self.writeline(f"{frame.buffer}.extend((")
-
- self.indent()
-
- for item in body:
- if isinstance(item, list):
- # A group of constant data to join and output.
- val = self._output_const_repr(item)
-
- if frame.buffer is None:
- self.writeline("yield " + val)
- else:
- self.writeline(val + ",")
- else:
- if frame.buffer is None:
- self.writeline("yield ", item)
- else:
- self.newline(item)
-
- # A node to be evaluated at runtime.
- self._output_child_pre(item, frame, finalize)
- self.visit(item, frame)
- self._output_child_post(item, frame, finalize)
-
- if frame.buffer is not None:
- self.write(",")
-
- if frame.buffer is not None:
- self.outdent()
- self.writeline(")" if len(body) == 1 else "))")
-
- if frame.require_output_check:
- self.outdent()
-
- def visit_Assign(self, node, frame):
- self.push_assign_tracking()
- self.newline(node)
- self.visit(node.target, frame)
- self.write(" = ")
- self.visit(node.node, frame)
- self.pop_assign_tracking(frame)
-
- def visit_AssignBlock(self, node, frame):
- self.push_assign_tracking()
- block_frame = frame.inner()
- # This is a special case. Since a set block always captures we
- # will disable output checks. This way one can use set blocks
- # toplevel even in extended templates.
- block_frame.require_output_check = False
- block_frame.symbols.analyze_node(node)
- self.enter_frame(block_frame)
- self.buffer(block_frame)
- self.blockvisit(node.body, block_frame)
- self.newline(node)
- self.visit(node.target, frame)
- self.write(" = (Markup if context.eval_ctx.autoescape else identity)(")
- if node.filter is not None:
- self.visit_Filter(node.filter, block_frame)
- else:
- self.write(f"concat({block_frame.buffer})")
- self.write(")")
- self.pop_assign_tracking(frame)
- self.leave_frame(block_frame)
-
- # -- Expression Visitors
-
- def visit_Name(self, node, frame):
- if node.ctx == "store" and frame.toplevel:
- if self._assign_stack:
- self._assign_stack[-1].add(node.name)
- ref = frame.symbols.ref(node.name)
-
- # If we are looking up a variable we might have to deal with the
- # case where it's undefined. We can skip that case if the load
- # instruction indicates a parameter, which is always defined.
- if node.ctx == "load":
- load = frame.symbols.find_load(ref)
- if not (
- load is not None
- and load[0] == VAR_LOAD_PARAMETER
- and not self.parameter_is_undeclared(ref)
- ):
- self.write(
- f"(undefined(name={node.name!r}) if {ref} is missing else {ref})"
- )
- return
-
- self.write(ref)
-
- def visit_NSRef(self, node, frame):
- # NSRefs can only be used to store values; since they use the normal
- # `foo.bar` notation they will be parsed as a normal attribute access
- # when used anywhere but in a `set` context
- ref = frame.symbols.ref(node.name)
- self.writeline(f"if not isinstance({ref}, Namespace):")
- self.indent()
- self.writeline(
- "raise TemplateRuntimeError"
- '("cannot assign attribute on non-namespace object")'
- )
- self.outdent()
- self.writeline(f"{ref}[{node.attr!r}]")
-
- def visit_Const(self, node, frame):
- val = node.as_const(frame.eval_ctx)
- if isinstance(val, float):
- self.write(str(val))
- else:
- self.write(repr(val))
-
- def visit_TemplateData(self, node, frame):
- try:
- self.write(repr(node.as_const(frame.eval_ctx)))
- except nodes.Impossible:
- self.write(
- f"(Markup if context.eval_ctx.autoescape else identity)({node.data!r})"
- )
-
- def visit_Tuple(self, node, frame):
- self.write("(")
- idx = -1
- for idx, item in enumerate(node.items):
- if idx:
- self.write(", ")
- self.visit(item, frame)
- self.write(",)" if idx == 0 else ")")
-
- def visit_List(self, node, frame):
- self.write("[")
- for idx, item in enumerate(node.items):
- if idx:
- self.write(", ")
- self.visit(item, frame)
- self.write("]")
-
- def visit_Dict(self, node, frame):
- self.write("{")
- for idx, item in enumerate(node.items):
- if idx:
- self.write(", ")
- self.visit(item.key, frame)
- self.write(": ")
- self.visit(item.value, frame)
- self.write("}")
-
- def binop(operator, interceptable=True): # noqa: B902
- @optimizeconst
- def visitor(self, node, frame):
- if (
- self.environment.sandboxed
- and operator in self.environment.intercepted_binops
- ):
- self.write(f"environment.call_binop(context, {operator!r}, ")
- self.visit(node.left, frame)
- self.write(", ")
- self.visit(node.right, frame)
- else:
- self.write("(")
- self.visit(node.left, frame)
- self.write(f" {operator} ")
- self.visit(node.right, frame)
- self.write(")")
-
- return visitor
-
- def uaop(operator, interceptable=True): # noqa: B902
- @optimizeconst
- def visitor(self, node, frame):
- if (
- self.environment.sandboxed
- and operator in self.environment.intercepted_unops
- ):
- self.write(f"environment.call_unop(context, {operator!r}, ")
- self.visit(node.node, frame)
- else:
- self.write("(" + operator)
- self.visit(node.node, frame)
- self.write(")")
-
- return visitor
-
- visit_Add = binop("+")
- visit_Sub = binop("-")
- visit_Mul = binop("*")
- visit_Div = binop("/")
- visit_FloorDiv = binop("//")
- visit_Pow = binop("**")
- visit_Mod = binop("%")
- visit_And = binop("and", interceptable=False)
- visit_Or = binop("or", interceptable=False)
- visit_Pos = uaop("+")
- visit_Neg = uaop("-")
- visit_Not = uaop("not ", interceptable=False)
- del binop, uaop
-
- @optimizeconst
- def visit_Concat(self, node, frame):
- if frame.eval_ctx.volatile:
- func_name = "(markup_join if context.eval_ctx.volatile else str_join)"
- elif frame.eval_ctx.autoescape:
- func_name = "markup_join"
- else:
- func_name = "str_join"
- self.write(f"{func_name}((")
- for arg in node.nodes:
- self.visit(arg, frame)
- self.write(", ")
- self.write("))")
-
- @optimizeconst
- def visit_Compare(self, node, frame):
- self.write("(")
- self.visit(node.expr, frame)
- for op in node.ops:
- self.visit(op, frame)
- self.write(")")
-
- def visit_Operand(self, node, frame):
- self.write(f" {operators[node.op]} ")
- self.visit(node.expr, frame)
-
- @optimizeconst
- def visit_Getattr(self, node, frame):
- if self.environment.is_async:
- self.write("(await auto_await(")
-
- self.write("environment.getattr(")
- self.visit(node.node, frame)
- self.write(f", {node.attr!r})")
-
- if self.environment.is_async:
- self.write("))")
-
- @optimizeconst
- def visit_Getitem(self, node, frame):
- # slices bypass the environment getitem method.
- if isinstance(node.arg, nodes.Slice):
- self.visit(node.node, frame)
- self.write("[")
- self.visit(node.arg, frame)
- self.write("]")
- else:
- if self.environment.is_async:
- self.write("(await auto_await(")
-
- self.write("environment.getitem(")
- self.visit(node.node, frame)
- self.write(", ")
- self.visit(node.arg, frame)
- self.write(")")
-
- if self.environment.is_async:
- self.write("))")
-
- def visit_Slice(self, node, frame):
- if node.start is not None:
- self.visit(node.start, frame)
- self.write(":")
- if node.stop is not None:
- self.visit(node.stop, frame)
- if node.step is not None:
- self.write(":")
- self.visit(node.step, frame)
-
- @optimizeconst
- def visit_Filter(self, node, frame):
- if self.environment.is_async:
- self.write("await auto_await(")
- self.write(self.filters[node.name] + "(")
- func = self.environment.filters.get(node.name)
- if func is None:
- self.fail(f"no filter named {node.name!r}", node.lineno)
- if getattr(func, "contextfilter", False) is True:
- self.write("context, ")
- elif getattr(func, "evalcontextfilter", False) is True:
- self.write("context.eval_ctx, ")
- elif getattr(func, "environmentfilter", False) is True:
- self.write("environment, ")
-
- # if the filter node is None we are inside a filter block
- # and want to write to the current buffer
- if node.node is not None:
- self.visit(node.node, frame)
- elif frame.eval_ctx.volatile:
- self.write(
- f"(Markup(concat({frame.buffer}))"
- f" if context.eval_ctx.autoescape else concat({frame.buffer}))"
- )
- elif frame.eval_ctx.autoescape:
- self.write(f"Markup(concat({frame.buffer}))")
- else:
- self.write(f"concat({frame.buffer})")
- self.signature(node, frame)
- self.write(")")
- if self.environment.is_async:
- self.write(")")
-
- @optimizeconst
- def visit_Test(self, node, frame):
- self.write(self.tests[node.name] + "(")
- if node.name not in self.environment.tests:
- self.fail(f"no test named {node.name!r}", node.lineno)
- self.visit(node.node, frame)
- self.signature(node, frame)
- self.write(")")
-
- @optimizeconst
- def visit_CondExpr(self, node, frame):
- def write_expr2():
- if node.expr2 is not None:
- return self.visit(node.expr2, frame)
- self.write(
- f'cond_expr_undefined("the inline if-expression on'
- f" {self.position(node)} evaluated to false and no else"
- f' section was defined.")'
- )
-
- self.write("(")
- self.visit(node.expr1, frame)
- self.write(" if ")
- self.visit(node.test, frame)
- self.write(" else ")
- write_expr2()
- self.write(")")
-
- @optimizeconst
- def visit_Call(self, node, frame, forward_caller=False):
- if self.environment.is_async:
- self.write("await auto_await(")
- if self.environment.sandboxed:
- self.write("environment.call(context, ")
- else:
- self.write("context.call(")
- self.visit(node.node, frame)
- extra_kwargs = {"caller": "caller"} if forward_caller else None
- self.signature(node, frame, extra_kwargs)
- self.write(")")
- if self.environment.is_async:
- self.write(")")
-
- def visit_Keyword(self, node, frame):
- self.write(node.key + "=")
- self.visit(node.value, frame)
-
- # -- Unused nodes for extensions
-
- def visit_MarkSafe(self, node, frame):
- self.write("Markup(")
- self.visit(node.expr, frame)
- self.write(")")
-
- def visit_MarkSafeIfAutoescape(self, node, frame):
- self.write("(Markup if context.eval_ctx.autoescape else identity)(")
- self.visit(node.expr, frame)
- self.write(")")
-
- def visit_EnvironmentAttribute(self, node, frame):
- self.write("environment." + node.name)
-
- def visit_ExtensionAttribute(self, node, frame):
- self.write(f"environment.extensions[{node.identifier!r}].{node.name}")
-
- def visit_ImportedName(self, node, frame):
- self.write(self.import_aliases[node.importname])
-
- def visit_InternalName(self, node, frame):
- self.write(node.name)
-
- def visit_ContextReference(self, node, frame):
- self.write("context")
-
- def visit_DerivedContextReference(self, node, frame):
- self.write(self.derive_context(frame))
-
- def visit_Continue(self, node, frame):
- self.writeline("continue", node)
-
- def visit_Break(self, node, frame):
- self.writeline("break", node)
-
- def visit_Scope(self, node, frame):
- scope_frame = frame.inner()
- scope_frame.symbols.analyze_node(node)
- self.enter_frame(scope_frame)
- self.blockvisit(node.body, scope_frame)
- self.leave_frame(scope_frame)
-
- def visit_OverlayScope(self, node, frame):
- ctx = self.temporary_identifier()
- self.writeline(f"{ctx} = {self.derive_context(frame)}")
- self.writeline(f"{ctx}.vars = ")
- self.visit(node.context, frame)
- self.push_context_reference(ctx)
-
- scope_frame = frame.inner(isolated=True)
- scope_frame.symbols.analyze_node(node)
- self.enter_frame(scope_frame)
- self.blockvisit(node.body, scope_frame)
- self.leave_frame(scope_frame)
- self.pop_context_reference()
-
- def visit_EvalContextModifier(self, node, frame):
- for keyword in node.options:
- self.writeline(f"context.eval_ctx.{keyword.key} = ")
- self.visit(keyword.value, frame)
- try:
- val = keyword.value.as_const(frame.eval_ctx)
- except nodes.Impossible:
- frame.eval_ctx.volatile = True
- else:
- setattr(frame.eval_ctx, keyword.key, val)
-
- def visit_ScopedEvalContextModifier(self, node, frame):
- old_ctx_name = self.temporary_identifier()
- saved_ctx = frame.eval_ctx.save()
- self.writeline(f"{old_ctx_name} = context.eval_ctx.save()")
- self.visit_EvalContextModifier(node, frame)
- for child in node.body:
- self.visit(child, frame)
- frame.eval_ctx.revert(saved_ctx)
- self.writeline(f"context.eval_ctx.revert({old_ctx_name})")
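The visitor methods removed above form Jinja's code generator: each visit_* method writes plain Python source for one node of the template AST. As a rough illustration (a sketch, not from the patch itself), the generated module source can be inspected through Environment.compile with raw=True, which appears further down in this same diff:

    from jinja2 import Environment

    env = Environment()
    source = "{% for item in items %}{{ item }}{% endfor %}"

    # raw=True returns the generated Python source as a string instead of a
    # compiled code object; the exact output depends on the Jinja version.
    print(env.compile(source, raw=True))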
diff --git a/src/jinja2/constants.py b/src/jinja2/constants.py
deleted file mode 100644
index 41a1c23b..00000000
--- a/src/jinja2/constants.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#: list of lorem ipsum words used by the lipsum() helper function
-LOREM_IPSUM_WORDS = """\
-a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at
-auctor augue bibendum blandit class commodo condimentum congue consectetuer
-consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus
-diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend
-elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames
-faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac
-hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum
-justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem
-luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie
-mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non
-nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque
-penatibus per pharetra phasellus placerat platea porta porttitor posuere
-potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus
-ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit
-sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor
-tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices
-ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus
-viverra volutpat vulputate"""
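LOREM_IPSUM_WORDS is consumed by jinja2.utils.generate_lorem_ipsum, which the DEFAULT_NAMESPACE further down in this diff exposes to templates as lipsum(). A small usage sketch (not from the patch itself):

    from jinja2 import Environment

    env = Environment()
    # One paragraph of plain-text lorem ipsum drawn from the word list above.
    print(env.from_string("{{ lipsum(n=1, html=False) }}").render())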
diff --git a/src/jinja2/debug.py b/src/jinja2/debug.py
deleted file mode 100644
index 5cac28ba..00000000
--- a/src/jinja2/debug.py
+++ /dev/null
@@ -1,261 +0,0 @@
-import platform
-import sys
-from types import CodeType
-
-from . import TemplateSyntaxError
-from .utils import internal_code
-from .utils import missing
-
-
-def rewrite_traceback_stack(source=None):
- """Rewrite the current exception to replace any tracebacks from
- within compiled template code with tracebacks that look like they
- came from the template source.
-
- This must be called within an ``except`` block.
-
- :param source: For ``TemplateSyntaxError``, the original source if
- known.
- :return: The original exception with the rewritten traceback.
- """
- _, exc_value, tb = sys.exc_info()
-
- if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated:
- exc_value.translated = True
- exc_value.source = source
- # Remove the old traceback, otherwise the frames from the
- # compiler still show up.
- exc_value.with_traceback(None)
- # Outside of runtime, so the frame isn't executing template
- # code, but it still needs to point at the template.
- tb = fake_traceback(
- exc_value, None, exc_value.filename or "<unknown>", exc_value.lineno
- )
- else:
- # Skip the frame for the render function.
- tb = tb.tb_next
-
- stack = []
-
- # Build the stack of traceback objects, replacing any in template
- # code with the source file and line information.
- while tb is not None:
- # Skip frames decorated with @internalcode. These are internal
- # calls that aren't useful in template debugging output.
- if tb.tb_frame.f_code in internal_code:
- tb = tb.tb_next
- continue
-
- template = tb.tb_frame.f_globals.get("__jinja_template__")
-
- if template is not None:
- lineno = template.get_corresponding_lineno(tb.tb_lineno)
- fake_tb = fake_traceback(exc_value, tb, template.filename, lineno)
- stack.append(fake_tb)
- else:
- stack.append(tb)
-
- tb = tb.tb_next
-
- tb_next = None
-
- # Assign tb_next in reverse to avoid circular references.
- for tb in reversed(stack):
- tb_next = tb_set_next(tb, tb_next)
-
- return exc_value.with_traceback(tb_next)
-
-
-def fake_traceback(exc_value, tb, filename, lineno):
- """Produce a new traceback object that looks like it came from the
- template source instead of the compiled code. The filename, line
- number, and location name will point to the template, and the local
- variables will be the current template context.
-
- :param exc_value: The original exception to be re-raised to create
- the new traceback.
- :param tb: The original traceback to get the local variables and
- code info from.
- :param filename: The template filename.
- :param lineno: The line number in the template source.
- """
- if tb is not None:
- # Replace the real locals with the context that would be
- # available at that point in the template.
- locals = get_template_locals(tb.tb_frame.f_locals)
- locals.pop("__jinja_exception__", None)
- else:
- locals = {}
-
- globals = {
- "__name__": filename,
- "__file__": filename,
- "__jinja_exception__": exc_value,
- }
- # Raise an exception at the correct line number.
- code = compile("\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec")
-
- # Build a new code object that points to the template file and
- # replaces the location with a block name.
- try:
- location = "template"
-
- if tb is not None:
- function = tb.tb_frame.f_code.co_name
-
- if function == "root":
- location = "top-level template code"
- elif function.startswith("block_"):
- location = f"block {function[6:]!r}"
-
- # Collect arguments for the new code object. CodeType only
- # accepts positional arguments, and arguments were inserted in
- # new Python versions.
- code_args = []
-
- for attr in (
- "argcount",
- "posonlyargcount", # Python 3.8
- "kwonlyargcount",
- "nlocals",
- "stacksize",
- "flags",
- "code", # codestring
- "consts", # constants
- "names",
- "varnames",
- ("filename", filename),
- ("name", location),
- "firstlineno",
- "lnotab",
- "freevars",
- "cellvars",
- ):
- if isinstance(attr, tuple):
- # Replace with given value.
- code_args.append(attr[1])
- continue
-
- try:
- # Copy original value if it exists.
- code_args.append(getattr(code, "co_" + attr))
- except AttributeError:
- # Some arguments were added later.
- continue
-
- code = CodeType(*code_args)
- except Exception:
- # Some environments such as Google App Engine don't support
- # modifying code objects.
- pass
-
- # Execute the new code, which is guaranteed to raise, and return
- # the new traceback without this frame.
- try:
- exec(code, globals, locals)
- except BaseException:
- return sys.exc_info()[2].tb_next
-
-
-def get_template_locals(real_locals):
- """Based on the runtime locals, get the context that would be
- available at that point in the template.
- """
- # Start with the current template context.
- ctx = real_locals.get("context")
-
- if ctx:
- data = ctx.get_all().copy()
- else:
- data = {}
-
- # Might be in a derived context that only sets local variables
- # rather than pushing a context. Local variables follow the scheme
- # l_depth_name. Find the highest-depth local that has a value for
- # each name.
- local_overrides = {}
-
- for name, value in real_locals.items():
- if not name.startswith("l_") or value is missing:
- # Not a template variable, or no longer relevant.
- continue
-
- try:
- _, depth, name = name.split("_", 2)
- depth = int(depth)
- except ValueError:
- continue
-
- cur_depth = local_overrides.get(name, (-1,))[0]
-
- if cur_depth < depth:
- local_overrides[name] = (depth, value)
-
- # Modify the context with any derived context.
- for name, (_, value) in local_overrides.items():
- if value is missing:
- data.pop(name, None)
- else:
- data[name] = value
-
- return data
-
-
-if sys.version_info >= (3, 7):
- # tb_next is directly assignable as of Python 3.7
- def tb_set_next(tb, tb_next):
- tb.tb_next = tb_next
- return tb
-
-
-elif platform.python_implementation() == "PyPy":
- # PyPy might have special support, and won't work with ctypes.
- try:
- import tputil
- except ImportError:
- # Without tproxy support, use the original traceback.
- def tb_set_next(tb, tb_next):
- return tb
-
- else:
- # With tproxy support, create a proxy around the traceback that
- # returns the new tb_next.
- def tb_set_next(tb, tb_next):
- def controller(op):
- if op.opname == "__getattribute__" and op.args[0] == "tb_next":
- return tb_next
-
- return op.delegate()
-
- return tputil.make_proxy(controller, obj=tb)
-
-
-else:
- # Use ctypes to assign tb_next at the C level since it's read-only
- # from Python.
- import ctypes
-
- class _CTraceback(ctypes.Structure):
- _fields_ = [
- # Extra PyObject slots when compiled with Py_TRACE_REFS.
- ("PyObject_HEAD", ctypes.c_byte * object().__sizeof__()),
- # Only care about tb_next as an object, not a traceback.
- ("tb_next", ctypes.py_object),
- ]
-
- def tb_set_next(tb, tb_next):
- c_tb = _CTraceback.from_address(id(tb))
-
- # Clear out the old tb_next.
- if tb.tb_next is not None:
- c_tb_next = ctypes.py_object(tb.tb_next)
- c_tb.tb_next = ctypes.py_object()
- ctypes.pythonapi.Py_DecRef(c_tb_next)
-
- # Assign the new tb_next.
- if tb_next is not None:
- c_tb_next = ctypes.py_object(tb_next)
- ctypes.pythonapi.Py_IncRef(c_tb_next)
- c_tb.tb_next = c_tb_next
-
- return tb
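The module above exists so that errors raised while rendering point at the template source rather than at the generated Python module. A hedged sketch of the observable effect (not from the patch itself; the exact frame labels depend on the Python and Jinja versions):

    import traceback
    from jinja2 import Environment

    env = Environment()
    template = env.from_string("{{ 1 // 0 }}")

    try:
        template.render()
    except ZeroDivisionError:
        # The printed traceback includes a frame like
        #   File "<template>", line 1, in top-level template code
        # built by fake_traceback() above.
        traceback.print_exc()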
diff --git a/src/jinja2/defaults.py b/src/jinja2/defaults.py
deleted file mode 100644
index 1f0b0ab0..00000000
--- a/src/jinja2/defaults.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401
-from .tests import TESTS as DEFAULT_TESTS # noqa: F401
-from .utils import Cycler
-from .utils import generate_lorem_ipsum
-from .utils import Joiner
-from .utils import Namespace
-
-# defaults for the parser / lexer
-BLOCK_START_STRING = "{%"
-BLOCK_END_STRING = "%}"
-VARIABLE_START_STRING = "{{"
-VARIABLE_END_STRING = "}}"
-COMMENT_START_STRING = "{#"
-COMMENT_END_STRING = "#}"
-LINE_STATEMENT_PREFIX = None
-LINE_COMMENT_PREFIX = None
-TRIM_BLOCKS = False
-LSTRIP_BLOCKS = False
-NEWLINE_SEQUENCE = "\n"
-KEEP_TRAILING_NEWLINE = False
-
-# default filters, tests and namespace
-
-DEFAULT_NAMESPACE = {
- "range": range,
- "dict": dict,
- "lipsum": generate_lorem_ipsum,
- "cycler": Cycler,
- "joiner": Joiner,
- "namespace": Namespace,
-}
-
-# default policies
-DEFAULT_POLICIES = {
- "compiler.ascii_str": True,
- "urlize.rel": "noopener",
- "urlize.target": None,
- "truncate.leeway": 5,
- "json.dumps_function": None,
- "json.dumps_kwargs": {"sort_keys": True},
- "ext.i18n.trimmed": False,
-}
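Every Environment copies DEFAULT_NAMESPACE into its globals and DEFAULT_POLICIES into its policies dict, so both can be adjusted per environment. An illustrative sketch (not from the patch itself):

    from jinja2 import Environment

    env = Environment()

    # DEFAULT_NAMESPACE entries such as cycler are available in every template.
    print(env.from_string("{{ cycler('odd', 'even').next() }}").render())

    # DEFAULT_POLICIES is consulted by filters at call time; for example the
    # truncate filter reads "truncate.leeway" when no explicit leeway is given.
    env.policies["truncate.leeway"] = 0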
diff --git a/src/jinja2/environment.py b/src/jinja2/environment.py
deleted file mode 100644
index 556f7255..00000000
--- a/src/jinja2/environment.py
+++ /dev/null
@@ -1,1331 +0,0 @@
-"""Classes for managing templates and their runtime and compile time
-options.
-"""
-import os
-import sys
-import weakref
-from functools import partial
-from functools import reduce
-
-from markupsafe import Markup
-
-from . import nodes
-from .compiler import CodeGenerator
-from .compiler import generate
-from .defaults import BLOCK_END_STRING
-from .defaults import BLOCK_START_STRING
-from .defaults import COMMENT_END_STRING
-from .defaults import COMMENT_START_STRING
-from .defaults import DEFAULT_FILTERS
-from .defaults import DEFAULT_NAMESPACE
-from .defaults import DEFAULT_POLICIES
-from .defaults import DEFAULT_TESTS
-from .defaults import KEEP_TRAILING_NEWLINE
-from .defaults import LINE_COMMENT_PREFIX
-from .defaults import LINE_STATEMENT_PREFIX
-from .defaults import LSTRIP_BLOCKS
-from .defaults import NEWLINE_SEQUENCE
-from .defaults import TRIM_BLOCKS
-from .defaults import VARIABLE_END_STRING
-from .defaults import VARIABLE_START_STRING
-from .exceptions import TemplateNotFound
-from .exceptions import TemplateRuntimeError
-from .exceptions import TemplatesNotFound
-from .exceptions import TemplateSyntaxError
-from .exceptions import UndefinedError
-from .lexer import get_lexer
-from .lexer import TokenStream
-from .nodes import EvalContext
-from .parser import Parser
-from .runtime import Context
-from .runtime import new_context
-from .runtime import Undefined
-from .utils import concat
-from .utils import consume
-from .utils import have_async_gen
-from .utils import import_string
-from .utils import internalcode
-from .utils import LRUCache
-from .utils import missing
-
-# for direct template usage we have up to ten living environments
-_spontaneous_environments = LRUCache(10)
-
-
-def get_spontaneous_environment(cls, *args):
- """Return a new spontaneous environment. A spontaneous environment
- is used for templates created directly rather than through an
- existing environment.
-
- :param cls: Environment class to create.
- :param args: Positional arguments passed to environment.
- """
- key = (cls, args)
-
- try:
- return _spontaneous_environments[key]
- except KeyError:
- _spontaneous_environments[key] = env = cls(*args)
- env.shared = True
- return env
-
-
-def create_cache(size):
- """Return the cache class for the given size."""
- if size == 0:
- return None
- if size < 0:
- return {}
- return LRUCache(size)
-
-
-def copy_cache(cache):
- """Create an empty copy of the given cache."""
- if cache is None:
- return None
- elif type(cache) is dict:
- return {}
- return LRUCache(cache.capacity)
-
-
-def load_extensions(environment, extensions):
- """Load the extensions from the list and bind it to the environment.
- Returns a dict of instantiated extensions.
- """
- result = {}
- for extension in extensions:
- if isinstance(extension, str):
- extension = import_string(extension)
- result[extension.identifier] = extension(environment)
- return result
-
-
-def fail_for_missing_callable(thing, name):
- msg = f"no {thing} named {name!r}"
-
- if isinstance(name, Undefined):
- try:
- name._fail_with_undefined_error()
- except Exception as e:
- msg = f"{msg} ({e}; did you forget to quote the callable name?)"
- raise TemplateRuntimeError(msg)
-
-
-def _environment_sanity_check(environment):
- """Perform a sanity check on the environment."""
- assert issubclass(
- environment.undefined, Undefined
- ), "undefined must be a subclass of undefined because filters depend on it."
- assert (
- environment.block_start_string
- != environment.variable_start_string
- != environment.comment_start_string
- ), "block, variable and comment start strings must be different"
- assert environment.newline_sequence in {
- "\r",
- "\r\n",
- "\n",
- }, "newline_sequence set to unknown line ending string."
- return environment
-
-
-class Environment:
- r"""The core component of Jinja is the `Environment`. It contains
- important shared variables like configuration, filters, tests,
- globals and others. Instances of this class may be modified if
- they are not shared and if no template was loaded so far.
- Modifications on environments after the first template was loaded
- will lead to surprising effects and undefined behavior.
-
- Here are the possible initialization parameters:
-
- `block_start_string`
- The string marking the beginning of a block. Defaults to ``'{%'``.
-
- `block_end_string`
- The string marking the end of a block. Defaults to ``'%}'``.
-
- `variable_start_string`
- The string marking the beginning of a print statement.
- Defaults to ``'{{'``.
-
- `variable_end_string`
- The string marking the end of a print statement. Defaults to
- ``'}}'``.
-
- `comment_start_string`
- The string marking the beginning of a comment. Defaults to ``'{#'``.
-
- `comment_end_string`
- The string marking the end of a comment. Defaults to ``'#}'``.
-
- `line_statement_prefix`
- If given and a string, this will be used as prefix for line based
- statements. See also :ref:`line-statements`.
-
- `line_comment_prefix`
- If given and a string, this will be used as prefix for line based
- comments. See also :ref:`line-statements`.
-
- .. versionadded:: 2.2
-
- `trim_blocks`
- If this is set to ``True`` the first newline after a block is
- removed (block, not variable tag!). Defaults to `False`.
-
- `lstrip_blocks`
- If this is set to ``True`` leading spaces and tabs are stripped
- from the start of a line to a block. Defaults to `False`.
-
- `newline_sequence`
- The sequence that starts a newline. Must be one of ``'\r'``,
- ``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a
- useful default for Linux and OS X systems as well as web
- applications.
-
- `keep_trailing_newline`
- Preserve the trailing newline when rendering templates.
- The default is ``False``, which causes a single newline,
- if present, to be stripped from the end of the template.
-
- .. versionadded:: 2.7
-
- `extensions`
- List of Jinja extensions to use. This can either be import paths
- as strings or extension classes. For more information have a
- look at :ref:`the extensions documentation <jinja-extensions>`.
-
- `optimized`
- should the optimizer be enabled? Default is ``True``.
-
- `undefined`
- :class:`Undefined` or a subclass of it that is used to represent
- undefined values in the template.
-
- `finalize`
- A callable that can be used to process the result of a variable
- expression before it is output. For example one can convert
- ``None`` implicitly into an empty string here.
-
- `autoescape`
- If set to ``True`` the XML/HTML autoescaping feature is enabled by
- default. For more details about autoescaping see
- :class:`~markupsafe.Markup`. As of Jinja 2.4 this can also
- be a callable that is passed the template name and has to
- return ``True`` or ``False`` depending on whether autoescape
- should be enabled by default.
-
- .. versionchanged:: 2.4
- `autoescape` can now be a function
-
- `loader`
- The template loader for this environment.
-
- `cache_size`
- The size of the cache. By default this is ``400`` which means
- that if more than 400 templates are loaded the loader will clean
- out the least recently used template. If the cache size is set to
- ``0`` templates are recompiled all the time, if the cache size is
- ``-1`` the cache will not be cleaned.
-
- .. versionchanged:: 2.8
- The cache size was increased to 400 from a low 50.
-
- `auto_reload`
- Some loaders load templates from locations where the template
- sources may change (i.e. file system or database). If
- ``auto_reload`` is set to ``True`` (default), every time a template
- is requested the loader checks if the source changed and, if so,
- reloads the template. For higher performance it's possible to
- disable that.
-
- `bytecode_cache`
- If set to a bytecode cache object, this object will provide a
- cache for the internal Jinja bytecode so that templates don't
- have to be parsed if they were not changed.
-
- See :ref:`bytecode-cache` for more information.
-
- `enable_async`
- If set to true this enables async template execution which
- allows using async functions and generators.
- """
-
- #: if this environment is sandboxed. Modifying this variable won't make
- #: the environment sandboxed though. For a real sandboxed environment
- #: have a look at jinja2.sandbox. This flag alone controls the code
- #: generation by the compiler.
- sandboxed = False
-
- #: True if the environment is just an overlay
- overlayed = False
-
- #: the environment this environment is linked to if it is an overlay
- linked_to = None
-
- #: shared environments have this set to `True`. A shared environment
- #: must not be modified
- shared = False
-
- #: the class that is used for code generation. See
- #: :class:`~jinja2.compiler.CodeGenerator` for more information.
- code_generator_class = CodeGenerator
-
- #: the context class that is used for templates. See
- #: :class:`~jinja2.runtime.Context` for more information.
- context_class = Context
-
- def __init__(
- self,
- block_start_string=BLOCK_START_STRING,
- block_end_string=BLOCK_END_STRING,
- variable_start_string=VARIABLE_START_STRING,
- variable_end_string=VARIABLE_END_STRING,
- comment_start_string=COMMENT_START_STRING,
- comment_end_string=COMMENT_END_STRING,
- line_statement_prefix=LINE_STATEMENT_PREFIX,
- line_comment_prefix=LINE_COMMENT_PREFIX,
- trim_blocks=TRIM_BLOCKS,
- lstrip_blocks=LSTRIP_BLOCKS,
- newline_sequence=NEWLINE_SEQUENCE,
- keep_trailing_newline=KEEP_TRAILING_NEWLINE,
- extensions=(),
- optimized=True,
- undefined=Undefined,
- finalize=None,
- autoescape=False,
- loader=None,
- cache_size=400,
- auto_reload=True,
- bytecode_cache=None,
- enable_async=False,
- ):
- # !!Important notice!!
- # The constructor accepts quite a few arguments that should be
- # passed by keyword rather than position. However it's important to
- # not change the order of arguments because it's used at least
- # internally in those cases:
- # - spontaneous environments (i18n extension and Template)
- # - unittests
- # If parameter changes are required, only add parameters at the end
- # and don't change the order (or the defaults!) of the existing
- # arguments.
-
- # lexer / parser information
- self.block_start_string = block_start_string
- self.block_end_string = block_end_string
- self.variable_start_string = variable_start_string
- self.variable_end_string = variable_end_string
- self.comment_start_string = comment_start_string
- self.comment_end_string = comment_end_string
- self.line_statement_prefix = line_statement_prefix
- self.line_comment_prefix = line_comment_prefix
- self.trim_blocks = trim_blocks
- self.lstrip_blocks = lstrip_blocks
- self.newline_sequence = newline_sequence
- self.keep_trailing_newline = keep_trailing_newline
-
- # runtime information
- self.undefined = undefined
- self.optimized = optimized
- self.finalize = finalize
- self.autoescape = autoescape
-
- # defaults
- self.filters = DEFAULT_FILTERS.copy()
- self.tests = DEFAULT_TESTS.copy()
- self.globals = DEFAULT_NAMESPACE.copy()
-
- # set the loader provided
- self.loader = loader
- self.cache = create_cache(cache_size)
- self.bytecode_cache = bytecode_cache
- self.auto_reload = auto_reload
-
- # configurable policies
- self.policies = DEFAULT_POLICIES.copy()
-
- # load extensions
- self.extensions = load_extensions(self, extensions)
-
- self.enable_async = enable_async
- self.is_async = self.enable_async and have_async_gen
- if self.is_async:
- # runs patch_all() to enable async support
- from . import asyncsupport # noqa: F401
-
- _environment_sanity_check(self)
-
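# Illustrative sketch, not part of the original file: a minimal Environment
# configured as the docstring above describes. The "templates" directory and
# the chosen options are hypothetical.
from jinja2 import Environment, FileSystemLoader, select_autoescape

env = Environment(
    loader=FileSystemLoader("templates"),
    autoescape=select_autoescape(["html", "xml"]),
    trim_blocks=True,
    lstrip_blocks=True,
)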
- def add_extension(self, extension):
- """Adds an extension after the environment was created.
-
- .. versionadded:: 2.5
- """
- self.extensions.update(load_extensions(self, [extension]))
-
- def extend(self, **attributes):
- """Add the items to the instance of the environment if they do not exist
- yet. This is used by :ref:`extensions <writing-extensions>` to register
- callbacks and configuration values without breaking inheritance.
- """
- for key, value in attributes.items():
- if not hasattr(self, key):
- setattr(self, key, value)
-
- def overlay(
- self,
- block_start_string=missing,
- block_end_string=missing,
- variable_start_string=missing,
- variable_end_string=missing,
- comment_start_string=missing,
- comment_end_string=missing,
- line_statement_prefix=missing,
- line_comment_prefix=missing,
- trim_blocks=missing,
- lstrip_blocks=missing,
- extensions=missing,
- optimized=missing,
- undefined=missing,
- finalize=missing,
- autoescape=missing,
- loader=missing,
- cache_size=missing,
- auto_reload=missing,
- bytecode_cache=missing,
- ):
- """Create a new overlay environment that shares all the data with the
- current environment except for cache and the overridden attributes.
- Extensions cannot be removed for an overlayed environment. An overlayed
- environment automatically gets all the extensions of the environment it
- is linked to plus optional extra extensions.
-
- Creating overlays should happen after the initial environment was set
- up completely. Not all attributes are truly linked, some are just
- copied over so modifications on the original environment may not shine
- through.
- """
- args = dict(locals())
- del args["self"], args["cache_size"], args["extensions"]
-
- rv = object.__new__(self.__class__)
- rv.__dict__.update(self.__dict__)
- rv.overlayed = True
- rv.linked_to = self
-
- for key, value in args.items():
- if value is not missing:
- setattr(rv, key, value)
-
- if cache_size is not missing:
- rv.cache = create_cache(cache_size)
- else:
- rv.cache = copy_cache(self.cache)
-
- rv.extensions = {}
- for key, value in self.extensions.items():
- rv.extensions[key] = value.bind(rv)
- if extensions is not missing:
- rv.extensions.update(load_extensions(rv, extensions))
-
- return _environment_sanity_check(rv)
-
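# Illustrative sketch, not part of the original file: an overlay shares data
# with its parent environment (env from the sketch above) but can override
# settings, e.g. alternate delimiters for a subset of templates.
latex_env = env.overlay(
    block_start_string="<%",
    block_end_string="%>",
    variable_start_string="<<",
    variable_end_string=">>",
)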
- lexer = property(get_lexer, doc="The lexer for this environment.")
-
- def iter_extensions(self):
- """Iterates over the extensions by priority."""
- return iter(sorted(self.extensions.values(), key=lambda x: x.priority))
-
- def getitem(self, obj, argument):
- """Get an item or attribute of an object but prefer the item."""
- try:
- return obj[argument]
- except (AttributeError, TypeError, LookupError):
- if isinstance(argument, str):
- try:
- attr = str(argument)
- except Exception:
- pass
- else:
- try:
- return getattr(obj, attr)
- except AttributeError:
- pass
- return self.undefined(obj=obj, name=argument)
-
- def getattr(self, obj, attribute):
- """Get an item or attribute of an object but prefer the attribute.
- Unlike :meth:`getitem` the attribute *must* be a string.
- """
- try:
- return getattr(obj, attribute)
- except AttributeError:
- pass
- try:
- return obj[attribute]
- except (TypeError, LookupError, AttributeError):
- return self.undefined(obj=obj, name=attribute)
-
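# Illustrative sketch, not part of the original file: how the two lookups
# above differ (env as in the earlier sketch).
env.getitem({"bar": 1}, "bar")   # item lookup wins -> 1
env.getattr({"bar": 1}, "bar")   # no such attribute, falls back to item -> 1
env.getattr(object(), "bar")     # neither works -> an Undefined, not an error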
- def call_filter(
- self, name, value, args=None, kwargs=None, context=None, eval_ctx=None
- ):
- """Invokes a filter on a value the same way the compiler does.
-
- This might return a coroutine if the filter is running from an
- environment in async mode and the filter supports async
- execution. It's your responsibility to await this if needed.
-
- .. versionadded:: 2.7
- """
- func = self.filters.get(name)
- if func is None:
- fail_for_missing_callable("filter", name)
- args = [value] + list(args or ())
- if getattr(func, "contextfilter", False) is True:
- if context is None:
- raise TemplateRuntimeError(
- "Attempted to invoke context filter without context"
- )
- args.insert(0, context)
- elif getattr(func, "evalcontextfilter", False) is True:
- if eval_ctx is None:
- if context is not None:
- eval_ctx = context.eval_ctx
- else:
- eval_ctx = EvalContext(self)
- args.insert(0, eval_ctx)
- elif getattr(func, "environmentfilter", False) is True:
- args.insert(0, self)
- return func(*args, **(kwargs or {}))
-
- def call_test(self, name, value, args=None, kwargs=None):
- """Invokes a test on a value the same way the compiler does it.
-
- .. versionadded:: 2.7
- """
- func = self.tests.get(name)
- if func is None:
- fail_for_missing_callable("test", name)
- return func(value, *(args or ()), **(kwargs or {}))
-
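# Illustrative sketch, not part of the original file: filters and tests can
# be invoked outside a template through the two helpers above.
env.call_filter("upper", "hello")        # -> "HELLO"
env.call_test("divisibleby", 10, (5,))   # -> True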
- @internalcode
- def parse(self, source, name=None, filename=None):
- """Parse the source code and return the abstract syntax tree. This
- tree of nodes is used by the compiler to convert the template into
- executable source code or bytecode. This is useful for debugging or to
- extract information from templates.
-
- If you are :ref:`developing Jinja extensions <writing-extensions>`
- this gives you a good overview of the node tree generated.
- """
- try:
- return self._parse(source, name, filename)
- except TemplateSyntaxError:
- self.handle_exception(source=source)
-
- def _parse(self, source, name, filename):
- """Internal parsing function used by `parse` and `compile`."""
- return Parser(self, source, name, filename).parse()
-
- def lex(self, source, name=None, filename=None):
- """Lex the given source code and return a generator that yields
- tokens as tuples in the form ``(lineno, token_type, value)``.
- This can be useful for :ref:`extension development <writing-extensions>`
- and debugging templates.
-
- This does not perform preprocessing. If you want the preprocessing
- of the extensions to be applied you have to filter source through
- the :meth:`preprocess` method.
- """
- source = str(source)
- try:
- return self.lexer.tokeniter(source, name, filename)
- except TemplateSyntaxError:
- self.handle_exception(source=source)
-
- def preprocess(self, source, name=None, filename=None):
- """Preprocesses the source with all extensions. This is automatically
- called for all parsing and compiling methods but *not* for :meth:`lex`
- because there you usually only want the actual source tokenized.
- """
- return reduce(
- lambda s, e: e.preprocess(s, name, filename),
- self.iter_extensions(),
- str(source),
- )
-
- def _tokenize(self, source, name, filename=None, state=None):
- """Called by the parser to do the preprocessing and filtering
- for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`.
- """
- source = self.preprocess(source, name, filename)
- stream = self.lexer.tokenize(source, name, filename, state)
- for ext in self.iter_extensions():
- stream = ext.filter_stream(stream)
- if not isinstance(stream, TokenStream):
- stream = TokenStream(stream, name, filename)
- return stream
-
- def _generate(self, source, name, filename, defer_init=False):
- """Internal hook that can be overridden to hook a different generate
- method in.
-
- .. versionadded:: 2.5
- """
- return generate(
- source,
- self,
- name,
- filename,
- defer_init=defer_init,
- optimized=self.optimized,
- )
-
- def _compile(self, source, filename):
- """Internal hook that can be overridden to hook a different compile
- method in.
-
- .. versionadded:: 2.5
- """
- return compile(source, filename, "exec")
-
- @internalcode
- def compile(self, source, name=None, filename=None, raw=False, defer_init=False):
- """Compile a node or template source code. The `name` parameter is
- the load name of the template after it was joined using
- :meth:`join_path` if necessary, not the filename on the file system.
- The `filename` parameter is the estimated filename of the template on
- the file system. If the template came from a database or memory this
- can be omitted.
-
- The return value of this method is a Python code object. If the `raw`
- parameter is `True` the return value will be a string with Python
- code equivalent to the bytecode returned otherwise. This method is
- mainly used internally.
-
- `defer_init` is used internally to aid the module code generator. This
- causes the generated code to be importable without the global
- environment variable being set.
-
- .. versionadded:: 2.4
- `defer_init` parameter added.
- """
- source_hint = None
- try:
- if isinstance(source, str):
- source_hint = source
- source = self._parse(source, name, filename)
- source = self._generate(source, name, filename, defer_init=defer_init)
- if raw:
- return source
- if filename is None:
- filename = "<template>"
- return self._compile(source, filename)
- except TemplateSyntaxError:
- self.handle_exception(source=source_hint)
-
- def compile_expression(self, source, undefined_to_none=True):
- """A handy helper method that returns a callable that accepts keyword
- arguments that appear as variables in the expression. If called it
- returns the result of the expression.
-
- This is useful if applications want to use the same rules as Jinja
- in template "configuration files" or similar situations.
-
- Example usage:
-
- >>> env = Environment()
- >>> expr = env.compile_expression('foo == 42')
- >>> expr(foo=23)
- False
- >>> expr(foo=42)
- True
-
- By default the return value is converted to `None` if the
- expression returns an undefined value. This can be changed
- by setting `undefined_to_none` to `False`.
-
- >>> env.compile_expression('var')() is None
- True
- >>> env.compile_expression('var', undefined_to_none=False)()
- Undefined
-
- .. versionadded:: 2.1
- """
- parser = Parser(self, source, state="variable")
- try:
- expr = parser.parse_expression()
- if not parser.stream.eos:
- raise TemplateSyntaxError(
- "chunk after expression", parser.stream.current.lineno, None, None
- )
- expr.set_environment(self)
- except TemplateSyntaxError:
- if sys.exc_info() is not None:
- self.handle_exception(source=source)
-
- body = [nodes.Assign(nodes.Name("result", "store"), expr, lineno=1)]
- template = self.from_string(nodes.Template(body, lineno=1))
- return TemplateExpression(template, undefined_to_none)
-
- def compile_templates(
- self,
- target,
- extensions=None,
- filter_func=None,
- zip="deflated",
- log_function=None,
- ignore_errors=True,
- ):
- """Finds all the templates the loader can find, compiles them
- and stores them in `target`. If `zip` is `None`, instead of in a
- zipfile, the templates will be stored in a directory.
- By default a deflate zip algorithm is used. To switch to
- the stored algorithm, `zip` can be set to ``'stored'``.
-
- `extensions` and `filter_func` are passed to :meth:`list_templates`.
- Each template returned will be compiled to the target folder or
- zipfile.
-
- By default template compilation errors are ignored. In case a
- log function is provided, errors are logged. If you want template
- syntax errors to abort the compilation you can set `ignore_errors`
- to `False` and you will get an exception on syntax errors.
-
- .. versionadded:: 2.4
- """
- from .loaders import ModuleLoader
-
- if log_function is None:
-
- def log_function(x):
- pass
-
- def write_file(filename, data):
- if zip:
- info = ZipInfo(filename)
- info.external_attr = 0o755 << 16
- zip_file.writestr(info, data)
- else:
- if isinstance(data, str):
- data = data.encode("utf8")
-
- with open(os.path.join(target, filename), "wb") as f:
- f.write(data)
-
- if zip is not None:
- from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED, ZIP_STORED
-
- zip_file = ZipFile(
- target, "w", dict(deflated=ZIP_DEFLATED, stored=ZIP_STORED)[zip]
- )
- log_function(f"Compiling into Zip archive {target!r}")
- else:
- if not os.path.isdir(target):
- os.makedirs(target)
- log_function(f"Compiling into folder {target!r}")
-
- try:
- for name in self.list_templates(extensions, filter_func):
- source, filename, _ = self.loader.get_source(self, name)
- try:
- code = self.compile(source, name, filename, True, True)
- except TemplateSyntaxError as e:
- if not ignore_errors:
- raise
- log_function(f'Could not compile "{name}": {e}')
- continue
-
- filename = ModuleLoader.get_module_filename(name)
-
- write_file(filename, code)
- log_function(f'Compiled "{name}" as {filename}')
- finally:
- if zip:
- zip_file.close()
-
- log_function("Finished compiling templates")
-
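# Illustrative sketch, not part of the original file: compiled templates are
# usually loaded back with ModuleLoader; the archive name is hypothetical and
# env is the loader-backed environment from the earlier sketch.
from jinja2 import ModuleLoader

env.compile_templates("compiled_templates.zip")
fast_env = Environment(loader=ModuleLoader("compiled_templates.zip"))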
- def list_templates(self, extensions=None, filter_func=None):
- """Returns a list of templates for this environment. This requires
- that the loader supports the
- :meth:`~BaseLoader.list_templates` method.
-
- If there are other files in the template folder besides the
- actual templates, the returned list can be filtered. There are two
- ways: either `extensions` is set to a list of file extensions for
- templates, or a `filter_func` can be provided which is a callable that
- is passed a template name and should return `True` if it should end up
- in the result list.
-
- If the loader does not support that, a :exc:`TypeError` is raised.
-
- .. versionadded:: 2.4
- """
- names = self.loader.list_templates()
-
- if extensions is not None:
- if filter_func is not None:
- raise TypeError(
- "either extensions or filter_func can be passed, but not both"
- )
-
- def filter_func(x):
- return "." in x and x.rsplit(".", 1)[1] in extensions
-
- if filter_func is not None:
- names = [name for name in names if filter_func(name)]
-
- return names
-
- def handle_exception(self, source=None):
- """Exception handling helper. This is used internally to either raise
- rewritten exceptions or return a rendered traceback for the template.
- """
- from .debug import rewrite_traceback_stack
-
- raise rewrite_traceback_stack(source=source)
-
- def join_path(self, template, parent):
- """Join a template with the parent. By default all the lookups are
- relative to the loader root so this method returns the `template`
- parameter unchanged, but if the paths should be relative to the
- parent template, this function can be used to calculate the real
- template name.
-
- Subclasses may override this method and implement template path
- joining here.
- """
- return template
-
- @internalcode
- def _load_template(self, name, globals):
- if self.loader is None:
- raise TypeError("no loader for this environment specified")
- cache_key = (weakref.ref(self.loader), name)
- if self.cache is not None:
- template = self.cache.get(cache_key)
- if template is not None and (
- not self.auto_reload or template.is_up_to_date
- ):
- return template
- template = self.loader.load(self, name, globals)
- if self.cache is not None:
- self.cache[cache_key] = template
- return template
-
- @internalcode
- def get_template(self, name, parent=None, globals=None):
- """Load a template from the loader. If a loader is configured this
- method asks the loader for the template and returns a :class:`Template`.
- If the `parent` parameter is not `None`, :meth:`join_path` is called
- to get the real template name before loading.
-
- The `globals` parameter can be used to provide template wide globals.
- These variables are available in the context at render time.
-
- If the template does not exist a :exc:`TemplateNotFound` exception is
- raised.
-
- .. versionchanged:: 2.4
- If `name` is a :class:`Template` object it is returned from the
- function unchanged.
- """
- if isinstance(name, Template):
- return name
- if parent is not None:
- name = self.join_path(name, parent)
- return self._load_template(name, self.make_globals(globals))
-
- @internalcode
- def select_template(self, names, parent=None, globals=None):
- """Works like :meth:`get_template` but tries a number of templates
- before it fails. If it cannot find any of the templates, it will
- raise a :exc:`TemplatesNotFound` exception.
-
- .. versionchanged:: 2.11
- If names is :class:`Undefined`, an :exc:`UndefinedError` is
- raised instead. If no templates were found and names
- contains :class:`Undefined`, the message is more helpful.
-
- .. versionchanged:: 2.4
- If `names` contains a :class:`Template` object it is returned
- from the function unchanged.
-
- .. versionadded:: 2.3
- """
- if isinstance(names, Undefined):
- names._fail_with_undefined_error()
-
- if not names:
- raise TemplatesNotFound(
- message="Tried to select from an empty list of templates."
- )
- globals = self.make_globals(globals)
- for name in names:
- if isinstance(name, Template):
- return name
- if parent is not None:
- name = self.join_path(name, parent)
- try:
- return self._load_template(name, globals)
- except (TemplateNotFound, UndefinedError):
- pass
- raise TemplatesNotFound(names)
-
- @internalcode
- def get_or_select_template(self, template_name_or_list, parent=None, globals=None):
- """Does a typecheck and dispatches to :meth:`select_template`
- if an iterable of template names is given, otherwise to
- :meth:`get_template`.
-
- .. versionadded:: 2.3
- """
- if isinstance(template_name_or_list, (str, Undefined)):
- return self.get_template(template_name_or_list, parent, globals)
- elif isinstance(template_name_or_list, Template):
- return template_name_or_list
- return self.select_template(template_name_or_list, parent, globals)
-
- def from_string(self, source, globals=None, template_class=None):
- """Load a template from a string. This parses the source given and
- returns a :class:`Template` object.
- """
- globals = self.make_globals(globals)
- cls = template_class or self.template_class
- return cls.from_code(self, self.compile(source), globals, None)
-
- def make_globals(self, d):
- """Return a dict for the globals."""
- if not d:
- return self.globals
- return dict(self.globals, **d)
-
-
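The lookup methods above in one short sketch, again assuming an illustrative ``DictLoader``:

    from jinja2 import DictLoader, Environment, TemplatesNotFound

    env = Environment(loader=DictLoader({"page.html": "Hi {{ user }}"}))

    page = env.get_template("page.html")
    # select_template returns the first name that can be loaded.
    fallback = env.select_template(["missing.html", "page.html"])
    print(page.render(user="a"), fallback.render(user="b"))

    # from_string builds a template directly, optionally with extra globals.
    adhoc = env.from_string("{{ greeting }} {{ user }}", globals={"greeting": "Hello"})
    print(adhoc.render(user="c"))

    try:
        env.select_template(["nope.html", "missing.html"])
    except TemplatesNotFound as exc:
        print(exc.templates)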
-class Template:
- """The central template object. This class represents a compiled template
- and is used to evaluate it.
-
- Normally the template object is generated from an :class:`Environment` but
- it also has a constructor that makes it possible to create a template
- instance directly. The constructor takes the same arguments as
- the environment constructor but it's not possible to specify a loader.
-
- Every template object has a few methods and members that are guaranteed
- to exist. However it's important that a template object should be
- considered immutable. Modifications on the object are not supported.
-
- Template objects created from the constructor rather than an environment
- do have an `environment` attribute that points to a temporary environment
- that is probably shared with other templates created with the constructor
- and compatible settings.
-
- >>> template = Template('Hello {{ name }}!')
- >>> template.render(name='John Doe') == u'Hello John Doe!'
- True
- >>> stream = template.stream(name='John Doe')
- >>> next(stream) == u'Hello John Doe!'
- True
- >>> next(stream)
- Traceback (most recent call last):
- ...
- StopIteration
- """
-
- #: Type of environment to create when creating a template directly
- #: rather than through an existing environment.
- environment_class = Environment
-
- def __new__(
- cls,
- source,
- block_start_string=BLOCK_START_STRING,
- block_end_string=BLOCK_END_STRING,
- variable_start_string=VARIABLE_START_STRING,
- variable_end_string=VARIABLE_END_STRING,
- comment_start_string=COMMENT_START_STRING,
- comment_end_string=COMMENT_END_STRING,
- line_statement_prefix=LINE_STATEMENT_PREFIX,
- line_comment_prefix=LINE_COMMENT_PREFIX,
- trim_blocks=TRIM_BLOCKS,
- lstrip_blocks=LSTRIP_BLOCKS,
- newline_sequence=NEWLINE_SEQUENCE,
- keep_trailing_newline=KEEP_TRAILING_NEWLINE,
- extensions=(),
- optimized=True,
- undefined=Undefined,
- finalize=None,
- autoescape=False,
- enable_async=False,
- ):
- env = get_spontaneous_environment(
- cls.environment_class,
- block_start_string,
- block_end_string,
- variable_start_string,
- variable_end_string,
- comment_start_string,
- comment_end_string,
- line_statement_prefix,
- line_comment_prefix,
- trim_blocks,
- lstrip_blocks,
- newline_sequence,
- keep_trailing_newline,
- frozenset(extensions),
- optimized,
- undefined,
- finalize,
- autoescape,
- None,
- 0,
- False,
- None,
- enable_async,
- )
- return env.from_string(source, template_class=cls)
-
- @classmethod
- def from_code(cls, environment, code, globals, uptodate=None):
- """Creates a template object from compiled code and the globals. This
- is used by the loaders and environment to create a template object.
- """
- namespace = {"environment": environment, "__file__": code.co_filename}
- exec(code, namespace)
- rv = cls._from_namespace(environment, namespace, globals)
- rv._uptodate = uptodate
- return rv
-
- @classmethod
- def from_module_dict(cls, environment, module_dict, globals):
- """Creates a template object from a module. This is used by the
- module loader to create a template object.
-
- .. versionadded:: 2.4
- """
- return cls._from_namespace(environment, module_dict, globals)
-
- @classmethod
- def _from_namespace(cls, environment, namespace, globals):
- t = object.__new__(cls)
- t.environment = environment
- t.globals = globals
- t.name = namespace["name"]
- t.filename = namespace["__file__"]
- t.blocks = namespace["blocks"]
-
- # render function and module
- t.root_render_func = namespace["root"]
- t._module = None
-
- # debug and loader helpers
- t._debug_info = namespace["debug_info"]
- t._uptodate = None
-
- # store the reference
- namespace["environment"] = environment
- namespace["__jinja_template__"] = t
-
- return t
-
- def render(self, *args, **kwargs):
- """This method accepts the same arguments as the `dict` constructor:
- A dict, a dict subclass or some keyword arguments. If no arguments
- are given the context will be empty. These two calls do the same::
-
- template.render(knights='that say nih')
- template.render({'knights': 'that say nih'})
-
- This will return the rendered template as a string.
- """
- vars = dict(*args, **kwargs)
- try:
- return concat(self.root_render_func(self.new_context(vars)))
- except Exception:
- self.environment.handle_exception()
-
- def render_async(self, *args, **kwargs):
- """This works similar to :meth:`render` but returns a coroutine
- that when awaited returns the entire rendered template string. This
- requires the async feature to be enabled.
-
- Example usage::
-
- await template.render_async(knights='that say nih; asynchronously')
- """
- # see asyncsupport for the actual implementation
- raise NotImplementedError(
- "This feature is not available for this version of Python"
- )
-
- def stream(self, *args, **kwargs):
- """Works exactly like :meth:`generate` but returns a
- :class:`TemplateStream`.
- """
- return TemplateStream(self.generate(*args, **kwargs))
-
- def generate(self, *args, **kwargs):
- """For very large templates it can be useful to not render the whole
- template at once but to evaluate the statements one after another and
- yield the output piece by piece. This method does exactly that and
- returns a generator that yields one string at a time.
-
- It accepts the same arguments as :meth:`render`.
- """
- vars = dict(*args, **kwargs)
- try:
- yield from self.root_render_func(self.new_context(vars))
- except Exception:
- yield self.environment.handle_exception()
-
- def generate_async(self, *args, **kwargs):
- """An async version of :meth:`generate`. Works very similarly but
- returns an async iterator instead.
- """
- # see asyncsupport for the actual implementation
- raise NotImplementedError(
- "This feature is not available for this version of Python"
- )
-
- def new_context(self, vars=None, shared=False, locals=None):
- """Create a new :class:`Context` for this template. The vars
- provided will be passed to the template. Per default the globals
- are added to the context. If shared is set to `True` the data
- is passed as is to the context without adding the globals.
-
- `locals` can be a dict of local variables for internal usage.
- """
- return new_context(
- self.environment, self.name, self.blocks, vars, shared, self.globals, locals
- )
-
- def make_module(self, vars=None, shared=False, locals=None):
- """This method works like the :attr:`module` attribute when called
- without arguments but it will evaluate the template on every call
- rather than caching it. It's also possible to provide
- a dict which is then used as context. The arguments are the same
- as for the :meth:`new_context` method.
- """
- return TemplateModule(self, self.new_context(vars, shared, locals))
-
- def make_module_async(self, vars=None, shared=False, locals=None):
- """As template module creation can invoke template code for
- asynchronous executions this method must be used instead of the
- normal :meth:`make_module` one. Likewise the module attribute
- becomes unavailable in async mode.
- """
- # see asyncsupport for the actual implementation
- raise NotImplementedError(
- "This feature is not available for this version of Python"
- )
-
- @internalcode
- def _get_default_module(self, ctx=None):
- """If a context is passed in, this means that the template was
- imported. Imported templates have access to the current template's
- globals by default, but they can only be accessed via the context
- during runtime.
-
- If there are new globals, we need to create a new
- module because the cached module is already rendered and will not have
- access to globals from the current context. This new module is not
- cached as :attr:`_module` because the template can be imported elsewhere,
- and it should have access to only the current template's globals.
- """
- if ctx is not None:
- globals = {
- key: ctx.parent[key] for key in ctx.globals_keys - self.globals.keys()
- }
- if globals:
- return self.make_module(globals)
- if self._module is not None:
- return self._module
- self._module = rv = self.make_module()
- return rv
-
- @property
- def module(self):
- """The template as module. This is used for imports in the
- template runtime but is also useful if one wants to access
- exported template variables from the Python layer:
-
- >>> t = Template('{% macro foo() %}42{% endmacro %}23')
- >>> str(t.module)
- '23'
- >>> t.module.foo() == u'42'
- True
-
- This attribute is not available if async mode is enabled.
- """
- return self._get_default_module()
-
- def get_corresponding_lineno(self, lineno):
- """Return the source line number of a line number in the
- generated bytecode as they are not in sync.
- """
- for template_line, code_line in reversed(self.debug_info):
- if code_line <= lineno:
- return template_line
- return 1
-
- @property
- def is_up_to_date(self):
- """If this variable is `False` there is a newer version available."""
- if self._uptodate is None:
- return True
- return self._uptodate()
-
- @property
- def debug_info(self):
- """The debug info mapping."""
- if self._debug_info:
- return [tuple(map(int, x.split("="))) for x in self._debug_info.split("&")]
- return []
-
- def __repr__(self):
- if self.name is None:
- name = f"memory:{id(self):x}"
- else:
- name = repr(self.name)
- return f"<{self.__class__.__name__} {name}>"
-
-
-class TemplateModule:
- """Represents an imported template. All the exported names of the
- template are available as attributes on this object. Additionally
- converting it into a string renders the contents.
- """
-
- def __init__(self, template, context, body_stream=None):
- if body_stream is None:
- if context.environment.is_async:
- raise RuntimeError(
- "Async mode requires a body stream "
- "to be passed to a template module. Use "
- "the async methods of the API you are "
- "using."
- )
- body_stream = list(template.root_render_func(context))
- self._body_stream = body_stream
- self.__dict__.update(context.get_exported())
- self.__name__ = template.name
-
- def __html__(self):
- return Markup(concat(self._body_stream))
-
- def __str__(self):
- return concat(self._body_stream)
-
- def __repr__(self):
- if self.__name__ is None:
- name = f"memory:{id(self):x}"
- else:
- name = repr(self.__name__)
- return f"<{self.__class__.__name__} {name}>"
-
-
-class TemplateExpression:
- """The :meth:`jinja2.Environment.compile_expression` method returns an
- instance of this object. It encapsulates the expression-like access
- to the template with an expression it wraps.
- """
-
- def __init__(self, template, undefined_to_none):
- self._template = template
- self._undefined_to_none = undefined_to_none
-
- def __call__(self, *args, **kwargs):
- context = self._template.new_context(dict(*args, **kwargs))
- consume(self._template.root_render_func(context))
- rv = context.vars["result"]
- if self._undefined_to_none and isinstance(rv, Undefined):
- rv = None
- return rv
-
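A small sketch of how a :class:`TemplateExpression` is typically obtained and called via :meth:`Environment.compile_expression` (the variable names are illustrative):

    from jinja2 import Environment

    env = Environment()
    expr = env.compile_expression("price * quantity")
    print(expr(price=2.5, quantity=4))  # 10.0

    # If the whole expression evaluates to an undefined name, the result
    # becomes None unless undefined_to_none is set to False.
    print(env.compile_expression("discount")())  # None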
-
-class TemplateStream:
- """A template stream works pretty much like an ordinary python generator
- but it can buffer multiple items to reduce the number of total iterations.
- Per default the output is unbuffered which means that for every unbuffered
- instruction in the template one string is yielded.
-
- If buffering is enabled with a buffer size of 5, five items are combined
- into a new string. This is mainly useful if you are streaming
- big templates to a client via WSGI which flushes after each iteration.
- """
-
- def __init__(self, gen):
- self._gen = gen
- self.disable_buffering()
-
- def dump(self, fp, encoding=None, errors="strict"):
- """Dump the complete stream into a file or file-like object.
- Per default strings are written, if you want to encode
- before writing specify an `encoding`.
-
- Example usage::
-
- Template('Hello {{ name }}!').stream(name='foo').dump('hello.html')
- """
- close = False
- if isinstance(fp, str):
- if encoding is None:
- encoding = "utf-8"
- fp = open(fp, "wb")
- close = True
- try:
- if encoding is not None:
- iterable = (x.encode(encoding, errors) for x in self)
- else:
- iterable = self
- if hasattr(fp, "writelines"):
- fp.writelines(iterable)
- else:
- for item in iterable:
- fp.write(item)
- finally:
- if close:
- fp.close()
-
- def disable_buffering(self):
- """Disable the output buffering."""
- self._next = partial(next, self._gen)
- self.buffered = False
-
- def _buffered_generator(self, size):
- buf = []
- c_size = 0
- push = buf.append
-
- while 1:
- try:
- while c_size < size:
- c = next(self._gen)
- push(c)
- if c:
- c_size += 1
- except StopIteration:
- if not c_size:
- return
- yield concat(buf)
- del buf[:]
- c_size = 0
-
- def enable_buffering(self, size=5):
- """Enable buffering. Buffer `size` items before yielding them."""
- if size <= 1:
- raise ValueError("buffer size too small")
-
- self.buffered = True
- self._next = partial(next, self._buffered_generator(size))
-
- def __iter__(self):
- return self
-
- def __next__(self):
- return self._next()
-
-
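A short sketch of buffering and dumping a stream; the template and buffer size are arbitrary:

    import io

    from jinja2 import Template

    stream = Template("{% for i in range(10) %}{{ i }} {% endfor %}").stream()
    stream.enable_buffering(size=5)  # combine five output events per yielded string
    for chunk in stream:
        print(repr(chunk))

    # dump() writes the whole stream to a path or file-like object.
    buf = io.StringIO()
    Template("Hello {{ name }}!").stream(name="foo").dump(buf)
    print(buf.getvalue())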
-# hook in default template class. if anyone reads this comment: ignore that
-# it's possible to use custom templates ;-)
-Environment.template_class = Template
diff --git a/src/jinja2/exceptions.py b/src/jinja2/exceptions.py
deleted file mode 100644
index 07cfba26..00000000
--- a/src/jinja2/exceptions.py
+++ /dev/null
@@ -1,147 +0,0 @@
-class TemplateError(Exception):
- """Baseclass for all template errors."""
-
- def __init__(self, message=None):
- super().__init__(message)
-
- @property
- def message(self):
- if self.args:
- return self.args[0]
-
-
-class TemplateNotFound(IOError, LookupError, TemplateError):
- """Raised if a template does not exist.
-
- .. versionchanged:: 2.11
- If the given name is :class:`Undefined` and no message was
- provided, an :exc:`UndefinedError` is raised.
- """
-
- # Silence the Python warning about message being deprecated since
- # it's not valid here.
- message = None
-
- def __init__(self, name, message=None):
- IOError.__init__(self, name)
-
- if message is None:
- from .runtime import Undefined
-
- if isinstance(name, Undefined):
- name._fail_with_undefined_error()
-
- message = name
-
- self.message = message
- self.name = name
- self.templates = [name]
-
- def __str__(self):
- return self.message
-
-
-class TemplatesNotFound(TemplateNotFound):
- """Like :class:`TemplateNotFound` but raised if multiple templates
- are selected. This is a subclass of :class:`TemplateNotFound`
- exception, so just catching the base exception will catch both.
-
- .. versionchanged:: 2.11
- If a name in the list of names is :class:`Undefined`, a message
- about it being undefined is shown rather than the empty string.
-
- .. versionadded:: 2.2
- """
-
- def __init__(self, names=(), message=None):
- if message is None:
- from .runtime import Undefined
-
- parts = []
-
- for name in names:
- if isinstance(name, Undefined):
- parts.append(name._undefined_message)
- else:
- parts.append(name)
-
- message = "none of the templates given were found: " + ", ".join(
- map(str, parts)
- )
- TemplateNotFound.__init__(self, names[-1] if names else None, message)
- self.templates = list(names)
-
-
-class TemplateSyntaxError(TemplateError):
- """Raised to tell the user that there is a problem with the template."""
-
- def __init__(self, message, lineno, name=None, filename=None):
- TemplateError.__init__(self, message)
- self.lineno = lineno
- self.name = name
- self.filename = filename
- self.source = None
-
- # this is set to True if the debug.translate_syntax_error
- # function translated the syntax error into a new traceback
- self.translated = False
-
- def __str__(self):
- # for translated errors we only return the message
- if self.translated:
- return self.message
-
- # otherwise attach some stuff
- location = f"line {self.lineno}"
- name = self.filename or self.name
- if name:
- location = f'File "{name}", {location}'
- lines = [self.message, " " + location]
-
- # if the source is set, add the line to the output
- if self.source is not None:
- try:
- line = self.source.splitlines()[self.lineno - 1]
- except IndexError:
- line = None
- if line:
- lines.append(" " + line.strip())
-
- return "\n".join(lines)
-
- def __reduce__(self):
- # https://bugs.python.org/issue1692335 Exceptions that take
- # multiple required arguments have problems with pickling.
- # Without this, raises TypeError: __init__() missing 1 required
- # positional argument: 'lineno'
- return self.__class__, (self.message, self.lineno, self.name, self.filename)
-
-
-class TemplateAssertionError(TemplateSyntaxError):
- """Like a template syntax error, but covers cases where something in the
- template caused an error at compile time that wasn't necessarily caused
- by a syntax error. However it's a direct subclass of
- :exc:`TemplateSyntaxError` and has the same attributes.
- """
-
-
-class TemplateRuntimeError(TemplateError):
- """A generic runtime error in the template engine. Under some situations
- Jinja may raise this exception.
- """
-
-
-class UndefinedError(TemplateRuntimeError):
- """Raised if a template tries to operate on :class:`Undefined`."""
-
-
-class SecurityError(TemplateRuntimeError):
- """Raised if a template tries to do something insecure if the
- sandbox is enabled.
- """
-
-
-class FilterArgumentError(TemplateRuntimeError):
- """This error is raised if a filter was called with inappropriate
- arguments
- """
diff --git a/src/jinja2/ext.py b/src/jinja2/ext.py
deleted file mode 100644
index 533ff179..00000000
--- a/src/jinja2/ext.py
+++ /dev/null
@@ -1,700 +0,0 @@
-"""Extension API for adding custom tags and behavior."""
-import pprint
-import re
-from sys import version_info
-
-from markupsafe import Markup
-
-from . import nodes
-from .defaults import BLOCK_END_STRING
-from .defaults import BLOCK_START_STRING
-from .defaults import COMMENT_END_STRING
-from .defaults import COMMENT_START_STRING
-from .defaults import KEEP_TRAILING_NEWLINE
-from .defaults import LINE_COMMENT_PREFIX
-from .defaults import LINE_STATEMENT_PREFIX
-from .defaults import LSTRIP_BLOCKS
-from .defaults import NEWLINE_SEQUENCE
-from .defaults import TRIM_BLOCKS
-from .defaults import VARIABLE_END_STRING
-from .defaults import VARIABLE_START_STRING
-from .environment import Environment
-from .exceptions import TemplateAssertionError
-from .exceptions import TemplateSyntaxError
-from .nodes import ContextReference
-from .runtime import concat
-from .utils import contextfunction
-from .utils import import_string
-
-# I18N functions available in Jinja templates. If the I18N library
-# provides ugettext, it will be assigned to gettext.
-GETTEXT_FUNCTIONS = ("_", "gettext", "ngettext")
-_ws_re = re.compile(r"\s*\n\s*")
-
-
-class ExtensionRegistry(type):
- """Gives the extension an unique identifier."""
-
- def __new__(mcs, name, bases, d):
- rv = type.__new__(mcs, name, bases, d)
- rv.identifier = f"{rv.__module__}.{rv.__name__}"
- return rv
-
-
-class Extension(metaclass=ExtensionRegistry):
- """Extensions can be used to add extra functionality to the Jinja template
- system at the parser level. Custom extensions are bound to an environment
- but may not store environment specific data on `self`. The reason for
- this is that an extension can be bound to another environment (for
- overlays) by creating a copy and reassigning the `environment` attribute.
-
- As extensions are created by the environment they cannot accept any
- arguments for configuration. One may want to work around that by using
- a factory function, but that is not possible as extensions are identified
- by their import name. The correct way to configure the extension is
- storing the configuration values on the environment. Because the
- environment then acts as central configuration storage, attribute
- names may clash, which is why extensions have to ensure that the names
- they choose for configuration are not too generic. ``prefix`` for example
- is a terrible name, ``fragment_cache_prefix`` on the other hand is a good
- name as it includes the name of the extension (fragment cache).
- """
-
- #: if this extension parses this is the list of tags it's listening to.
- tags = set()
-
- #: the priority of that extension. This is especially useful for
- #: extensions that preprocess values. A lower value means higher
- #: priority.
- #:
- #: .. versionadded:: 2.4
- priority = 100
-
- def __init__(self, environment):
- self.environment = environment
-
- def bind(self, environment):
- """Create a copy of this extension bound to another environment."""
- rv = object.__new__(self.__class__)
- rv.__dict__.update(self.__dict__)
- rv.environment = environment
- return rv
-
- def preprocess(self, source, name, filename=None):
- """This method is called before the actual lexing and can be used to
- preprocess the source. The `filename` is optional. The return value
- must be the preprocessed source.
- """
- return source
-
- def filter_stream(self, stream):
- """It's passed a :class:`~jinja2.lexer.TokenStream` that can be used
- to filter tokens returned. This method has to return an iterable of
- :class:`~jinja2.lexer.Token`\\s, but it doesn't have to return a
- :class:`~jinja2.lexer.TokenStream`.
- """
- return stream
-
- def parse(self, parser):
- """If any of the :attr:`tags` matched this method is called with the
- parser as first argument. The token the parser stream is pointing at
- is the name token that matched. This method has to return one or a
- list of multiple nodes.
- """
- raise NotImplementedError()
-
- def attr(self, name, lineno=None):
- """Return an attribute node for the current extension. This is useful
- to pass constants on extensions to generated template code.
-
- ::
-
- self.attr('_my_attribute', lineno=lineno)
- """
- return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno)
-
- def call_method(
- self, name, args=None, kwargs=None, dyn_args=None, dyn_kwargs=None, lineno=None
- ):
- """Call a method of the extension. This is a shortcut for
- :meth:`attr` + :class:`jinja2.nodes.Call`.
- """
- if args is None:
- args = []
- if kwargs is None:
- kwargs = []
- return nodes.Call(
- self.attr(name, lineno=lineno),
- args,
- kwargs,
- dyn_args,
- dyn_kwargs,
- lineno=lineno,
- )
-
-
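A minimal sketch of a custom extension that only overrides :meth:`preprocess`; the ``EmojiExtension`` name and the replacement it performs are made up for illustration:

    from jinja2 import Environment
    from jinja2.ext import Extension


    class EmojiExtension(Extension):
        """Rewrite ":)" in the template source before lexing."""

        def preprocess(self, source, name, filename=None):
            return source.replace(":)", "\N{SLIGHTLY SMILING FACE}")


    env = Environment(extensions=[EmojiExtension])
    print(env.from_string("Hi :)").render())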
-@contextfunction
-def _gettext_alias(__context, *args, **kwargs):
- return __context.call(__context.resolve("gettext"), *args, **kwargs)
-
-
-def _make_new_gettext(func):
- @contextfunction
- def gettext(__context, __string, **variables):
- rv = __context.call(func, __string)
- if __context.eval_ctx.autoescape:
- rv = Markup(rv)
- # Always treat as a format string, even if there are no
- # variables. This makes translation strings more consistent
- # and predictable. This requires escaping
- return rv % variables
-
- return gettext
-
-
-def _make_new_ngettext(func):
- @contextfunction
- def ngettext(__context, __singular, __plural, __num, **variables):
- variables.setdefault("num", __num)
- rv = __context.call(func, __singular, __plural, __num)
- if __context.eval_ctx.autoescape:
- rv = Markup(rv)
- # Always treat as a format string, see gettext comment above.
- return rv % variables
-
- return ngettext
-
-
-class InternationalizationExtension(Extension):
- """This extension adds gettext support to Jinja."""
-
- tags = {"trans"}
-
- # TODO: the i18n extension is currently reevaluating values in a few
- # situations. Take this example:
- # {% trans count=something() %}{{ count }} foo{% pluralize
- # %}{{ count }} foos{% endtrans %}
- # something is called twice here. One time for the gettext value and
- # the other time for the n-parameter of the ngettext function.
-
- def __init__(self, environment):
- Extension.__init__(self, environment)
- environment.globals["_"] = _gettext_alias
- environment.extend(
- install_gettext_translations=self._install,
- install_null_translations=self._install_null,
- install_gettext_callables=self._install_callables,
- uninstall_gettext_translations=self._uninstall,
- extract_translations=self._extract,
- newstyle_gettext=False,
- )
-
- def _install(self, translations, newstyle=None):
- # ugettext and ungettext are preferred in case the I18N library
- # is providing compatibility with older Python versions.
- gettext = getattr(translations, "ugettext", None)
- if gettext is None:
- gettext = translations.gettext
- ngettext = getattr(translations, "ungettext", None)
- if ngettext is None:
- ngettext = translations.ngettext
- self._install_callables(gettext, ngettext, newstyle)
-
- def _install_null(self, newstyle=None):
- self._install_callables(
- lambda x: x, lambda s, p, n: s if n == 1 else p, newstyle
- )
-
- def _install_callables(self, gettext, ngettext, newstyle=None):
- if newstyle is not None:
- self.environment.newstyle_gettext = newstyle
- if self.environment.newstyle_gettext:
- gettext = _make_new_gettext(gettext)
- ngettext = _make_new_ngettext(ngettext)
- self.environment.globals.update(gettext=gettext, ngettext=ngettext)
-
- def _uninstall(self, translations):
- for key in "gettext", "ngettext":
- self.environment.globals.pop(key, None)
-
- def _extract(self, source, gettext_functions=GETTEXT_FUNCTIONS):
- if isinstance(source, str):
- source = self.environment.parse(source)
- return extract_from_ast(source, gettext_functions)
-
- def parse(self, parser):
- """Parse a translatable tag."""
- lineno = next(parser.stream).lineno
- num_called_num = False
-
- # find all the variables referenced. Additionally a variable can be
- # defined in the body of the trans block too, but this is checked at
- # a later state.
- plural_expr = None
- plural_expr_assignment = None
- variables = {}
- trimmed = None
- while parser.stream.current.type != "block_end":
- if variables:
- parser.stream.expect("comma")
-
- # skip colon for python compatibility
- if parser.stream.skip_if("colon"):
- break
-
- name = parser.stream.expect("name")
- if name.value in variables:
- parser.fail(
- f"translatable variable {name.value!r} defined twice.",
- name.lineno,
- exc=TemplateAssertionError,
- )
-
- # expressions
- if parser.stream.current.type == "assign":
- next(parser.stream)
- variables[name.value] = var = parser.parse_expression()
- elif trimmed is None and name.value in ("trimmed", "notrimmed"):
- trimmed = name.value == "trimmed"
- continue
- else:
- variables[name.value] = var = nodes.Name(name.value, "load")
-
- if plural_expr is None:
- if isinstance(var, nodes.Call):
- plural_expr = nodes.Name("_trans", "load")
- variables[name.value] = plural_expr
- plural_expr_assignment = nodes.Assign(
- nodes.Name("_trans", "store"), var
- )
- else:
- plural_expr = var
- num_called_num = name.value == "num"
-
- parser.stream.expect("block_end")
-
- plural = None
- have_plural = False
- referenced = set()
-
- # now parse until endtrans or pluralize
- singular_names, singular = self._parse_block(parser, True)
- if singular_names:
- referenced.update(singular_names)
- if plural_expr is None:
- plural_expr = nodes.Name(singular_names[0], "load")
- num_called_num = singular_names[0] == "num"
-
- # if we have a pluralize block, we parse that too
- if parser.stream.current.test("name:pluralize"):
- have_plural = True
- next(parser.stream)
- if parser.stream.current.type != "block_end":
- name = parser.stream.expect("name")
- if name.value not in variables:
- parser.fail(
- f"unknown variable {name.value!r} for pluralization",
- name.lineno,
- exc=TemplateAssertionError,
- )
- plural_expr = variables[name.value]
- num_called_num = name.value == "num"
- parser.stream.expect("block_end")
- plural_names, plural = self._parse_block(parser, False)
- next(parser.stream)
- referenced.update(plural_names)
- else:
- next(parser.stream)
-
- # register free names as simple name expressions
- for var in referenced:
- if var not in variables:
- variables[var] = nodes.Name(var, "load")
-
- if not have_plural:
- plural_expr = None
- elif plural_expr is None:
- parser.fail("pluralize without variables", lineno)
-
- if trimmed is None:
- trimmed = self.environment.policies["ext.i18n.trimmed"]
- if trimmed:
- singular = self._trim_whitespace(singular)
- if plural:
- plural = self._trim_whitespace(plural)
-
- node = self._make_node(
- singular,
- plural,
- variables,
- plural_expr,
- bool(referenced),
- num_called_num and have_plural,
- )
- node.set_lineno(lineno)
- if plural_expr_assignment is not None:
- return [plural_expr_assignment, node]
- else:
- return node
-
- def _trim_whitespace(self, string, _ws_re=_ws_re):
- return _ws_re.sub(" ", string.strip())
-
- def _parse_block(self, parser, allow_pluralize):
- """Parse until the next block tag with a given name."""
- referenced = []
- buf = []
- while 1:
- if parser.stream.current.type == "data":
- buf.append(parser.stream.current.value.replace("%", "%%"))
- next(parser.stream)
- elif parser.stream.current.type == "variable_begin":
- next(parser.stream)
- name = parser.stream.expect("name").value
- referenced.append(name)
- buf.append(f"%({name})s")
- parser.stream.expect("variable_end")
- elif parser.stream.current.type == "block_begin":
- next(parser.stream)
- if parser.stream.current.test("name:endtrans"):
- break
- elif parser.stream.current.test("name:pluralize"):
- if allow_pluralize:
- break
- parser.fail(
- "a translatable section can have only one pluralize section"
- )
- parser.fail(
- "control structures in translatable sections are not allowed"
- )
- elif parser.stream.eos:
- parser.fail("unclosed translation block")
- else:
- raise RuntimeError("internal parser error")
-
- return referenced, concat(buf)
-
- def _make_node(
- self, singular, plural, variables, plural_expr, vars_referenced, num_called_num
- ):
- """Generates a useful node from the data provided."""
- # no variables referenced? then the %-escaping that old style
- # gettext invocations need is unnecessary, so undo it.
- if not vars_referenced and not self.environment.newstyle_gettext:
- singular = singular.replace("%%", "%")
- if plural:
- plural = plural.replace("%%", "%")
-
- # singular only:
- if plural_expr is None:
- gettext = nodes.Name("gettext", "load")
- node = nodes.Call(gettext, [nodes.Const(singular)], [], None, None)
-
- # singular and plural
- else:
- ngettext = nodes.Name("ngettext", "load")
- node = nodes.Call(
- ngettext,
- [nodes.Const(singular), nodes.Const(plural), plural_expr],
- [],
- None,
- None,
- )
-
- # in case newstyle gettext is used, the method is powerful
- # enough to handle the variable expansion and autoescape
- # handling itself
- if self.environment.newstyle_gettext:
- for key, value in variables.items():
- # the function adds that later anyways in case num was
- # called num, so just skip it.
- if num_called_num and key == "num":
- continue
- node.kwargs.append(nodes.Keyword(key, value))
-
- # otherwise do that here
- else:
- # mark the return value as safe if we are in an
- # environment with autoescaping turned on
- node = nodes.MarkSafeIfAutoescape(node)
- if variables:
- node = nodes.Mod(
- node,
- nodes.Dict(
- [
- nodes.Pair(nodes.Const(key), value)
- for key, value in variables.items()
- ]
- ),
- )
- return nodes.Output([node])
-
-
-class ExprStmtExtension(Extension):
- """Adds a `do` tag to Jinja that works like the print statement just
- that it doesn't print the return value.
- """
-
- tags = {"do"}
-
- def parse(self, parser):
- node = nodes.ExprStmt(lineno=next(parser.stream).lineno)
- node.node = parser.parse_tuple()
- return node
-
-
-class LoopControlExtension(Extension):
- """Adds break and continue to the template engine."""
-
- tags = {"break", "continue"}
-
- def parse(self, parser):
- token = next(parser.stream)
- if token.value == "break":
- return nodes.Break(lineno=token.lineno)
- return nodes.Continue(lineno=token.lineno)
-
-
-class WithExtension(Extension):
- pass
-
-
-class AutoEscapeExtension(Extension):
- pass
-
-
-class DebugExtension(Extension):
- """A ``{% debug %}`` tag that dumps the available variables,
- filters, and tests.
-
- .. code-block:: html+jinja
-
- <pre>{% debug %}</pre>
-
- .. code-block:: text
-
- {'context': {'cycler': <class 'jinja2.utils.Cycler'>,
- ...,
- 'namespace': <class 'jinja2.utils.Namespace'>},
- 'filters': ['abs', 'attr', 'batch', 'capitalize', 'center', 'count', 'd',
- ..., 'urlencode', 'urlize', 'wordcount', 'wordwrap', 'xmlattr'],
- 'tests': ['!=', '<', '<=', '==', '>', '>=', 'callable', 'defined',
- ..., 'odd', 'sameas', 'sequence', 'string', 'undefined', 'upper']}
-
- .. versionadded:: 2.11.0
- """
-
- tags = {"debug"}
-
- def parse(self, parser):
- lineno = parser.stream.expect("name:debug").lineno
- context = ContextReference()
- result = self.call_method("_render", [context], lineno=lineno)
- return nodes.Output([result], lineno=lineno)
-
- def _render(self, context):
- result = {
- "context": context.get_all(),
- "filters": sorted(self.environment.filters.keys()),
- "tests": sorted(self.environment.tests.keys()),
- }
-
- # Set the depth since the intent is to show the top few names.
- if version_info[:2] >= (3, 4):
- return pprint.pformat(result, depth=3, compact=True)
- else:
- return pprint.pformat(result, depth=3)
-
-
-def extract_from_ast(node, gettext_functions=GETTEXT_FUNCTIONS, babel_style=True):
- """Extract localizable strings from the given template node. Per
- default this function returns matches in Babel style, which means non-string
- parameters as well as keyword arguments are returned as `None`. This
- allows Babel to figure out what you really meant if you are using
- gettext functions that allow keyword arguments for placeholder expansion.
- If you don't want that behavior, set the `babel_style` parameter to `False`,
- which causes only strings to be returned and parameters to always be stored
- in tuples. As a consequence invalid gettext calls (calls without a single
- string parameter or string parameters after non-string parameters) are
- skipped.
-
- This example explains the behavior:
-
- >>> from jinja2 import Environment
- >>> env = Environment()
- >>> node = env.parse('{{ (_("foo"), _(), ngettext("foo", "bar", 42)) }}')
- >>> list(extract_from_ast(node))
- [(1, '_', 'foo'), (1, '_', ()), (1, 'ngettext', ('foo', 'bar', None))]
- >>> list(extract_from_ast(node, babel_style=False))
- [(1, '_', ('foo',)), (1, 'ngettext', ('foo', 'bar'))]
-
- For every string found this function yields a ``(lineno, function,
- message)`` tuple, where:
-
- * ``lineno`` is the number of the line on which the string was found,
- * ``function`` is the name of the ``gettext`` function used (if the
- string was extracted from embedded Python code), and
- * ``message`` is the string, or a tuple of strings for functions
- with multiple string arguments.
-
- This extraction function operates on the AST and is therefore unable
- to extract any comments. For comment support you have to use the Babel
- extraction interface or extract comments yourself.
- """
- for node in node.find_all(nodes.Call):
- if (
- not isinstance(node.node, nodes.Name)
- or node.node.name not in gettext_functions
- ):
- continue
-
- strings = []
- for arg in node.args:
- if isinstance(arg, nodes.Const) and isinstance(arg.value, str):
- strings.append(arg.value)
- else:
- strings.append(None)
-
- for _ in node.kwargs:
- strings.append(None)
- if node.dyn_args is not None:
- strings.append(None)
- if node.dyn_kwargs is not None:
- strings.append(None)
-
- if not babel_style:
- strings = tuple(x for x in strings if x is not None)
- if not strings:
- continue
- else:
- if len(strings) == 1:
- strings = strings[0]
- else:
- strings = tuple(strings)
- yield node.lineno, node.node.name, strings
-
-
-class _CommentFinder:
- """Helper class to find comments in a token stream. Can only
- find comments for gettext calls forwards. Once the comment
- from line 4 is found, a comment for line 1 will not return a
- usable value.
- """
-
- def __init__(self, tokens, comment_tags):
- self.tokens = tokens
- self.comment_tags = comment_tags
- self.offset = 0
- self.last_lineno = 0
-
- def find_backwards(self, offset):
- try:
- for _, token_type, token_value in reversed(
- self.tokens[self.offset : offset]
- ):
- if token_type in ("comment", "linecomment"):
- try:
- prefix, comment = token_value.split(None, 1)
- except ValueError:
- continue
- if prefix in self.comment_tags:
- return [comment.rstrip()]
- return []
- finally:
- self.offset = offset
-
- def find_comments(self, lineno):
- if not self.comment_tags or self.last_lineno > lineno:
- return []
- for idx, (token_lineno, _, _) in enumerate(self.tokens[self.offset :]):
- if token_lineno > lineno:
- return self.find_backwards(self.offset + idx)
- return self.find_backwards(len(self.tokens))
-
-
-def babel_extract(fileobj, keywords, comment_tags, options):
- """Babel extraction method for Jinja templates.
-
- .. versionchanged:: 2.3
- Basic support for translation comments was added. If `comment_tags`
- is now set to a list of keywords for extraction, the extractor will
- try to find the best preceding comment that begins with one of the
- keywords. For best results, make sure to not have more than one
- gettext call in one line of code and the matching comment in the
- same line or the line before.
-
- .. versionchanged:: 2.5.1
- The `newstyle_gettext` flag can be set to `True` to enable newstyle
- gettext calls.
-
- .. versionchanged:: 2.7
- A `silent` option can now be provided. If set to `False` template
- syntax errors are propagated instead of being ignored.
-
- :param fileobj: the file-like object the messages should be extracted from
- :param keywords: a list of keywords (i.e. function names) that should be
- recognized as translation functions
- :param comment_tags: a list of translator tags to search for and include
- in the results.
- :param options: a dictionary of additional options (optional)
- :return: an iterator over ``(lineno, funcname, message, comments)`` tuples.
- (comments will be empty currently)
- """
- extensions = set()
- for extension in options.get("extensions", "").split(","):
- extension = extension.strip()
- if not extension:
- continue
- extensions.add(import_string(extension))
- if InternationalizationExtension not in extensions:
- extensions.add(InternationalizationExtension)
-
- def getbool(options, key, default=False):
- return options.get(key, str(default)).lower() in ("1", "on", "yes", "true")
-
- silent = getbool(options, "silent", True)
- environment = Environment(
- options.get("block_start_string", BLOCK_START_STRING),
- options.get("block_end_string", BLOCK_END_STRING),
- options.get("variable_start_string", VARIABLE_START_STRING),
- options.get("variable_end_string", VARIABLE_END_STRING),
- options.get("comment_start_string", COMMENT_START_STRING),
- options.get("comment_end_string", COMMENT_END_STRING),
- options.get("line_statement_prefix") or LINE_STATEMENT_PREFIX,
- options.get("line_comment_prefix") or LINE_COMMENT_PREFIX,
- getbool(options, "trim_blocks", TRIM_BLOCKS),
- getbool(options, "lstrip_blocks", LSTRIP_BLOCKS),
- NEWLINE_SEQUENCE,
- getbool(options, "keep_trailing_newline", KEEP_TRAILING_NEWLINE),
- frozenset(extensions),
- cache_size=0,
- auto_reload=False,
- )
-
- if getbool(options, "trimmed"):
- environment.policies["ext.i18n.trimmed"] = True
- if getbool(options, "newstyle_gettext"):
- environment.newstyle_gettext = True
-
- source = fileobj.read().decode(options.get("encoding", "utf-8"))
- try:
- node = environment.parse(source)
- tokens = list(environment.lex(environment.preprocess(source)))
- except TemplateSyntaxError:
- if not silent:
- raise
- # skip templates with syntax errors
- return
-
- finder = _CommentFinder(tokens, comment_tags)
- for lineno, func, message in extract_from_ast(node, keywords):
- yield lineno, func, message, finder.find_comments(lineno)
-
-
-#: nicer import names
-i18n = InternationalizationExtension
-do = ExprStmtExtension
-loopcontrols = LoopControlExtension
-with_ = WithExtension
-autoescape = AutoEscapeExtension
-debug = DebugExtension
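A small end-to-end sketch of the i18n extension with null translations; the template text is illustrative:

    from jinja2 import Environment

    env = Environment(extensions=["jinja2.ext.i18n"])
    env.install_null_translations(newstyle=True)

    tmpl = env.from_string(
        "{% trans count=users|length %}"
        "{{ count }} user{% pluralize %}{{ count }} users"
        "{% endtrans %}"
    )
    print(tmpl.render(users=["alice", "bob"]))  # 2 users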
diff --git a/src/jinja2/filters.py b/src/jinja2/filters.py
deleted file mode 100644
index c257d4c5..00000000
--- a/src/jinja2/filters.py
+++ /dev/null
@@ -1,1361 +0,0 @@
-"""Built-in template filters used with the ``|`` operator."""
-import math
-import random
-import re
-from collections import abc
-from collections import namedtuple
-from itertools import chain
-from itertools import groupby
-
-from markupsafe import escape
-from markupsafe import Markup
-from markupsafe import soft_str
-
-from .exceptions import FilterArgumentError
-from .runtime import Undefined
-from .utils import htmlsafe_json_dumps
-from .utils import pformat
-from .utils import url_quote
-from .utils import urlize
-
-_word_re = re.compile(r"\w+")
-_word_beginning_split_re = re.compile(r"([-\s({\[<]+)")
-
-
-def contextfilter(f):
- """Decorator for marking context dependent filters. The current
- :class:`Context` will be passed as first argument.
- """
- f.contextfilter = True
- return f
-
-
-def evalcontextfilter(f):
- """Decorator for marking eval-context dependent filters. An eval
- context object is passed as first argument. For more information
- about the eval context, see :ref:`eval-context`.
-
- .. versionadded:: 2.4
- """
- f.evalcontextfilter = True
- return f
-
-
-def environmentfilter(f):
- """Decorator for marking environment dependent filters. The current
- :class:`Environment` is passed to the filter as first argument.
- """
- f.environmentfilter = True
- return f
-
-
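A sketch of registering custom filters with the decorators above; ``shout`` and ``greet`` are made-up filter names:

    from jinja2 import Environment, contextfilter, environmentfilter


    @environmentfilter
    def shout(environment, value):
        # The current Environment is injected as the first argument.
        return str(value).upper() + "!"


    @contextfilter
    def greet(context, value):
        # Context filters can look at template variables at call time.
        return f"{context.get('greeting', 'Hello')} {value}"


    env = Environment()
    env.filters.update(shout=shout, greet=greet)
    print(env.from_string("{{ 'jinja'|shout }} {{ 'world'|greet }}").render(greeting="Hi"))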
-def ignore_case(value):
- """For use as a postprocessor for :func:`make_attrgetter`. Converts strings
- to lowercase and returns other types as-is."""
- return value.lower() if isinstance(value, str) else value
-
-
-def make_attrgetter(environment, attribute, postprocess=None, default=None):
- """Returns a callable that looks up the given attribute from a
- passed object with the rules of the environment. Dots are allowed
- to access attributes of attributes. Integer parts in paths are
- looked up as integers.
- """
- attribute = _prepare_attribute_parts(attribute)
-
- def attrgetter(item):
- for part in attribute:
- item = environment.getitem(item, part)
-
- if default and isinstance(item, Undefined):
- item = default
-
- if postprocess is not None:
- item = postprocess(item)
-
- return item
-
- return attrgetter
-
-
-def make_multi_attrgetter(environment, attribute, postprocess=None):
- """Returns a callable that looks up the given comma separated
- attributes from a passed object with the rules of the environment.
- Dots are allowed to access attributes of each attribute. Integer
- parts in paths are looked up as integers.
-
- The value returned by the returned callable is a list of extracted
- attribute values.
-
- Examples of attribute: "attr1,attr2", "attr1.inner1.0,attr2.inner2.0", etc.
- """
- attribute_parts = (
- attribute.split(",") if isinstance(attribute, str) else [attribute]
- )
- attribute = [
- _prepare_attribute_parts(attribute_part) for attribute_part in attribute_parts
- ]
-
- def attrgetter(item):
- items = [None] * len(attribute)
- for i, attribute_part in enumerate(attribute):
- item_i = item
- for part in attribute_part:
- item_i = environment.getitem(item_i, part)
-
- if postprocess is not None:
- item_i = postprocess(item_i)
-
- items[i] = item_i
- return items
-
- return attrgetter
-
-
-def _prepare_attribute_parts(attr):
- if attr is None:
- return []
- elif isinstance(attr, str):
- return [int(x) if x.isdigit() else x for x in attr.split(".")]
- else:
- return [attr]
-
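These are internal helpers, but a tiny sketch makes the attribute-path rules concrete (dots walk into items, integer parts are looked up as indexes, commas select several paths); the sample data is made up:

    from jinja2 import Environment
    from jinja2.filters import make_attrgetter, make_multi_attrgetter

    env = Environment()

    get_city = make_attrgetter(env, "address.city")
    print(get_city({"address": {"city": "Oslo"}}))  # Oslo

    get_pair = make_multi_attrgetter(env, "age,name")
    print(get_pair({"age": 3, "name": "Pelle"}))  # [3, 'Pelle']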
-
-def do_forceescape(value):
- """Enforce HTML escaping. This will probably double escape variables."""
- if hasattr(value, "__html__"):
- value = value.__html__()
- return escape(str(value))
-
-
-def do_urlencode(value):
- """Quote data for use in a URL path or query using UTF-8.
-
- Basic wrapper around :func:`urllib.parse.quote` when given a
- string, or :func:`urllib.parse.urlencode` for a dict or iterable.
-
- :param value: Data to quote. A string will be quoted directly. A
- dict or iterable of ``(key, value)`` pairs will be joined as a
- query string.
-
- When given a string, "/" is not quoted. HTTP servers treat "/" and
- "%2F" equivalently in paths. If you need quoted slashes, use the
- ``|replace("/", "%2F")`` filter.
-
- .. versionadded:: 2.7
- """
- if isinstance(value, str) or not isinstance(value, abc.Iterable):
- return url_quote(value)
-
- if isinstance(value, dict):
- items = value.items()
- else:
- items = iter(value)
-
- return "&".join(
- f"{url_quote(k, for_qs=True)}={url_quote(v, for_qs=True)}" for k, v in items
- )
-
-
-@evalcontextfilter
-def do_replace(eval_ctx, s, old, new, count=None):
- """Return a copy of the value with all occurrences of a substring
- replaced with a new one. The first argument is the substring
- that should be replaced, the second is the replacement string.
- If the optional third argument ``count`` is given, only the first
- ``count`` occurrences are replaced:
-
- .. sourcecode:: jinja
-
- {{ "Hello World"|replace("Hello", "Goodbye") }}
- -> Goodbye World
-
- {{ "aaaaargh"|replace("a", "d'oh, ", 2) }}
- -> d'oh, d'oh, aaargh
- """
- if count is None:
- count = -1
- if not eval_ctx.autoescape:
- return str(s).replace(str(old), str(new), count)
- if (
- hasattr(old, "__html__")
- or hasattr(new, "__html__")
- and not hasattr(s, "__html__")
- ):
- s = escape(s)
- else:
- s = soft_str(s)
- return s.replace(soft_str(old), soft_str(new), count)
-
-
-def do_upper(s):
- """Convert a value to uppercase."""
- return soft_str(s).upper()
-
-
-def do_lower(s):
- """Convert a value to lowercase."""
- return soft_str(s).lower()
-
-
-@evalcontextfilter
-def do_xmlattr(_eval_ctx, d, autospace=True):
- """Create an SGML/XML attribute string based on the items in a dict.
- All values that are neither `none` nor `undefined` are automatically
- escaped:
-
- .. sourcecode:: html+jinja
-
- <ul{{ {'class': 'my_list', 'missing': none,
- 'id': 'list-%d'|format(variable)}|xmlattr }}>
- ...
- </ul>
-
- Results in something like this:
-
- .. sourcecode:: html
-
- <ul class="my_list" id="list-42">
- ...
- </ul>
-
- As you can see, it automatically prepends a space in front of the item
- if the filter returned something, unless the second parameter is false.
- """
- rv = " ".join(
- f'{escape(key)}="{escape(value)}"'
- for key, value in d.items()
- if value is not None and not isinstance(value, Undefined)
- )
- if autospace and rv:
- rv = " " + rv
- if _eval_ctx.autoescape:
- rv = Markup(rv)
- return rv
-
-
-def do_capitalize(s):
- """Capitalize a value. The first character will be uppercase, all others
- lowercase.
- """
- return soft_str(s).capitalize()
-
-
-def do_title(s):
- """Return a titlecased version of the value. I.e. words will start with
- uppercase letters, all remaining characters are lowercase.
- """
- return "".join(
- [
- item[0].upper() + item[1:].lower()
- for item in _word_beginning_split_re.split(soft_str(s))
- if item
- ]
- )
-
-
-def do_dictsort(value, case_sensitive=False, by="key", reverse=False):
- """Sort a dict and yield (key, value) pairs. Because python dicts are
- unsorted you may want to use this function to order them by either
- key or value:
-
- .. sourcecode:: jinja
-
- {% for item in mydict|dictsort %}
- sort the dict by key, case insensitive
-
- {% for item in mydict|dictsort(reverse=true) %}
- sort the dict by key, case insensitive, reverse order
-
- {% for item in mydict|dictsort(true) %}
- sort the dict by key, case sensitive
-
- {% for item in mydict|dictsort(false, 'value') %}
- sort the dict by value, case insensitive
- """
- if by == "key":
- pos = 0
- elif by == "value":
- pos = 1
- else:
- raise FilterArgumentError('You can only sort by either "key" or "value"')
-
- def sort_func(item):
- value = item[pos]
-
- if not case_sensitive:
- value = ignore_case(value)
-
- return value
-
- return sorted(value.items(), key=sort_func, reverse=reverse)
-
-
-@environmentfilter
-def do_sort(environment, value, reverse=False, case_sensitive=False, attribute=None):
- """Sort an iterable using Python's :func:`sorted`.
-
- .. sourcecode:: jinja
-
- {% for city in cities|sort %}
- ...
- {% endfor %}
-
- :param reverse: Sort descending instead of ascending.
- :param case_sensitive: When sorting strings, sort upper and lower
- case separately.
- :param attribute: When sorting objects or dicts, an attribute or
- key to sort by. Can use dot notation like ``"address.city"``.
- Can be a list of attributes like ``"age,name"``.
-
- The sort is stable; it does not change the relative order of
- elements that compare equal. This makes it possible to chain
- sorts on different attributes and orderings.
-
- .. sourcecode:: jinja
-
- {% for user in users|sort(attribute="name")
- |sort(reverse=true, attribute="age") %}
- ...
- {% endfor %}
-
- As a shortcut to chaining when the direction is the same for all
- attributes, pass a comma separate list of attributes.
-
- .. sourcecode:: jinja
-
- {% for user in users|sort(attribute="age,name") %}
- ...
- {% endfor %}
-
- .. versionchanged:: 2.11.0
- The ``attribute`` parameter can be a comma separated list of
- attributes, e.g. ``"age,name"``.
-
- .. versionchanged:: 2.6
- The ``attribute`` parameter was added.
- """
- key_func = make_multi_attrgetter(
- environment, attribute, postprocess=ignore_case if not case_sensitive else None
- )
- return sorted(value, key=key_func, reverse=reverse)
-
-
-@environmentfilter
-def do_unique(environment, value, case_sensitive=False, attribute=None):
- """Returns a list of unique items from the given iterable.
-
- .. sourcecode:: jinja
-
- {{ ['foo', 'bar', 'foobar', 'FooBar']|unique|list }}
- -> ['foo', 'bar', 'foobar']
-
- The unique items are yielded in the same order as their first occurrence in
- the iterable passed to the filter.
-
- :param case_sensitive: Treat upper and lower case strings as distinct.
- :param attribute: Filter objects with unique values for this attribute.
- """
- getter = make_attrgetter(
- environment, attribute, postprocess=ignore_case if not case_sensitive else None
- )
- seen = set()
-
- for item in value:
- key = getter(item)
-
- if key not in seen:
- seen.add(key)
- yield item
-
-
-def _min_or_max(environment, value, func, case_sensitive, attribute):
- it = iter(value)
-
- try:
- first = next(it)
- except StopIteration:
- return environment.undefined("No aggregated item, sequence was empty.")
-
- key_func = make_attrgetter(
- environment, attribute, postprocess=ignore_case if not case_sensitive else None
- )
- return func(chain([first], it), key=key_func)
-
-
-@environmentfilter
-def do_min(environment, value, case_sensitive=False, attribute=None):
- """Return the smallest item from the sequence.
-
- .. sourcecode:: jinja
-
- {{ [1, 2, 3]|min }}
- -> 1
-
- :param case_sensitive: Treat upper and lower case strings as distinct.
- :param attribute: Get the object with the min value of this attribute.
- """
- return _min_or_max(environment, value, min, case_sensitive, attribute)
-
-
-@environmentfilter
-def do_max(environment, value, case_sensitive=False, attribute=None):
- """Return the largest item from the sequence.
-
- .. sourcecode:: jinja
-
- {{ [1, 2, 3]|max }}
- -> 3
-
- :param case_sensitive: Treat upper and lower case strings as distinct.
- :param attribute: Get the object with the max value of this attribute.
- """
- return _min_or_max(environment, value, max, case_sensitive, attribute)
-
-
-def do_default(value, default_value="", boolean=False):
- """If the value is undefined it will return the passed default value,
- otherwise the value of the variable:
-
- .. sourcecode:: jinja
-
- {{ my_variable|default('my_variable is not defined') }}
-
- This will output the value of ``my_variable`` if the variable was
- defined, otherwise ``'my_variable is not defined'``. If you want
- to use default with variables that evaluate to false you have to
- set the second parameter to `true`:
-
- .. sourcecode:: jinja
-
- {{ ''|default('the string was empty', true) }}
-
- .. versionchanged:: 2.11
- It's now possible to configure the :class:`~jinja2.Environment` with
- :class:`~jinja2.ChainableUndefined` to make the `default` filter work
- on nested elements and attributes that may contain undefined values
- in the chain without getting an :exc:`~jinja2.UndefinedError`.
- """
- if isinstance(value, Undefined) or (boolean and not value):
- return default_value
- return value
-
-
-@evalcontextfilter
-def do_join(eval_ctx, value, d="", attribute=None):
- """Return a string which is the concatenation of the strings in the
- sequence. The separator between elements is an empty string per
- default, you can define it with the optional parameter:
-
- .. sourcecode:: jinja
-
- {{ [1, 2, 3]|join('|') }}
- -> 1|2|3
-
- {{ [1, 2, 3]|join }}
- -> 123
-
- It is also possible to join certain attributes of an object:
-
- .. sourcecode:: jinja
-
- {{ users|join(', ', attribute='username') }}
-
- .. versionadded:: 2.6
- The `attribute` parameter was added.
- """
- if attribute is not None:
- value = map(make_attrgetter(eval_ctx.environment, attribute), value)
-
- # no automatic escaping? joining is a lot easier then
- if not eval_ctx.autoescape:
- return str(d).join(map(str, value))
-
- # if the delimiter doesn't have an html representation we check
- # if any of the items has. If yes we do a coercion to Markup
- if not hasattr(d, "__html__"):
- value = list(value)
- do_escape = False
- for idx, item in enumerate(value):
- if hasattr(item, "__html__"):
- do_escape = True
- else:
- value[idx] = str(item)
- if do_escape:
- d = escape(d)
- else:
- d = str(d)
- return d.join(value)
-
- # no html involved, so do normal joining
- return soft_str(d).join(map(soft_str, value))
-
-
-def do_center(value, width=80):
- """Centers the value in a field of a given width."""
- return str(value).center(width)
-
-
-@environmentfilter
-def do_first(environment, seq):
- """Return the first item of a sequence."""
- try:
- return next(iter(seq))
- except StopIteration:
- return environment.undefined("No first item, sequence was empty.")
-
-
-@environmentfilter
-def do_last(environment, seq):
- """
- Return the last item of a sequence.
-
- Note: Does not work with generators. You may want to explicitly
- convert it to a list:
-
- .. sourcecode:: jinja
-
- {{ data | selectattr('name', '==', 'Jinja') | list | last }}
- """
- try:
- return next(iter(reversed(seq)))
- except StopIteration:
- return environment.undefined("No last item, sequence was empty.")
-
-
-@contextfilter
-def do_random(context, seq):
- """Return a random item from the sequence."""
- try:
- return random.choice(seq)
- except IndexError:
- return context.environment.undefined("No random item, sequence was empty.")
-
-
-def do_filesizeformat(value, binary=False):
- """Format the value like a 'human-readable' file size (i.e. 13 kB,
- 4.1 MB, 102 Bytes, etc). Per default decimal prefixes are used (Mega,
- Giga, etc.), if the second parameter is set to `True` the binary
- prefixes are used (Mebi, Gibi).
- """
- bytes = float(value)
- base = 1024 if binary else 1000
- prefixes = [
- ("KiB" if binary else "kB"),
- ("MiB" if binary else "MB"),
- ("GiB" if binary else "GB"),
- ("TiB" if binary else "TB"),
- ("PiB" if binary else "PB"),
- ("EiB" if binary else "EB"),
- ("ZiB" if binary else "ZB"),
- ("YiB" if binary else "YB"),
- ]
- if bytes == 1:
- return "1 Byte"
- elif bytes < base:
- return f"{int(bytes)} Bytes"
- else:
- for i, prefix in enumerate(prefixes):
- unit = base ** (i + 2)
- if bytes < unit:
- return f"{base * bytes / unit:.1f} {prefix}"
- return f"{base * bytes / unit:.1f} {prefix}"
-
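A quick worked example of the branches above; the values are chosen to hit the byte, decimal and binary paths (sketch only, not part of the test suite):

    from jinja2.filters import do_filesizeformat

    do_filesizeformat(300)            # '300 Bytes'
    do_filesizeformat(1000000)        # '1.0 MB'  (decimal, base 1000)
    do_filesizeformat(1048576, True)  # '1.0 MiB' (binary, base 1024)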
-
-def do_pprint(value):
- """Pretty print a variable. Useful for debugging."""
- return pformat(value)
-
-
-@evalcontextfilter
-def do_urlize(
- eval_ctx, value, trim_url_limit=None, nofollow=False, target=None, rel=None
-):
- """Converts URLs in plain text into clickable links.
-
- If you pass the filter an additional integer it will shorten the urls
- to that number. Also a third argument exists that makes the urls
- "nofollow":
-
- .. sourcecode:: jinja
-
- {{ mytext|urlize(40, true) }}
- links are shortened to 40 chars and defined with rel="nofollow"
-
- If *target* is specified, the ``target`` attribute will be added to the
- ``<a>`` tag:
-
- .. sourcecode:: jinja
-
- {{ mytext|urlize(40, target='_blank') }}
-
- .. versionchanged:: 2.8
- The ``target`` parameter was added.
- """
- policies = eval_ctx.environment.policies
- rel = set((rel or "").split() or [])
- if nofollow:
- rel.add("nofollow")
- rel.update((policies["urlize.rel"] or "").split())
- if target is None:
- target = policies["urlize.target"]
- rel = " ".join(sorted(rel)) or None
- rv = urlize(value, trim_url_limit, rel=rel, target=target)
- if eval_ctx.autoescape:
- rv = Markup(rv)
- return rv
-
-
-def do_indent(s, width=4, first=False, blank=False):
- """Return a copy of the string with each line indented by 4 spaces. The
- first line and blank lines are not indented by default.
-
- :param width: Number of spaces to indent by.
- :param first: Don't skip indenting the first line.
- :param blank: Don't skip indenting empty lines.
-
- .. versionchanged:: 2.10
- Blank lines are not indented by default.
-
- The ``indentfirst`` argument was renamed to ``first``.
- """
- indention = " " * width
- newline = "\n"
-
- if isinstance(s, Markup):
- indention = Markup(indention)
- newline = Markup(newline)
-
- s += newline # this quirk is necessary for splitlines method
-
- if blank:
- rv = (newline + indention).join(s.splitlines())
- else:
- lines = s.splitlines()
- rv = lines.pop(0)
-
- if lines:
- rv += newline + newline.join(
- indention + line if line else line for line in lines
- )
-
- if first:
- rv = indention + rv
-
- return rv
-
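To make the skip-first/skip-blank behaviour concrete, a small sketch (the input strings are invented):

    from jinja2.filters import do_indent

    do_indent("a\nb\n\nc", width=2)
    # 'a\n  b\n\n  c'  -- first line and the blank line are left alone
    do_indent("a\nb", width=2, first=True)
    # '  a\n  b'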
-
-@environmentfilter
-def do_truncate(env, s, length=255, killwords=False, end="...", leeway=None):
- """Return a truncated copy of the string. The length is specified
- with the first parameter which defaults to ``255``. If the second
- parameter is ``true`` the filter will cut the text at length. Otherwise
- it will discard the last word. If the text was in fact
- truncated it will append an ellipsis sign (``"..."``). If you want a
- different ellipsis sign than ``"..."`` you can specify it using the
- third parameter. Strings that only exceed the length by the tolerance
- margin given in the fourth parameter will not be truncated.
-
- .. sourcecode:: jinja
-
- {{ "foo bar baz qux"|truncate(9) }}
- -> "foo..."
- {{ "foo bar baz qux"|truncate(9, True) }}
- -> "foo ba..."
- {{ "foo bar baz qux"|truncate(11) }}
- -> "foo bar baz qux"
- {{ "foo bar baz qux"|truncate(11, False, '...', 0) }}
- -> "foo bar..."
-
- The default leeway on newer Jinja versions is 5 (it was 0 before) and
- can be reconfigured globally via the ``truncate.leeway`` policy.
- """
- if leeway is None:
- leeway = env.policies["truncate.leeway"]
- assert length >= len(end), f"expected length >= {len(end)}, got {length}"
- assert leeway >= 0, f"expected leeway >= 0, got {leeway}"
- if len(s) <= length + leeway:
- return s
- if killwords:
- return s[: length - len(end)] + end
- result = s[: length - len(end)].rsplit(" ", 1)[0]
- return result + end
-
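A short sketch of how the length and the leeway policy interact, assuming a default ``Environment`` (whose ``truncate.leeway`` policy is 5):

    from jinja2 import Environment

    env = Environment()
    env.from_string("{{ 'foo bar baz qux'|truncate(9) }}").render()   # 'foo...'
    env.from_string("{{ 'foo bar baz qux'|truncate(11) }}").render()  # unchanged: 15 <= 11 + 5
    env.policies["truncate.leeway"] = 0
    env.from_string("{{ 'foo bar baz qux'|truncate(11) }}").render()  # 'foo bar...'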
-
-@environmentfilter
-def do_wordwrap(
- environment,
- s,
- width=79,
- break_long_words=True,
- wrapstring=None,
- break_on_hyphens=True,
-):
- """Wrap a string to the given width. Existing newlines are treated
- as paragraphs to be wrapped separately.
-
- :param s: Original text to wrap.
- :param width: Maximum length of wrapped lines.
- :param break_long_words: If a word is longer than ``width``, break
- it across lines.
- :param break_on_hyphens: If a word contains hyphens, it may be split
- across lines.
- :param wrapstring: String to join each wrapped line. Defaults to
- :attr:`Environment.newline_sequence`.
-
- .. versionchanged:: 2.11
- Existing newlines are treated as paragraphs wrapped separately.
-
- .. versionchanged:: 2.11
- Added the ``break_on_hyphens`` parameter.
-
- .. versionchanged:: 2.7
- Added the ``wrapstring`` parameter.
- """
-
- import textwrap
-
- if not wrapstring:
- wrapstring = environment.newline_sequence
-
- # textwrap.wrap doesn't consider existing newlines when wrapping.
- # If the string has a newline before width, wrap will still insert
- # a newline at width, resulting in a short line. Instead, split and
- # wrap each paragraph individually.
- return wrapstring.join(
- [
- wrapstring.join(
- textwrap.wrap(
- line,
- width=width,
- expand_tabs=False,
- replace_whitespace=False,
- break_long_words=break_long_words,
- break_on_hyphens=break_on_hyphens,
- )
- )
- for line in s.splitlines()
- ]
- )
-
-
-def do_wordcount(s):
- """Count the words in that string."""
- return len(_word_re.findall(soft_str(s)))
-
-
-def do_int(value, default=0, base=10):
- """Convert the value into an integer. If the
- conversion doesn't work it will return ``0``. You can
- override this default using the first parameter. You
- can also override the default base (10) in the second
- parameter, which handles input with prefixes such as
- 0b, 0o and 0x for bases 2, 8 and 16 respectively.
- The base is ignored for decimal numbers and non-string values.
- """
- try:
- if isinstance(value, str):
- return int(value, base)
- return int(value)
- except (TypeError, ValueError):
- # this quirk is necessary so that "42.23"|int gives 42.
- try:
- return int(float(value))
- except (TypeError, ValueError):
- return default
-
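A couple of concrete cases for the fallback logic above (sketch only; the input strings are invented):

    from jinja2.filters import do_int

    do_int("0x1A", base=16)  # 26 -- string with prefix, explicit base
    do_int("42.23")          # 42 -- int() fails, falls back to int(float(...))
    do_int("not a number")   # 0  -- both conversions fail, the default is returned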
-
-def do_float(value, default=0.0):
- """Convert the value into a floating point number. If the
- conversion doesn't work it will return ``0.0``. You can
- override this default using the first parameter.
- """
- try:
- return float(value)
- except (TypeError, ValueError):
- return default
-
-
-def do_format(value, *args, **kwargs):
- """Apply the given values to a `printf-style`_ format string, like
- ``string % values``.
-
- .. sourcecode:: jinja
-
- {{ "%s, %s!"|format(greeting, name) }}
- Hello, World!
-
- In most cases it should be more convenient and efficient to use the
- ``%`` operator or :meth:`str.format`.
-
- .. code-block:: text
-
- {{ "%s, %s!" % (greeting, name) }}
- {{ "{}, {}!".format(greeting, name) }}
-
- .. _printf-style: https://docs.python.org/library/stdtypes.html
- #printf-style-string-formatting
- """
- if args and kwargs:
- raise FilterArgumentError(
- "can't handle positional and keyword arguments at the same time"
- )
- return soft_str(value) % (kwargs or args)
-
-
-def do_trim(value, chars=None):
- """Strip leading and trailing characters, by default whitespace."""
- return soft_str(value).strip(chars)
-
-
-def do_striptags(value):
- """Strip SGML/XML tags and replace adjacent whitespace by one space."""
- if hasattr(value, "__html__"):
- value = value.__html__()
- return Markup(str(value)).striptags()
-
-
-def do_slice(value, slices, fill_with=None):
- """Slice an iterator and return a list of lists containing
- those items. Useful if you want to create a div containing
- three ul tags that represent columns:
-
- .. sourcecode:: html+jinja
-
- <div class="columnwrapper">
- {%- for column in items|slice(3) %}
- <ul class="column-{{ loop.index }}">
- {%- for item in column %}
- <li>{{ item }}</li>
- {%- endfor %}
- </ul>
- {%- endfor %}
- </div>
-
- If you pass it a second argument it's used to fill missing
- values on the last iteration.
- """
- seq = list(value)
- length = len(seq)
- items_per_slice = length // slices
- slices_with_extra = length % slices
- offset = 0
- for slice_number in range(slices):
- start = offset + slice_number * items_per_slice
- if slice_number < slices_with_extra:
- offset += 1
- end = offset + (slice_number + 1) * items_per_slice
- tmp = seq[start:end]
- if fill_with is not None and slice_number >= slices_with_extra:
- tmp.append(fill_with)
- yield tmp
-
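A worked example of the column math above (values invented):

    from jinja2.filters import do_slice

    list(do_slice(range(10), 3, fill_with=0))
    # [[0, 1, 2, 3], [4, 5, 6, 0], [7, 8, 9, 0]]
    # 10 items over 3 columns: the first column gets the extra item,
    # the remaining columns are padded with the fill value.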
-
-def do_batch(value, linecount, fill_with=None):
- """
- A filter that batches items. It works pretty much like `slice`
- just the other way round. It returns a list of lists with the
- given number of items. If you provide a second parameter this
- is used to fill up missing items. See this example:
-
- .. sourcecode:: html+jinja
-
- <table>
- {%- for row in items|batch(3, '&nbsp;') %}
- <tr>
- {%- for column in row %}
- <td>{{ column }}</td>
- {%- endfor %}
- </tr>
- {%- endfor %}
- </table>
- """
- tmp = []
- for item in value:
- if len(tmp) == linecount:
- yield tmp
- tmp = []
- tmp.append(item)
- if tmp:
- if fill_with is not None and len(tmp) < linecount:
- tmp += [fill_with] * (linecount - len(tmp))
- yield tmp
-
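And the mirror image for ``batch`` (sketch, values invented):

    from jinja2.filters import do_batch

    list(do_batch(range(7), 3, fill_with="x"))
    # [[0, 1, 2], [3, 4, 5], [6, 'x', 'x']]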
-
-def do_round(value, precision=0, method="common"):
- """Round the number to a given precision. The first
- parameter specifies the precision (default is ``0``), the
- second the rounding method:
-
- - ``'common'`` rounds either up or down
- - ``'ceil'`` always rounds up
- - ``'floor'`` always rounds down
-
- If you don't specify a method ``'common'`` is used.
-
- .. sourcecode:: jinja
-
- {{ 42.55|round }}
- -> 43.0
- {{ 42.55|round(1, 'floor') }}
- -> 42.5
-
- Note that even if rounded to 0 precision, a float is returned. If
- you need a real integer, pipe it through `int`:
-
- .. sourcecode:: jinja
-
- {{ 42.55|round|int }}
- -> 43
- """
- if method not in {"common", "ceil", "floor"}:
- raise FilterArgumentError("method must be common, ceil or floor")
- if method == "common":
- return round(value, precision)
- func = getattr(math, method)
- return func(value * (10 ** precision)) / (10 ** precision)
-
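A few sketched calls showing the non-default methods (note that a float is always returned):

    from jinja2.filters import do_round

    do_round(42.55, 1, "floor")  # 42.5
    do_round(42.55, 1, "ceil")   # 42.6
    do_round(42.55)              # 43.0 ('common' rounding, precision 0)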
-
-# Use a regular tuple repr here. This is what we did in the past and we
-# really want to hide this custom type as much as possible. In particular
-# we do not want to accidentally expose an auto generated repr in case
-# people start to print this out in comments or something similar for
-# debugging.
-_GroupTuple = namedtuple("_GroupTuple", ["grouper", "list"])
-_GroupTuple.__repr__ = tuple.__repr__
-_GroupTuple.__str__ = tuple.__str__
-
-
-@environmentfilter
-def do_groupby(environment, value, attribute):
- """Group a sequence of objects by an attribute using Python's
- :func:`itertools.groupby`. The attribute can use dot notation for
- nested access, like ``"address.city"``. Unlike Python's ``groupby``,
- the values are sorted first so only one group is returned for each
- unique value.
-
- For example, a list of ``User`` objects with a ``city`` attribute
- can be rendered in groups. In this example, ``grouper`` refers to
- the ``city`` value of the group.
-
- .. sourcecode:: html+jinja
-
- <ul>{% for city, items in users|groupby("city") %}
- <li>{{ city }}
- <ul>{% for user in items %}
- <li>{{ user.name }}
- {% endfor %}</ul>
- </li>
- {% endfor %}</ul>
-
- ``groupby`` yields namedtuples of ``(grouper, list)``, which
- can be used instead of the tuple unpacking above. ``grouper`` is the
- value of the attribute, and ``list`` is the items with that value.
-
- .. sourcecode:: html+jinja
-
- <ul>{% for group in users|groupby("city") %}
- <li>{{ group.grouper }}: {{ group.list|join(", ") }}
- {% endfor %}</ul>
-
- .. versionchanged:: 2.6
- The attribute supports dot notation for nested access.
- """
- expr = make_attrgetter(environment, attribute)
- return [
- _GroupTuple(key, list(values))
- for key, values in groupby(sorted(value, key=expr), expr)
- ]
-
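A compact sketch of the namedtuple form in a template (the ``users`` data is invented):

    from jinja2 import Environment

    env = Environment()
    tmpl = env.from_string(
        '{% for g in users|groupby("city") %}{{ g.grouper }}: {{ g.list|length }} {% endfor %}'
    )
    tmpl.render(users=[{"city": "Oslo"}, {"city": "Bergen"}, {"city": "Oslo"}])
    # 'Bergen: 1 Oslo: 2 '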
-
-@environmentfilter
-def do_sum(environment, iterable, attribute=None, start=0):
- """Returns the sum of a sequence of numbers plus the value of parameter
- 'start' (which defaults to 0). When the sequence is empty it returns
- start.
-
- It is also possible to sum up only certain attributes:
-
- .. sourcecode:: jinja
-
- Total: {{ items|sum(attribute='price') }}
-
- .. versionchanged:: 2.6
- The `attribute` parameter was added to allow summing up over
- attributes. Also the `start` parameter was moved to the right.
- """
- if attribute is not None:
- iterable = map(make_attrgetter(environment, attribute), iterable)
- return sum(iterable, start)
-
-
-def do_list(value):
- """Convert the value into a list. If it was a string the returned list
- will be a list of characters.
- """
- return list(value)
-
-
-def do_mark_safe(value):
- """Mark the value as safe which means that in an environment with automatic
- escaping enabled this variable will not be escaped.
- """
- return Markup(value)
-
-
-def do_mark_unsafe(value):
- """Mark a value as unsafe. This is the reverse operation for :func:`safe`."""
- return str(value)
-
-
-def do_reverse(value):
- """Reverse the object or return an iterator that iterates over it the other
- way round.
- """
- if isinstance(value, str):
- return value[::-1]
- try:
- return reversed(value)
- except TypeError:
- try:
- rv = list(value)
- rv.reverse()
- return rv
- except TypeError:
- raise FilterArgumentError("argument must be iterable")
-
-
-@environmentfilter
-def do_attr(environment, obj, name):
- """Get an attribute of an object. ``foo|attr("bar")`` works like
- ``foo.bar`` just that always an attribute is returned and items are not
- looked up.
-
- See :ref:`Notes on subscriptions <notes-on-subscriptions>` for more details.
- """
- try:
- name = str(name)
- except UnicodeError:
- pass
- else:
- try:
- value = getattr(obj, name)
- except AttributeError:
- pass
- else:
- if environment.sandboxed and not environment.is_safe_attribute(
- obj, name, value
- ):
- return environment.unsafe_undefined(obj, name)
- return value
- return environment.undefined(obj=obj, name=name)
-
-
-@contextfilter
-def do_map(*args, **kwargs):
- """Applies a filter on a sequence of objects or looks up an attribute.
- This is useful when dealing with lists of objects where you are only
- interested in a certain value of each of them.
-
- The basic usage is mapping on an attribute. Imagine you have a list
- of users but you are only interested in a list of usernames:
-
- .. sourcecode:: jinja
-
- Users on this page: {{ users|map(attribute='username')|join(', ') }}
-
- You can specify a ``default`` value to use if an object in the list
- does not have the given attribute.
-
- .. sourcecode:: jinja
-
- {{ users|map(attribute="username", default="Anonymous")|join(", ") }}
-
- Alternatively you can let it invoke a filter by passing the name of the
- filter and the arguments afterwards. A good example would be applying a
- text conversion filter on a sequence:
-
- .. sourcecode:: jinja
-
- Users on this page: {{ titles|map('lower')|join(', ') }}
-
- Similar to a generator comprehension such as:
-
- .. code-block:: python
-
- (u.username for u in users)
- (u.username or "Anonymous" for u in users)
- (do_lower(x) for x in titles)
-
- .. versionchanged:: 2.11.0
- Added the ``default`` parameter.
-
- .. versionadded:: 2.7
- """
- seq, func = prepare_map(args, kwargs)
- if seq:
- for item in seq:
- yield func(item)
-
-
-@contextfilter
-def do_select(*args, **kwargs):
- """Filters a sequence of objects by applying a test to each object,
- and only selecting the objects with the test succeeding.
-
- If no test is specified, each object will be evaluated as a boolean.
-
- Example usage:
-
- .. sourcecode:: jinja
-
- {{ numbers|select("odd") }}
- {{ numbers|select("odd") }}
- {{ numbers|select("divisibleby", 3) }}
- {{ numbers|select("lessthan", 42) }}
- {{ strings|select("equalto", "mystring") }}
-
- Similar to a generator comprehension such as:
-
- .. code-block:: python
-
- (n for n in numbers if test_odd(n))
- (n for n in numbers if test_divisibleby(n, 3))
-
- .. versionadded:: 2.7
- """
- return select_or_reject(args, kwargs, lambda x: x, False)
-
-
-@contextfilter
-def do_reject(*args, **kwargs):
- """Filters a sequence of objects by applying a test to each object,
- and rejecting the objects with the test succeeding.
-
- If no test is specified, each object will be evaluated as a boolean.
-
- Example usage:
-
- .. sourcecode:: jinja
-
- {{ numbers|reject("odd") }}
-
- Similar to a generator comprehension such as:
-
- .. code-block:: python
-
- (n for n in numbers if not test_odd(n))
-
- .. versionadded:: 2.7
- """
- return select_or_reject(args, kwargs, lambda x: not x, False)
-
-
-@contextfilter
-def do_selectattr(*args, **kwargs):
- """Filters a sequence of objects by applying a test to the specified
- attribute of each object, and only selecting the objects with the
- test succeeding.
-
- If no test is specified, the attribute's value will be evaluated as
- a boolean.
-
- Example usage:
-
- .. sourcecode:: jinja
-
- {{ users|selectattr("is_active") }}
- {{ users|selectattr("email", "none") }}
-
- Similar to a generator comprehension such as:
-
- .. code-block:: python
-
- (user for user in users if user.is_active)
- (user for user in users if test_none(user.email))
-
- .. versionadded:: 2.7
- """
- return select_or_reject(args, kwargs, lambda x: x, True)
-
-
-@contextfilter
-def do_rejectattr(*args, **kwargs):
- """Filters a sequence of objects by applying a test to the specified
- attribute of each object, and rejecting the objects with the test
- succeeding.
-
- If no test is specified, the attribute's value will be evaluated as
- a boolean.
-
- .. sourcecode:: jinja
-
- {{ users|rejectattr("is_active") }}
- {{ users|rejectattr("email", "none") }}
-
- Similar to a generator comprehension such as:
-
- .. code-block:: python
-
- (user for user in users if not user.is_active)
- (user for user in users if not test_none(user.email))
-
- .. versionadded:: 2.7
- """
- return select_or_reject(args, kwargs, lambda x: not x, True)
-
-
-@evalcontextfilter
-def do_tojson(eval_ctx, value, indent=None):
- """Dumps a structure to JSON so that it's safe to use in ``<script>``
- tags. It accepts the same arguments and returns a JSON string. Note that
- this is available in templates through the ``|tojson`` filter which will
- also mark the result as safe. Due to how this function escapes certain
- characters this is safe even if used outside of ``<script>`` tags.
-
- The following characters are escaped in strings:
-
- - ``<``
- - ``>``
- - ``&``
- - ``'``
-
- This makes it safe to embed such strings in any place in HTML with the
- notable exception of double quoted attributes. In that case single
- quote your attributes or HTML escape it in addition.
-
- The indent parameter can be used to enable pretty printing. Set it to
- the number of spaces that the structures should be indented with.
-
- Note that this filter is for use in HTML contexts only.
-
- .. versionadded:: 2.9
- """
- policies = eval_ctx.environment.policies
- dumper = policies["json.dumps_function"]
- options = policies["json.dumps_kwargs"]
- if indent is not None:
- options = dict(options)
- options["indent"] = indent
- return htmlsafe_json_dumps(value, dumper=dumper, **options)
-
-
-def prepare_map(args, kwargs):
- context = args[0]
- seq = args[1]
-
- if len(args) == 2 and "attribute" in kwargs:
- attribute = kwargs.pop("attribute")
- default = kwargs.pop("default", None)
- if kwargs:
- raise FilterArgumentError(
- f"Unexpected keyword argument {next(iter(kwargs))!r}"
- )
- func = make_attrgetter(context.environment, attribute, default=default)
- else:
- try:
- name = args[2]
- args = args[3:]
- except LookupError:
- raise FilterArgumentError("map requires a filter argument")
-
- def func(item):
- return context.environment.call_filter(
- name, item, args, kwargs, context=context
- )
-
- return seq, func
-
-
-def prepare_select_or_reject(args, kwargs, modfunc, lookup_attr):
- context = args[0]
- seq = args[1]
- if lookup_attr:
- try:
- attr = args[2]
- except LookupError:
- raise FilterArgumentError("Missing parameter for attribute name")
- transfunc = make_attrgetter(context.environment, attr)
- off = 1
- else:
- off = 0
-
- def transfunc(x):
- return x
-
- try:
- name = args[2 + off]
- args = args[3 + off :]
-
- def func(item):
- return context.environment.call_test(name, item, args, kwargs)
-
- except LookupError:
- func = bool
-
- return seq, lambda item: modfunc(func(transfunc(item)))
-
-
-def select_or_reject(args, kwargs, modfunc, lookup_attr):
- seq, func = prepare_select_or_reject(args, kwargs, modfunc, lookup_attr)
- if seq:
- for item in seq:
- if func(item):
- yield item
-
-
-FILTERS = {
- "abs": abs,
- "attr": do_attr,
- "batch": do_batch,
- "capitalize": do_capitalize,
- "center": do_center,
- "count": len,
- "d": do_default,
- "default": do_default,
- "dictsort": do_dictsort,
- "e": escape,
- "escape": escape,
- "filesizeformat": do_filesizeformat,
- "first": do_first,
- "float": do_float,
- "forceescape": do_forceescape,
- "format": do_format,
- "groupby": do_groupby,
- "indent": do_indent,
- "int": do_int,
- "join": do_join,
- "last": do_last,
- "length": len,
- "list": do_list,
- "lower": do_lower,
- "map": do_map,
- "min": do_min,
- "max": do_max,
- "pprint": do_pprint,
- "random": do_random,
- "reject": do_reject,
- "rejectattr": do_rejectattr,
- "replace": do_replace,
- "reverse": do_reverse,
- "round": do_round,
- "safe": do_mark_safe,
- "select": do_select,
- "selectattr": do_selectattr,
- "slice": do_slice,
- "sort": do_sort,
- "string": soft_str,
- "striptags": do_striptags,
- "sum": do_sum,
- "title": do_title,
- "trim": do_trim,
- "truncate": do_truncate,
- "unique": do_unique,
- "upper": do_upper,
- "urlencode": do_urlencode,
- "urlize": do_urlize,
- "wordcount": do_wordcount,
- "wordwrap": do_wordwrap,
- "xmlattr": do_xmlattr,
- "tojson": do_tojson,
-}
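The ``FILTERS`` mapping above is only the default set; an ``Environment`` exposes its copy as a plain dict, so extra filters can be registered at runtime. A minimal sketch (the ``shout`` filter is an invented example):

    from jinja2 import Environment

    env = Environment()
    env.filters["shout"] = lambda value: f"{value}".upper() + "!"
    env.from_string("{{ 'hello'|shout }}").render()  # 'HELLO!'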
diff --git a/src/jinja2/idtracking.py b/src/jinja2/idtracking.py
deleted file mode 100644
index 78cad916..00000000
--- a/src/jinja2/idtracking.py
+++ /dev/null
@@ -1,289 +0,0 @@
-from .visitor import NodeVisitor
-
-VAR_LOAD_PARAMETER = "param"
-VAR_LOAD_RESOLVE = "resolve"
-VAR_LOAD_ALIAS = "alias"
-VAR_LOAD_UNDEFINED = "undefined"
-
-
-def find_symbols(nodes, parent_symbols=None):
- sym = Symbols(parent=parent_symbols)
- visitor = FrameSymbolVisitor(sym)
- for node in nodes:
- visitor.visit(node)
- return sym
-
-
-def symbols_for_node(node, parent_symbols=None):
- sym = Symbols(parent=parent_symbols)
- sym.analyze_node(node)
- return sym
-
-
-class Symbols:
- def __init__(self, parent=None, level=None):
- if level is None:
- if parent is None:
- level = 0
- else:
- level = parent.level + 1
- self.level = level
- self.parent = parent
- self.refs = {}
- self.loads = {}
- self.stores = set()
-
- def analyze_node(self, node, **kwargs):
- visitor = RootVisitor(self)
- visitor.visit(node, **kwargs)
-
- def _define_ref(self, name, load=None):
- ident = f"l_{self.level}_{name}"
- self.refs[name] = ident
- if load is not None:
- self.loads[ident] = load
- return ident
-
- def find_load(self, target):
- if target in self.loads:
- return self.loads[target]
- if self.parent is not None:
- return self.parent.find_load(target)
-
- def find_ref(self, name):
- if name in self.refs:
- return self.refs[name]
- if self.parent is not None:
- return self.parent.find_ref(name)
-
- def ref(self, name):
- rv = self.find_ref(name)
- if rv is None:
- raise AssertionError(
- "Tried to resolve a name to a reference that was"
- f" unknown to the frame ({name!r})"
- )
- return rv
-
- def copy(self):
- rv = object.__new__(self.__class__)
- rv.__dict__.update(self.__dict__)
- rv.refs = self.refs.copy()
- rv.loads = self.loads.copy()
- rv.stores = self.stores.copy()
- return rv
-
- def store(self, name):
- self.stores.add(name)
-
- # If we have not seen the name referenced yet, we need to figure
- # out what to set it to.
- if name not in self.refs:
- # If there is a parent scope we check if the name has a
- # reference there. If it does it means we might have to alias
- # to a variable there.
- if self.parent is not None:
- outer_ref = self.parent.find_ref(name)
- if outer_ref is not None:
- self._define_ref(name, load=(VAR_LOAD_ALIAS, outer_ref))
- return
-
- # Otherwise we can just set it to undefined.
- self._define_ref(name, load=(VAR_LOAD_UNDEFINED, None))
-
- def declare_parameter(self, name):
- self.stores.add(name)
- return self._define_ref(name, load=(VAR_LOAD_PARAMETER, None))
-
- def load(self, name):
- target = self.find_ref(name)
- if target is None:
- self._define_ref(name, load=(VAR_LOAD_RESOLVE, name))
-
- def branch_update(self, branch_symbols):
- stores = {}
- for branch in branch_symbols:
- for target in branch.stores:
- if target in self.stores:
- continue
- stores[target] = stores.get(target, 0) + 1
-
- for sym in branch_symbols:
- self.refs.update(sym.refs)
- self.loads.update(sym.loads)
- self.stores.update(sym.stores)
-
- for name, branch_count in stores.items():
- if branch_count == len(branch_symbols):
- continue
- target = self.find_ref(name)
- assert target is not None, "should not happen"
-
- if self.parent is not None:
- outer_target = self.parent.find_ref(name)
- if outer_target is not None:
- self.loads[target] = (VAR_LOAD_ALIAS, outer_target)
- continue
- self.loads[target] = (VAR_LOAD_RESOLVE, name)
-
- def dump_stores(self):
- rv = {}
- node = self
- while node is not None:
- for name in node.stores:
- if name not in rv:
- rv[name] = self.find_ref(name)
- node = node.parent
- return rv
-
- def dump_param_targets(self):
- rv = set()
- node = self
- while node is not None:
- for target, (instr, _) in self.loads.items():
- if instr == VAR_LOAD_PARAMETER:
- rv.add(target)
- node = node.parent
- return rv
-
-
-class RootVisitor(NodeVisitor):
- def __init__(self, symbols):
- self.sym_visitor = FrameSymbolVisitor(symbols)
-
- def _simple_visit(self, node, **kwargs):
- for child in node.iter_child_nodes():
- self.sym_visitor.visit(child)
-
- visit_Template = (
- visit_Block
- ) = (
- visit_Macro
- ) = (
- visit_FilterBlock
- ) = visit_Scope = visit_If = visit_ScopedEvalContextModifier = _simple_visit
-
- def visit_AssignBlock(self, node, **kwargs):
- for child in node.body:
- self.sym_visitor.visit(child)
-
- def visit_CallBlock(self, node, **kwargs):
- for child in node.iter_child_nodes(exclude=("call",)):
- self.sym_visitor.visit(child)
-
- def visit_OverlayScope(self, node, **kwargs):
- for child in node.body:
- self.sym_visitor.visit(child)
-
- def visit_For(self, node, for_branch="body", **kwargs):
- if for_branch == "body":
- self.sym_visitor.visit(node.target, store_as_param=True)
- branch = node.body
- elif for_branch == "else":
- branch = node.else_
- elif for_branch == "test":
- self.sym_visitor.visit(node.target, store_as_param=True)
- if node.test is not None:
- self.sym_visitor.visit(node.test)
- return
- else:
- raise RuntimeError("Unknown for branch")
- for item in branch or ():
- self.sym_visitor.visit(item)
-
- def visit_With(self, node, **kwargs):
- for target in node.targets:
- self.sym_visitor.visit(target)
- for child in node.body:
- self.sym_visitor.visit(child)
-
- def generic_visit(self, node, *args, **kwargs):
- raise NotImplementedError(
- f"Cannot find symbols for {node.__class__.__name__!r}"
- )
-
-
-class FrameSymbolVisitor(NodeVisitor):
- """A visitor for `Frame.inspect`."""
-
- def __init__(self, symbols):
- self.symbols = symbols
-
- def visit_Name(self, node, store_as_param=False, **kwargs):
- """All assignments to names go through this function."""
- if store_as_param or node.ctx == "param":
- self.symbols.declare_parameter(node.name)
- elif node.ctx == "store":
- self.symbols.store(node.name)
- elif node.ctx == "load":
- self.symbols.load(node.name)
-
- def visit_NSRef(self, node, **kwargs):
- self.symbols.load(node.name)
-
- def visit_If(self, node, **kwargs):
- self.visit(node.test, **kwargs)
-
- original_symbols = self.symbols
-
- def inner_visit(nodes):
- self.symbols = rv = original_symbols.copy()
- for subnode in nodes:
- self.visit(subnode, **kwargs)
- self.symbols = original_symbols
- return rv
-
- body_symbols = inner_visit(node.body)
- elif_symbols = inner_visit(node.elif_)
- else_symbols = inner_visit(node.else_ or ())
-
- self.symbols.branch_update([body_symbols, elif_symbols, else_symbols])
-
- def visit_Macro(self, node, **kwargs):
- self.symbols.store(node.name)
-
- def visit_Import(self, node, **kwargs):
- self.generic_visit(node, **kwargs)
- self.symbols.store(node.target)
-
- def visit_FromImport(self, node, **kwargs):
- self.generic_visit(node, **kwargs)
- for name in node.names:
- if isinstance(name, tuple):
- self.symbols.store(name[1])
- else:
- self.symbols.store(name)
-
- def visit_Assign(self, node, **kwargs):
- """Visit assignments in the correct order."""
- self.visit(node.node, **kwargs)
- self.visit(node.target, **kwargs)
-
- def visit_For(self, node, **kwargs):
- """Visiting stops at for blocks. However the block sequence
- is visited as part of the outer scope.
- """
- self.visit(node.iter, **kwargs)
-
- def visit_CallBlock(self, node, **kwargs):
- self.visit(node.call, **kwargs)
-
- def visit_FilterBlock(self, node, **kwargs):
- self.visit(node.filter, **kwargs)
-
- def visit_With(self, node, **kwargs):
- for target in node.values:
- self.visit(target)
-
- def visit_AssignBlock(self, node, **kwargs):
- """Stop visiting at block assigns."""
- self.visit(node.target, **kwargs)
-
- def visit_Scope(self, node, **kwargs):
- """Stop visiting at scopes."""
-
- def visit_Block(self, node, **kwargs):
- """Stop visiting at blocks."""
-
- def visit_OverlayScope(self, node, **kwargs):
- """Do not visit into overlay scopes."""
diff --git a/src/jinja2/lexer.py b/src/jinja2/lexer.py
deleted file mode 100644
index 082a051d..00000000
--- a/src/jinja2/lexer.py
+++ /dev/null
@@ -1,801 +0,0 @@
-"""Implements a Jinja / Python combination lexer. The ``Lexer`` class
-is used to do some preprocessing. It filters out invalid operators like
-the bitshift operators we don't allow in templates. It separates
-template code and python code in expressions.
-"""
-import re
-from ast import literal_eval
-from collections import deque
-from operator import itemgetter
-from sys import intern
-
-from ._identifier import pattern as name_re
-from .exceptions import TemplateSyntaxError
-from .utils import LRUCache
-
-# cache for the lexers. Exists in order to be able to have multiple
-# environments with the same lexer
-_lexer_cache = LRUCache(50)
-
-# static regular expressions
-whitespace_re = re.compile(r"\s+")
-newline_re = re.compile(r"(\r\n|\r|\n)")
-string_re = re.compile(
- r"('([^'\\]*(?:\\.[^'\\]*)*)'" r'|"([^"\\]*(?:\\.[^"\\]*)*)")', re.S
-)
-integer_re = re.compile(r"(\d+_)*\d+")
-float_re = re.compile(
- r"""
- (?<!\.) # doesn't start with a .
- (\d+_)*\d+ # digits, possibly _ separated
- (
- (\.(\d+_)*\d+)? # optional fractional part
- e[+\-]?(\d+_)*\d+ # exponent part
- |
- \.(\d+_)*\d+ # required fractional part
- )
- """,
- re.IGNORECASE | re.VERBOSE,
-)
-
-# intern the tokens and keep references to them
-TOKEN_ADD = intern("add")
-TOKEN_ASSIGN = intern("assign")
-TOKEN_COLON = intern("colon")
-TOKEN_COMMA = intern("comma")
-TOKEN_DIV = intern("div")
-TOKEN_DOT = intern("dot")
-TOKEN_EQ = intern("eq")
-TOKEN_FLOORDIV = intern("floordiv")
-TOKEN_GT = intern("gt")
-TOKEN_GTEQ = intern("gteq")
-TOKEN_LBRACE = intern("lbrace")
-TOKEN_LBRACKET = intern("lbracket")
-TOKEN_LPAREN = intern("lparen")
-TOKEN_LT = intern("lt")
-TOKEN_LTEQ = intern("lteq")
-TOKEN_MOD = intern("mod")
-TOKEN_MUL = intern("mul")
-TOKEN_NE = intern("ne")
-TOKEN_PIPE = intern("pipe")
-TOKEN_POW = intern("pow")
-TOKEN_RBRACE = intern("rbrace")
-TOKEN_RBRACKET = intern("rbracket")
-TOKEN_RPAREN = intern("rparen")
-TOKEN_SEMICOLON = intern("semicolon")
-TOKEN_SUB = intern("sub")
-TOKEN_TILDE = intern("tilde")
-TOKEN_WHITESPACE = intern("whitespace")
-TOKEN_FLOAT = intern("float")
-TOKEN_INTEGER = intern("integer")
-TOKEN_NAME = intern("name")
-TOKEN_STRING = intern("string")
-TOKEN_OPERATOR = intern("operator")
-TOKEN_BLOCK_BEGIN = intern("block_begin")
-TOKEN_BLOCK_END = intern("block_end")
-TOKEN_VARIABLE_BEGIN = intern("variable_begin")
-TOKEN_VARIABLE_END = intern("variable_end")
-TOKEN_RAW_BEGIN = intern("raw_begin")
-TOKEN_RAW_END = intern("raw_end")
-TOKEN_COMMENT_BEGIN = intern("comment_begin")
-TOKEN_COMMENT_END = intern("comment_end")
-TOKEN_COMMENT = intern("comment")
-TOKEN_LINESTATEMENT_BEGIN = intern("linestatement_begin")
-TOKEN_LINESTATEMENT_END = intern("linestatement_end")
-TOKEN_LINECOMMENT_BEGIN = intern("linecomment_begin")
-TOKEN_LINECOMMENT_END = intern("linecomment_end")
-TOKEN_LINECOMMENT = intern("linecomment")
-TOKEN_DATA = intern("data")
-TOKEN_INITIAL = intern("initial")
-TOKEN_EOF = intern("eof")
-
-# bind operators to token types
-operators = {
- "+": TOKEN_ADD,
- "-": TOKEN_SUB,
- "/": TOKEN_DIV,
- "//": TOKEN_FLOORDIV,
- "*": TOKEN_MUL,
- "%": TOKEN_MOD,
- "**": TOKEN_POW,
- "~": TOKEN_TILDE,
- "[": TOKEN_LBRACKET,
- "]": TOKEN_RBRACKET,
- "(": TOKEN_LPAREN,
- ")": TOKEN_RPAREN,
- "{": TOKEN_LBRACE,
- "}": TOKEN_RBRACE,
- "==": TOKEN_EQ,
- "!=": TOKEN_NE,
- ">": TOKEN_GT,
- ">=": TOKEN_GTEQ,
- "<": TOKEN_LT,
- "<=": TOKEN_LTEQ,
- "=": TOKEN_ASSIGN,
- ".": TOKEN_DOT,
- ":": TOKEN_COLON,
- "|": TOKEN_PIPE,
- ",": TOKEN_COMMA,
- ";": TOKEN_SEMICOLON,
-}
-
-reverse_operators = {v: k for k, v in operators.items()}
-assert len(operators) == len(reverse_operators), "operators dropped"
-operator_re = re.compile(
- f"({'|'.join(re.escape(x) for x in sorted(operators, key=lambda x: -len(x)))})"
-)
-
-ignored_tokens = frozenset(
- [
- TOKEN_COMMENT_BEGIN,
- TOKEN_COMMENT,
- TOKEN_COMMENT_END,
- TOKEN_WHITESPACE,
- TOKEN_LINECOMMENT_BEGIN,
- TOKEN_LINECOMMENT_END,
- TOKEN_LINECOMMENT,
- ]
-)
-ignore_if_empty = frozenset(
- [TOKEN_WHITESPACE, TOKEN_DATA, TOKEN_COMMENT, TOKEN_LINECOMMENT]
-)
-
-
-def _describe_token_type(token_type):
- if token_type in reverse_operators:
- return reverse_operators[token_type]
- return {
- TOKEN_COMMENT_BEGIN: "begin of comment",
- TOKEN_COMMENT_END: "end of comment",
- TOKEN_COMMENT: "comment",
- TOKEN_LINECOMMENT: "comment",
- TOKEN_BLOCK_BEGIN: "begin of statement block",
- TOKEN_BLOCK_END: "end of statement block",
- TOKEN_VARIABLE_BEGIN: "begin of print statement",
- TOKEN_VARIABLE_END: "end of print statement",
- TOKEN_LINESTATEMENT_BEGIN: "begin of line statement",
- TOKEN_LINESTATEMENT_END: "end of line statement",
- TOKEN_DATA: "template data / text",
- TOKEN_EOF: "end of template",
- }.get(token_type, token_type)
-
-
-def describe_token(token):
- """Returns a description of the token."""
- if token.type == TOKEN_NAME:
- return token.value
- return _describe_token_type(token.type)
-
-
-def describe_token_expr(expr):
- """Like `describe_token` but for token expressions."""
- if ":" in expr:
- type, value = expr.split(":", 1)
- if type == TOKEN_NAME:
- return value
- else:
- type = expr
- return _describe_token_type(type)
-
-
-def count_newlines(value):
- """Count the number of newline characters in the string. This is
- useful for extensions that filter a stream.
- """
- return len(newline_re.findall(value))
-
-
-def compile_rules(environment):
- """Compiles all the rules from the environment into a list of rules."""
- e = re.escape
- rules = [
- (
- len(environment.comment_start_string),
- TOKEN_COMMENT_BEGIN,
- e(environment.comment_start_string),
- ),
- (
- len(environment.block_start_string),
- TOKEN_BLOCK_BEGIN,
- e(environment.block_start_string),
- ),
- (
- len(environment.variable_start_string),
- TOKEN_VARIABLE_BEGIN,
- e(environment.variable_start_string),
- ),
- ]
-
- if environment.line_statement_prefix is not None:
- rules.append(
- (
- len(environment.line_statement_prefix),
- TOKEN_LINESTATEMENT_BEGIN,
- r"^[ \t\v]*" + e(environment.line_statement_prefix),
- )
- )
- if environment.line_comment_prefix is not None:
- rules.append(
- (
- len(environment.line_comment_prefix),
- TOKEN_LINECOMMENT_BEGIN,
- r"(?:^|(?<=\S))[^\S\r\n]*" + e(environment.line_comment_prefix),
- )
- )
-
- return [x[1:] for x in sorted(rules, reverse=True)]
-
-
-class Failure:
- """Class that raises a `TemplateSyntaxError` if called.
- Used by the `Lexer` to specify known errors.
- """
-
- def __init__(self, message, cls=TemplateSyntaxError):
- self.message = message
- self.error_class = cls
-
- def __call__(self, lineno, filename):
- raise self.error_class(self.message, lineno, filename)
-
-
-class Token(tuple):
- """Token class."""
-
- __slots__ = ()
- lineno, type, value = (property(itemgetter(x)) for x in range(3))
-
- def __new__(cls, lineno, type, value):
- return tuple.__new__(cls, (lineno, intern(str(type)), value))
-
- def __str__(self):
- if self.type in reverse_operators:
- return reverse_operators[self.type]
- elif self.type == "name":
- return self.value
- return self.type
-
- def test(self, expr):
- """Test a token against a token expression. This can either be a
- token type or ``'token_type:token_value'``. This can only test
- against string values and types.
- """
- # here we do a regular string equality check as test_any is usually
- # passed an iterable of non-interned strings.
- if self.type == expr:
- return True
- elif ":" in expr:
- return expr.split(":", 1) == [self.type, self.value]
- return False
-
- def test_any(self, *iterable):
- """Test against multiple token expressions."""
- for expr in iterable:
- if self.test(expr):
- return True
- return False
-
- def __repr__(self):
- return f"Token({self.lineno!r}, {self.type!r}, {self.value!r})"
-
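The token-expression convention accepted by ``test`` (and by ``TokenStream.expect`` below) is either a bare type or ``type:value``. A quick sketch:

    from jinja2.lexer import Token

    tok = Token(1, "name", "endfor")
    tok.test("name")         # True  -- type only
    tok.test("name:endfor")  # True  -- type and value
    tok.test("name:endif")   # False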
-
-class TokenStreamIterator:
- """The iterator for tokenstreams. Iterate over the stream
- until the eof token is reached.
- """
-
- def __init__(self, stream):
- self.stream = stream
-
- def __iter__(self):
- return self
-
- def __next__(self):
- token = self.stream.current
- if token.type is TOKEN_EOF:
- self.stream.close()
- raise StopIteration()
- next(self.stream)
- return token
-
-
-class TokenStream:
- """A token stream is an iterable that yields :class:`Token`\\s. The
- parser however does not iterate over it but calls :meth:`next` to go
- one token ahead. The current active token is stored as :attr:`current`.
- """
-
- def __init__(self, generator, name, filename):
- self._iter = iter(generator)
- self._pushed = deque()
- self.name = name
- self.filename = filename
- self.closed = False
- self.current = Token(1, TOKEN_INITIAL, "")
- next(self)
-
- def __iter__(self):
- return TokenStreamIterator(self)
-
- def __bool__(self):
- return bool(self._pushed) or self.current.type is not TOKEN_EOF
-
- __nonzero__ = __bool__ # py2
-
- @property
- def eos(self):
- """Are we at the end of the stream?"""
- return not self
-
- def push(self, token):
- """Push a token back to the stream."""
- self._pushed.append(token)
-
- def look(self):
- """Look at the next token."""
- old_token = next(self)
- result = self.current
- self.push(result)
- self.current = old_token
- return result
-
- def skip(self, n=1):
- """Got n tokens ahead."""
- for _ in range(n):
- next(self)
-
- def next_if(self, expr):
- """Perform the token test and return the token if it matched.
- Otherwise the return value is `None`.
- """
- if self.current.test(expr):
- return next(self)
-
- def skip_if(self, expr):
- """Like :meth:`next_if` but only returns `True` or `False`."""
- return self.next_if(expr) is not None
-
- def __next__(self):
- """Go one token ahead and return the old one.
-
- Use the built-in :func:`next` instead of calling this directly.
- """
- rv = self.current
- if self._pushed:
- self.current = self._pushed.popleft()
- elif self.current.type is not TOKEN_EOF:
- try:
- self.current = next(self._iter)
- except StopIteration:
- self.close()
- return rv
-
- def close(self):
- """Close the stream."""
- self.current = Token(self.current.lineno, TOKEN_EOF, "")
- self._iter = None
- self.closed = True
-
- def expect(self, expr):
- """Expect a given token type and return it. This accepts the same
- argument as :meth:`jinja2.lexer.Token.test`.
- """
- if not self.current.test(expr):
- expr = describe_token_expr(expr)
- if self.current.type is TOKEN_EOF:
- raise TemplateSyntaxError(
- f"unexpected end of template, expected {expr!r}.",
- self.current.lineno,
- self.name,
- self.filename,
- )
- raise TemplateSyntaxError(
- f"expected token {expr!r}, got {describe_token(self.current)!r}",
- self.current.lineno,
- self.name,
- self.filename,
- )
- try:
- return self.current
- finally:
- next(self)
-
-
-def get_lexer(environment):
- """Return a lexer which is probably cached."""
- key = (
- environment.block_start_string,
- environment.block_end_string,
- environment.variable_start_string,
- environment.variable_end_string,
- environment.comment_start_string,
- environment.comment_end_string,
- environment.line_statement_prefix,
- environment.line_comment_prefix,
- environment.trim_blocks,
- environment.lstrip_blocks,
- environment.newline_sequence,
- environment.keep_trailing_newline,
- )
- lexer = _lexer_cache.get(key)
- if lexer is None:
- lexer = Lexer(environment)
- _lexer_cache[key] = lexer
- return lexer
-
-
-class OptionalLStrip(tuple):
- """A special tuple for marking a point in the state that can have
- lstrip applied.
- """
-
- __slots__ = ()
-
- # Even though it looks like a no-op, creating instances fails
- # without this.
- def __new__(cls, *members, **kwargs):
- return super().__new__(cls, members)
-
-
-class Lexer:
- """Class that implements a lexer for a given environment. Automatically
- created by the environment class, usually you don't have to do that.
-
- Note that the lexer is not automatically bound to an environment.
- Multiple environments can share the same lexer.
- """
-
- def __init__(self, environment):
- # shortcuts
- e = re.escape
-
- def c(x):
- return re.compile(x, re.M | re.S)
-
- # lexing rules for tags
- tag_rules = [
- (whitespace_re, TOKEN_WHITESPACE, None),
- (float_re, TOKEN_FLOAT, None),
- (integer_re, TOKEN_INTEGER, None),
- (name_re, TOKEN_NAME, None),
- (string_re, TOKEN_STRING, None),
- (operator_re, TOKEN_OPERATOR, None),
- ]
-
- # assemble the root lexing rule. because "|" is ungreedy
- # we have to sort by length so that the lexer continues working
- # as expected when we have parsing rules like <% for block and
- # <%= for variables. (if someone wants asp like syntax)
- # variables are just part of the rules if variable processing
- # is required.
- root_tag_rules = compile_rules(environment)
-
- block_start_re = e(environment.block_start_string)
- block_end_re = e(environment.block_end_string)
- comment_end_re = e(environment.comment_end_string)
- variable_end_re = e(environment.variable_end_string)
-
- # block suffix if trimming is enabled
- block_suffix_re = "\\n?" if environment.trim_blocks else ""
-
- # If lstrip is enabled, it should not be applied if there is any
- # non-whitespace between the newline and block.
- self.lstrip_unless_re = c(r"[^ \t]") if environment.lstrip_blocks else None
-
- self.newline_sequence = environment.newline_sequence
- self.keep_trailing_newline = environment.keep_trailing_newline
-
- root_raw_re = (
- fr"(?P<raw_begin>{block_start_re}(\-|\+|)\s*raw\s*"
- fr"(?:\-{block_end_re}\s*|{block_end_re}))"
- )
- root_parts_re = "|".join(
- [root_raw_re] + [fr"(?P<{n}>{r}(\-|\+|))" for n, r in root_tag_rules]
- )
-
- # global lexing rules
- self.rules = {
- "root": [
- # directives
- (
- c(fr"(.*?)(?:{root_parts_re})"),
- OptionalLStrip(TOKEN_DATA, "#bygroup"),
- "#bygroup",
- ),
- # data
- (c(".+"), TOKEN_DATA, None),
- ],
- # comments
- TOKEN_COMMENT_BEGIN: [
- (
- c(
- fr"(.*?)((?:\+{comment_end_re}|\-{comment_end_re}\s*"
- fr"|{comment_end_re}{block_suffix_re}))"
- ),
- (TOKEN_COMMENT, TOKEN_COMMENT_END),
- "#pop",
- ),
- (c(r"(.)"), (Failure("Missing end of comment tag"),), None),
- ],
- # blocks
- TOKEN_BLOCK_BEGIN: [
- (
- c(
- fr"(?:\+{block_end_re}|\-{block_end_re}\s*"
- fr"|{block_end_re}{block_suffix_re})"
- ),
- TOKEN_BLOCK_END,
- "#pop",
- ),
- ]
- + tag_rules,
- # variables
- TOKEN_VARIABLE_BEGIN: [
- (
- c(fr"\-{variable_end_re}\s*|{variable_end_re}"),
- TOKEN_VARIABLE_END,
- "#pop",
- )
- ]
- + tag_rules,
- # raw block
- TOKEN_RAW_BEGIN: [
- (
- c(
- fr"(.*?)((?:{block_start_re}(\-|\+|))\s*endraw\s*"
- fr"(?:\+{block_end_re}|\-{block_end_re}\s*"
- fr"|{block_end_re}{block_suffix_re}))"
- ),
- OptionalLStrip(TOKEN_DATA, TOKEN_RAW_END),
- "#pop",
- ),
- (c(r"(.)"), (Failure("Missing end of raw directive"),), None),
- ],
- # line statements
- TOKEN_LINESTATEMENT_BEGIN: [
- (c(r"\s*(\n|$)"), TOKEN_LINESTATEMENT_END, "#pop")
- ]
- + tag_rules,
- # line comments
- TOKEN_LINECOMMENT_BEGIN: [
- (
- c(r"(.*?)()(?=\n|$)"),
- (TOKEN_LINECOMMENT, TOKEN_LINECOMMENT_END),
- "#pop",
- )
- ],
- }
-
- def _normalize_newlines(self, value):
- """Replace all newlines with the configured sequence in strings
- and template data.
- """
- return newline_re.sub(self.newline_sequence, value)
-
- def tokenize(self, source, name=None, filename=None, state=None):
- """Calls tokeniter + tokenize and wraps it in a token stream."""
- stream = self.tokeniter(source, name, filename, state)
- return TokenStream(self.wrap(stream, name, filename), name, filename)
-
- def wrap(self, stream, name=None, filename=None):
- """This is called with the stream as returned by `tokenize` and wraps
- every token in a :class:`Token` and converts the value.
- """
- for lineno, token, value in stream:
- if token in ignored_tokens:
- continue
- elif token == TOKEN_LINESTATEMENT_BEGIN:
- token = TOKEN_BLOCK_BEGIN
- elif token == TOKEN_LINESTATEMENT_END:
- token = TOKEN_BLOCK_END
- # we are not interested in those tokens in the parser
- elif token in (TOKEN_RAW_BEGIN, TOKEN_RAW_END):
- continue
- elif token == TOKEN_DATA:
- value = self._normalize_newlines(value)
- elif token == "keyword":
- token = value
- elif token == TOKEN_NAME:
- value = str(value)
- if not value.isidentifier():
- raise TemplateSyntaxError(
- "Invalid character in identifier", lineno, name, filename
- )
- elif token == TOKEN_STRING:
- # try to unescape string
- try:
- value = (
- self._normalize_newlines(value[1:-1])
- .encode("ascii", "backslashreplace")
- .decode("unicode-escape")
- )
- except Exception as e:
- msg = str(e).split(":")[-1].strip()
- raise TemplateSyntaxError(msg, lineno, name, filename)
- elif token == TOKEN_INTEGER:
- value = int(value.replace("_", ""))
- elif token == TOKEN_FLOAT:
- # remove all "_" first to support more Python versions
- value = literal_eval(value.replace("_", ""))
- elif token == TOKEN_OPERATOR:
- token = operators[value]
- yield Token(lineno, token, value)
-
- def tokeniter(self, source, name, filename=None, state=None):
- """This method tokenizes the text and returns the tokens in a
- generator. Use this method if you just want to tokenize a template.
- """
- lines = source.splitlines()
- if self.keep_trailing_newline and source:
- if source.endswith(("\r\n", "\r", "\n")):
- lines.append("")
- source = "\n".join(lines)
- pos = 0
- lineno = 1
- stack = ["root"]
- if state is not None and state != "root":
- assert state in ("variable", "block"), "invalid state"
- stack.append(state + "_begin")
- statetokens = self.rules[stack[-1]]
- source_length = len(source)
- balancing_stack = []
- lstrip_unless_re = self.lstrip_unless_re
- newlines_stripped = 0
- line_starting = True
-
- while 1:
- # tokenizer loop
- for regex, tokens, new_state in statetokens:
- m = regex.match(source, pos)
- # if no match we try again with the next rule
- if m is None:
- continue
-
- # we only match blocks and variables if braces / parentheses
- # are balanced. continue parsing with the lower rule which
- # is the operator rule. do this only if the end tags look
- # like operators
- if balancing_stack and tokens in (
- TOKEN_VARIABLE_END,
- TOKEN_BLOCK_END,
- TOKEN_LINESTATEMENT_END,
- ):
- continue
-
- # tuples support more options
- if isinstance(tokens, tuple):
- groups = m.groups()
-
- if isinstance(tokens, OptionalLStrip):
- # Rule supports lstrip. Match will look like
- # text, block type, whitespace control, type, control, ...
- text = groups[0]
-
- # Skipping the text and first type, every other group is the
- # whitespace control for each type. One of the groups will be
- # -, +, or empty string instead of None.
- strip_sign = next(g for g in groups[2::2] if g is not None)
-
- if strip_sign == "-":
- # Strip all whitespace between the text and the tag.
- stripped = text.rstrip()
- newlines_stripped = text[len(stripped) :].count("\n")
- groups = (stripped,) + groups[1:]
- elif (
- # Not marked for preserving whitespace.
- strip_sign != "+"
- # lstrip is enabled.
- and lstrip_unless_re is not None
- # Not a variable expression.
- and not m.groupdict().get(TOKEN_VARIABLE_BEGIN)
- ):
- # The start of text between the last newline and the tag.
- l_pos = text.rfind("\n") + 1
- if l_pos > 0 or line_starting:
- # If there's only whitespace between the newline and the
- # tag, strip it.
- if not lstrip_unless_re.search(text, l_pos):
- groups = (text[:l_pos],) + groups[1:]
-
- for idx, token in enumerate(tokens):
- # failure group
- if token.__class__ is Failure:
- raise token(lineno, filename)
- # bygroup is a bit more complex, in that case we
- # yield for the current token the first named
- # group that matched
- elif token == "#bygroup":
- for key, value in m.groupdict().items():
- if value is not None:
- yield lineno, key, value
- lineno += value.count("\n")
- break
- else:
- raise RuntimeError(
- f"{regex!r} wanted to resolve the token dynamically"
- " but no group matched"
- )
- # normal group
- else:
- data = groups[idx]
- if data or token not in ignore_if_empty:
- yield lineno, token, data
- lineno += data.count("\n") + newlines_stripped
- newlines_stripped = 0
-
- # if the rule's token is a plain string it is yielded as-is.
- else:
- data = m.group()
- # update brace/parentheses balance
- if tokens == TOKEN_OPERATOR:
- if data == "{":
- balancing_stack.append("}")
- elif data == "(":
- balancing_stack.append(")")
- elif data == "[":
- balancing_stack.append("]")
- elif data in ("}", ")", "]"):
- if not balancing_stack:
- raise TemplateSyntaxError(
- f"unexpected '{data}'", lineno, name, filename
- )
- expected_op = balancing_stack.pop()
- if expected_op != data:
- raise TemplateSyntaxError(
- f"unexpected '{data}', expected '{expected_op}'",
- lineno,
- name,
- filename,
- )
- # yield items
- if data or tokens not in ignore_if_empty:
- yield lineno, tokens, data
- lineno += data.count("\n")
-
- line_starting = m.group()[-1:] == "\n"
-
- # fetch new position into new variable so that we can check
- # if there is an internal parsing error which would result
- # in an infinite loop
- pos2 = m.end()
-
- # handle state changes
- if new_state is not None:
- # remove the uppermost state
- if new_state == "#pop":
- stack.pop()
- # resolve the new state by group checking
- elif new_state == "#bygroup":
- for key, value in m.groupdict().items():
- if value is not None:
- stack.append(key)
- break
- else:
- raise RuntimeError(
- f"{regex!r} wanted to resolve the new state dynamically"
- f" but no group matched"
- )
- # direct state name given
- else:
- stack.append(new_state)
- statetokens = self.rules[stack[-1]]
- # we are still at the same position and there was no stack
- # change. this would mean an infinite loop, so raise an error
- elif pos2 == pos:
- raise RuntimeError(
- f"{regex!r} yielded empty string without stack change"
- )
- # publish the new position and start again
- pos = pos2
- break
- # if the loop terminated without break we haven't found a single
- # match; either we are at the end of the file or we have a problem
- else:
- # end of text
- if pos >= source_length:
- return
- # something went wrong
- raise TemplateSyntaxError(
- f"unexpected char {source[pos]!r} at {pos}", lineno, name, filename
- )
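To see the raw output of ``tokeniter`` without going through the parser, ``Environment.lex`` is the public entry point. A small sketch (the template source is invented):

    from jinja2 import Environment

    env = Environment()
    for lineno, token_type, value in env.lex("Hello {{ name }}!"):
        print(lineno, token_type, repr(value))
    # 1 data 'Hello '
    # 1 variable_begin '{{'
    # 1 whitespace ' '
    # 1 name 'name'
    # 1 whitespace ' '
    # 1 variable_end '}}'
    # 1 data '!'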
diff --git a/src/jinja2/loaders.py b/src/jinja2/loaders.py
deleted file mode 100644
index 6b71b835..00000000
--- a/src/jinja2/loaders.py
+++ /dev/null
@@ -1,566 +0,0 @@
-"""API and implementations for loading templates from different data
-sources.
-"""
-import importlib.util
-import os
-import sys
-import weakref
-import zipimport
-from collections import abc
-from hashlib import sha1
-from importlib import import_module
-from types import ModuleType
-
-from .exceptions import TemplateNotFound
-from .utils import internalcode
-from .utils import open_if_exists
-
-
-def split_template_path(template):
- """Split a path into segments and perform a sanity check. If it detects
- '..' in the path it will raise a `TemplateNotFound` error.
- """
- pieces = []
- for piece in template.split("/"):
- if (
- os.path.sep in piece
- or (os.path.altsep and os.path.altsep in piece)
- or piece == os.path.pardir
- ):
- raise TemplateNotFound(template)
- elif piece and piece != ".":
- pieces.append(piece)
- return pieces
-
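Concretely (sketch; the paths are invented):

    from jinja2.loaders import split_template_path

    split_template_path("emails/./welcome.html")  # ['emails', 'welcome.html']
    split_template_path("../secrets.txt")         # raises TemplateNotFound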
-
-class BaseLoader:
- """Baseclass for all loaders. Subclass this and override `get_source` to
- implement a custom loading mechanism. The environment provides a
- `get_template` method that calls the loader's `load` method to get the
- :class:`Template` object.
-
- A very basic example for a loader that looks up templates on the file
- system could look like this::
-
- from jinja2 import BaseLoader, TemplateNotFound
- from os.path import join, exists, getmtime
-
- class MyLoader(BaseLoader):
-
- def __init__(self, path):
- self.path = path
-
- def get_source(self, environment, template):
- path = join(self.path, template)
- if not exists(path):
- raise TemplateNotFound(template)
- mtime = getmtime(path)
- with open(path) as f:
- source = f.read()
- return source, path, lambda: mtime == getmtime(path)
- """
-
- #: if set to `False` it indicates that the loader cannot provide access
- #: to the source of templates.
- #:
- #: .. versionadded:: 2.4
- has_source_access = True
-
- def get_source(self, environment, template):
- """Get the template source, filename and reload helper for a template.
- It's passed the environment and template name and has to return a
- tuple in the form ``(source, filename, uptodate)`` or raise a
- `TemplateNotFound` error if it can't locate the template.
-
- The source part of the returned tuple must be the source of the
- template as a string. The filename should be the name of the
- file on the filesystem if it was loaded from there, otherwise
- ``None``. The filename is used by Python for the tracebacks
- if no loader extension is used.
-
- The last item in the tuple is the `uptodate` function. If auto
- reloading is enabled it's always called to check if the template
- changed. No arguments are passed so the function must store the
- old state somewhere (for example in a closure). If it returns `False`
- the template will be reloaded.
- """
- if not self.has_source_access:
- raise RuntimeError(
- f"{self.__class__.__name__} cannot provide access to the source"
- )
- raise TemplateNotFound(template)
-
- def list_templates(self):
- """Iterates over all templates. If the loader does not support that
- it should raise a :exc:`TypeError` which is the default behavior.
- """
- raise TypeError("this loader cannot iterate over all templates")
-
- @internalcode
- def load(self, environment, name, globals=None):
- """Loads a template. This method looks up the template in the cache
- or loads one by calling :meth:`get_source`. Subclasses should not
- override this method as loaders working on collections of other
- loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`)
- will not call this method but `get_source` directly.
- """
- code = None
- if globals is None:
- globals = {}
-
- # first we try to get the source for this template together
- # with the filename and the uptodate function.
- source, filename, uptodate = self.get_source(environment, name)
-
- # try to load the code from the bytecode cache if there is a
- # bytecode cache configured.
- bcc = environment.bytecode_cache
- if bcc is not None:
- bucket = bcc.get_bucket(environment, name, filename, source)
- code = bucket.code
-
- # if we don't have code so far (not cached, no longer up to
- # date) etc. we compile the template
- if code is None:
- code = environment.compile(source, name, filename)
-
- # if the bytecode cache is available and the bucket doesn't
- # have a code so far, we give the bucket the new code and put
- # it back to the bytecode cache.
- if bcc is not None and bucket.code is None:
- bucket.code = code
- bcc.set_bucket(bucket)
-
- return environment.template_class.from_code(
- environment, code, globals, uptodate
- )
-
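To illustrate the load() flow above: when a bytecode cache is configured on the environment, load() consults it before compiling and stores freshly compiled code back into it. A rough sketch (loader path and cache directory are arbitrary), using FileSystemBytecodeCache from jinja2.bccache:

    from jinja2 import Environment, FileSystemLoader, FileSystemBytecodeCache

    env = Environment(
        loader=FileSystemLoader("templates"),
        # consulted by BaseLoader.load() before compiling
        bytecode_cache=FileSystemBytecodeCache("/tmp/jinja_cache"),
    )
    template = env.get_template("index.html")  # goes through BaseLoader.load()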
-
-class FileSystemLoader(BaseLoader):
- """Load templates from a directory in the file system.
-
- The path can be relative or absolute. Relative paths are relative to
- the current working directory.
-
- .. code-block:: python
-
- loader = FileSystemLoader("templates")
-
- A list of paths can be given. The directories will be searched in
- order, stopping at the first matching template.
-
- .. code-block:: python
-
- loader = FileSystemLoader(["/override/templates", "/default/templates"])
-
- :param searchpath: A path, or list of paths, to the directory that
- contains the templates.
- :param encoding: Use this encoding to read the text from template
- files.
- :param followlinks: Follow symbolic links in the path.
-
- .. versionchanged:: 2.8
- Added the ``followlinks`` parameter.
- """
-
- def __init__(self, searchpath, encoding="utf-8", followlinks=False):
- if not isinstance(searchpath, abc.Iterable) or isinstance(searchpath, str):
- searchpath = [searchpath]
-
- self.searchpath = list(searchpath)
- self.encoding = encoding
- self.followlinks = followlinks
-
- def get_source(self, environment, template):
- pieces = split_template_path(template)
- for searchpath in self.searchpath:
- filename = os.path.join(searchpath, *pieces)
- f = open_if_exists(filename)
- if f is None:
- continue
- try:
- contents = f.read().decode(self.encoding)
- finally:
- f.close()
-
- mtime = os.path.getmtime(filename)
-
- def uptodate():
- try:
- return os.path.getmtime(filename) == mtime
- except OSError:
- return False
-
- return contents, filename, uptodate
- raise TemplateNotFound(template)
-
- def list_templates(self):
- found = set()
- for searchpath in self.searchpath:
- walk_dir = os.walk(searchpath, followlinks=self.followlinks)
- for dirpath, _, filenames in walk_dir:
- for filename in filenames:
- template = (
- os.path.join(dirpath, filename)[len(searchpath) :]
- .strip(os.path.sep)
- .replace(os.path.sep, "/")
- )
- if template[:2] == "./":
- template = template[2:]
- if template not in found:
- found.add(template)
- return sorted(found)
-
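A short usage sketch of the search-path and uptodate behavior described above (directories and template name are illustrative):

    from jinja2 import Environment, FileSystemLoader

    loader = FileSystemLoader(["/override/templates", "/default/templates"])
    env = Environment(loader=loader, auto_reload=True)

    # the first directory containing "page.html" wins; with auto_reload enabled
    # the uptodate() closure returned by get_source() is checked and the
    # template is recompiled when the file's mtime changes
    template = env.get_template("page.html")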
-
-class PackageLoader(BaseLoader):
- """Load templates from a directory in a Python package.
-
- :param package_name: Import name of the package that contains the
- template directory.
- :param package_path: Directory within the imported package that
- contains the templates.
- :param encoding: Encoding of template files.
-
- The following example looks up templates in the ``pages`` directory
- within the ``project.ui`` package.
-
- .. code-block:: python
-
- loader = PackageLoader("project.ui", "pages")
-
- Only packages installed as directories (standard pip behavior) or
- zip/egg files (less common) are supported. The Python API for
- introspecting data in packages is too limited to support other
- installation methods the way this loader requires.
-
- There is limited support for :pep:`420` namespace packages. The
- template directory is assumed to only be in one namespace
- contributor. Zip files contributing to a namespace are not
- supported.
-
- .. versionchanged:: 3.0
- No longer uses ``setuptools`` as a dependency.
-
- .. versionchanged:: 3.0
- Limited PEP 420 namespace package support.
- """
-
- def __init__(self, package_name, package_path="templates", encoding="utf-8"):
- if package_path == os.path.curdir:
- package_path = ""
- elif package_path[:2] == os.path.curdir + os.path.sep:
- package_path = package_path[2:]
-
- package_path = os.path.normpath(package_path).rstrip(os.path.sep)
- self.package_path = package_path
- self.package_name = package_name
- self.encoding = encoding
-
- # Make sure the package exists. This also makes namespace
- # packages work, otherwise get_loader returns None.
- import_module(package_name)
- spec = importlib.util.find_spec(package_name)
- self._loader = loader = spec.loader
- self._archive = None
- self._template_root = None
-
- if isinstance(loader, zipimport.zipimporter):
- self._archive = loader.archive
- pkgdir = next(iter(spec.submodule_search_locations))
- self._template_root = os.path.join(pkgdir, package_path)
- elif spec.submodule_search_locations:
- # This will be one element for regular packages and multiple
- # for namespace packages.
- for root in spec.submodule_search_locations:
- root = os.path.join(root, package_path)
-
- if os.path.isdir(root):
- self._template_root = root
- break
-
- if self._template_root is None:
- raise ValueError(
- f"The {package_name!r} package was not installed in a"
- " way that PackageLoader understands."
- )
-
- def get_source(self, environment, template):
- p = os.path.join(self._template_root, *split_template_path(template))
-
- if self._archive is None:
- # Package is a directory.
- if not os.path.isfile(p):
- raise TemplateNotFound(template)
-
- with open(p, "rb") as f:
- source = f.read()
-
- mtime = os.path.getmtime(p)
-
- def up_to_date():
- return os.path.isfile(p) and os.path.getmtime(p) == mtime
-
- else:
- # Package is a zip file.
- try:
- source = self._loader.get_data(p)
- except OSError:
- raise TemplateNotFound(template)
-
- # Could use the zip's mtime for all template mtimes, but
- # would need to safely reload the module if it's out of
- # date, so just report it as always current.
- up_to_date = None
-
- return source.decode(self.encoding), p, up_to_date
-
- def list_templates(self):
- results = []
-
- if self._archive is None:
- # Package is a directory.
- offset = len(self._template_root)
-
- for dirpath, _, filenames in os.walk(self._template_root):
- dirpath = dirpath[offset:].lstrip(os.path.sep)
- results.extend(
- os.path.join(dirpath, name).replace(os.path.sep, "/")
- for name in filenames
- )
- else:
- if not hasattr(self._loader, "_files"):
- raise TypeError(
- "This zip import does not have the required"
- " metadata to list templates."
- )
-
- # Package is a zip file.
- prefix = (
- self._template_root[len(self._archive) :].lstrip(os.path.sep)
- + os.path.sep
- )
- offset = len(prefix)
-
- for name in self._loader._files.keys():
- # Find names under the templates directory that aren't directories.
- if name.startswith(prefix) and name[-1] != os.path.sep:
- results.append(name[offset:].replace(os.path.sep, "/"))
-
- results.sort()
- return results
-
-
-class DictLoader(BaseLoader):
- """Loads a template from a Python dict mapping template names to
- template source. This loader is useful for unittesting:
-
- >>> loader = DictLoader({'index.html': 'source here'})
-
- Because auto reloading is rarely useful this is disabled per default.
- """
-
- def __init__(self, mapping):
- self.mapping = mapping
-
- def get_source(self, environment, template):
- if template in self.mapping:
- source = self.mapping[template]
- return source, None, lambda: source == self.mapping.get(template)
- raise TemplateNotFound(template)
-
- def list_templates(self):
- return sorted(self.mapping)
-
-
-class FunctionLoader(BaseLoader):
- """A loader that is passed a function which does the loading. The
- function receives the name of the template and has to return either
- a string with the template source, a tuple in the form ``(source,
- filename, uptodatefunc)`` or `None` if the template does not exist.
-
- >>> def load_template(name):
- ... if name == 'index.html':
- ... return '...'
- ...
- >>> loader = FunctionLoader(load_template)
-
- The `uptodatefunc` is a function that is called if autoreload is enabled
- and has to return `True` if the template is still up to date. For more
- details have a look at :meth:`BaseLoader.get_source` which has the same
- return value.
- """
-
- def __init__(self, load_func):
- self.load_func = load_func
-
- def get_source(self, environment, template):
- rv = self.load_func(template)
- if rv is None:
- raise TemplateNotFound(template)
- elif isinstance(rv, str):
- return rv, None, None
- return rv
-
-
-class PrefixLoader(BaseLoader):
- """A loader that is passed a dict of loaders where each loader is bound
- to a prefix. The prefix is delimited from the template by a slash per
- default, which can be changed by setting the `delimiter` argument to
- something else::
-
- loader = PrefixLoader({
- 'app1': PackageLoader('mypackage.app1'),
- 'app2': PackageLoader('mypackage.app2')
- })
-
- By loading ``'app1/index.html'`` the file from the app1 package is loaded,
- by loading ``'app2/index.html'`` the file from the second.
- """
-
- def __init__(self, mapping, delimiter="/"):
- self.mapping = mapping
- self.delimiter = delimiter
-
- def get_loader(self, template):
- try:
- prefix, name = template.split(self.delimiter, 1)
- loader = self.mapping[prefix]
- except (ValueError, KeyError):
- raise TemplateNotFound(template)
- return loader, name
-
- def get_source(self, environment, template):
- loader, name = self.get_loader(template)
- try:
- return loader.get_source(environment, name)
- except TemplateNotFound:
- # re-raise the exception with the correct filename here.
- # (the one that includes the prefix)
- raise TemplateNotFound(template)
-
- @internalcode
- def load(self, environment, name, globals=None):
- loader, local_name = self.get_loader(name)
- try:
- return loader.load(environment, local_name, globals)
- except TemplateNotFound:
- # re-raise the exception with the correct filename here.
- # (the one that includes the prefix)
- raise TemplateNotFound(name)
-
- def list_templates(self):
- result = []
- for prefix, loader in self.mapping.items():
- for template in loader.list_templates():
- result.append(prefix + self.delimiter + template)
- return result
-
-
-class ChoiceLoader(BaseLoader):
- """This loader works like the `PrefixLoader` just that no prefix is
- specified. If a template could not be found by one loader the next one
- is tried.
-
- >>> loader = ChoiceLoader([
- ... FileSystemLoader('/path/to/user/templates'),
- ... FileSystemLoader('/path/to/system/templates')
- ... ])
-
- This is useful if you want to allow users to override builtin templates
- from a different location.
- """
-
- def __init__(self, loaders):
- self.loaders = loaders
-
- def get_source(self, environment, template):
- for loader in self.loaders:
- try:
- return loader.get_source(environment, template)
- except TemplateNotFound:
- pass
- raise TemplateNotFound(template)
-
- @internalcode
- def load(self, environment, name, globals=None):
- for loader in self.loaders:
- try:
- return loader.load(environment, name, globals)
- except TemplateNotFound:
- pass
- raise TemplateNotFound(name)
-
- def list_templates(self):
- found = set()
- for loader in self.loaders:
- found.update(loader.list_templates())
- return sorted(found)
-
-
-class _TemplateModule(ModuleType):
- """Like a normal module but with support for weak references"""
-
-
-class ModuleLoader(BaseLoader):
- """This loader loads templates from precompiled templates.
-
- Example usage:
-
- >>> loader = ChoiceLoader([
- ... ModuleLoader('/path/to/compiled/templates'),
- ... FileSystemLoader('/path/to/templates')
- ... ])
-
- Templates can be precompiled with :meth:`Environment.compile_templates`.
- """
-
- has_source_access = False
-
- def __init__(self, path):
- package_name = f"_jinja2_module_templates_{id(self):x}"
-
- # create a fake module that looks for the templates in the
- # path given.
- mod = _TemplateModule(package_name)
-
- if not isinstance(path, abc.Iterable) or isinstance(path, str):
- path = [path]
-
- mod.__path__ = [os.fspath(p) for p in path]
-
- sys.modules[package_name] = weakref.proxy(
- mod, lambda x: sys.modules.pop(package_name, None)
- )
-
- # the only strong reference, the sys.modules entry is weak
- # so that the garbage collector can remove it once the
- # loader that created it goes out of business.
- self.module = mod
- self.package_name = package_name
-
- @staticmethod
- def get_template_key(name):
- return "tmpl_" + sha1(name.encode("utf-8")).hexdigest()
-
- @staticmethod
- def get_module_filename(name):
- return ModuleLoader.get_template_key(name) + ".py"
-
- @internalcode
- def load(self, environment, name, globals=None):
- key = self.get_template_key(name)
- module = f"{self.package_name}.{key}"
- mod = getattr(self.module, module, None)
- if mod is None:
- try:
- mod = __import__(module, None, None, ["root"])
- except ImportError:
- raise TemplateNotFound(name)
-
- # remove the entry from sys.modules, we only want the attribute
- # on the module object we have stored on the loader.
- sys.modules.pop(module, None)
-
- return environment.template_class.from_module_dict(
- environment, mod.__dict__, globals
- )
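A sketch of the precompile-then-load workflow mentioned in the docstring above (directory names are arbitrary; compile_templates with zip=None writes plain Python modules that ModuleLoader can import):

    from jinja2 import ChoiceLoader, Environment, FileSystemLoader, ModuleLoader

    # one-off precompilation step
    src_env = Environment(loader=FileSystemLoader("templates"))
    src_env.compile_templates("compiled_templates", zip=None)

    # at runtime, prefer the precompiled modules and fall back to the sources
    env = Environment(
        loader=ChoiceLoader(
            [ModuleLoader("compiled_templates"), FileSystemLoader("templates")]
        )
    )
    template = env.get_template("index.html")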
diff --git a/src/jinja2/meta.py b/src/jinja2/meta.py
deleted file mode 100644
index 899e179a..00000000
--- a/src/jinja2/meta.py
+++ /dev/null
@@ -1,98 +0,0 @@
-"""Functions that expose information about templates that might be
-interesting for introspection.
-"""
-from . import nodes
-from .compiler import CodeGenerator
-
-
-class TrackingCodeGenerator(CodeGenerator):
- """We abuse the code generator for introspection."""
-
- def __init__(self, environment):
- CodeGenerator.__init__(self, environment, "<introspection>", "<introspection>")
- self.undeclared_identifiers = set()
-
- def write(self, x):
- """Don't write."""
-
- def enter_frame(self, frame):
- """Remember all undeclared identifiers."""
- CodeGenerator.enter_frame(self, frame)
- for _, (action, param) in frame.symbols.loads.items():
- if action == "resolve" and param not in self.environment.globals:
- self.undeclared_identifiers.add(param)
-
-
-def find_undeclared_variables(ast):
- """Returns a set of all variables in the AST that will be looked up from
- the context at runtime. Because at compile time it's not known which
- variables will be used depending on the path the execution takes at
- runtime, all variables are returned.
-
- >>> from jinja2 import Environment, meta
- >>> env = Environment()
- >>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}')
- >>> meta.find_undeclared_variables(ast) == {'bar'}
- True
-
- .. admonition:: Implementation
-
- Internally the code generator is used for finding undeclared variables.
- This is good to know because the code generator might raise a
- :exc:`TemplateAssertionError` during compilation and as a matter of
- fact this function can currently raise that exception as well.
- """
- codegen = TrackingCodeGenerator(ast.environment)
- codegen.visit(ast)
- return codegen.undeclared_identifiers
-
-
-def find_referenced_templates(ast):
- """Finds all the referenced templates from the AST. This will return an
- iterator over all the hardcoded template extensions, inclusions and
- imports. If dynamic inheritance or inclusion is used, `None` will be
- yielded.
-
- >>> from jinja2 import Environment, meta
- >>> env = Environment()
- >>> ast = env.parse('{% extends "layout.html" %}{% include helper %}')
- >>> list(meta.find_referenced_templates(ast))
- ['layout.html', None]
-
- This function is useful for dependency tracking. For example if you want
- to rebuild parts of the website after a layout template has changed.
- """
- for node in ast.find_all(
- (nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include)
- ):
- if not isinstance(node.template, nodes.Const):
- # a tuple with some non consts in there
- if isinstance(node.template, (nodes.Tuple, nodes.List)):
- for template_name in node.template.items:
- # something const, only yield the strings and ignore
- # non-string consts that really just make no sense
- if isinstance(template_name, nodes.Const):
- if isinstance(template_name.value, str):
- yield template_name.value
- # something dynamic in there
- else:
- yield None
- # something dynamic we don't know about here
- else:
- yield None
- continue
- # constant is a basestring, direct template name
- if isinstance(node.template.value, str):
- yield node.template.value
- # a tuple or list (latter *should* not happen) made of consts,
- # yield the consts that are strings. We could warn here for
- # non string values
- elif isinstance(node, nodes.Include) and isinstance(
- node.template.value, (tuple, list)
- ):
- for template_name in node.template.value:
- if isinstance(template_name, str):
- yield template_name
- # something else we don't care about, we could warn here
- else:
- yield None
diff --git a/src/jinja2/nativetypes.py b/src/jinja2/nativetypes.py
deleted file mode 100644
index 5ecf72b5..00000000
--- a/src/jinja2/nativetypes.py
+++ /dev/null
@@ -1,93 +0,0 @@
-from ast import literal_eval
-from itertools import chain
-from itertools import islice
-
-from . import nodes
-from .compiler import CodeGenerator
-from .compiler import has_safe_repr
-from .environment import Environment
-from .environment import Template
-
-
-def native_concat(nodes):
- """Return a native Python type from the list of compiled nodes. If
- the result is a single node, its value is returned. Otherwise, the
- nodes are concatenated as strings. If the result can be parsed with
- :func:`ast.literal_eval`, the parsed value is returned. Otherwise,
- the string is returned.
-
- :param nodes: Iterable of nodes to concatenate.
- """
- head = list(islice(nodes, 2))
-
- if not head:
- return None
-
- if len(head) == 1:
- raw = head[0]
- else:
- raw = "".join([str(v) for v in chain(head, nodes)])
-
- try:
- return literal_eval(raw)
- except (ValueError, SyntaxError, MemoryError):
- return raw
-
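A minimal sketch of native_concat's behavior on a few hand-built inputs:

    from jinja2.nativetypes import native_concat

    print(native_concat(iter(["1", "2"])))         # 12 (int, parsed by literal_eval)
    print(native_concat(iter(["[1, ", "2]"])))     # [1, 2] (parsed list)
    print(native_concat(iter(["not a literal"])))  # 'not a literal' (falls back to the string)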
-
-class NativeCodeGenerator(CodeGenerator):
- """A code generator which renders Python types by not adding
- ``str()`` around output nodes.
- """
-
- @staticmethod
- def _default_finalize(value):
- return value
-
- def _output_const_repr(self, group):
- return repr("".join([str(v) for v in group]))
-
- def _output_child_to_const(self, node, frame, finalize):
- const = node.as_const(frame.eval_ctx)
-
- if not has_safe_repr(const):
- raise nodes.Impossible()
-
- if isinstance(node, nodes.TemplateData):
- return const
-
- return finalize.const(const)
-
- def _output_child_pre(self, node, frame, finalize):
- if finalize.src is not None:
- self.write(finalize.src)
-
- def _output_child_post(self, node, frame, finalize):
- if finalize.src is not None:
- self.write(")")
-
-
-class NativeEnvironment(Environment):
- """An environment that renders templates to native Python types."""
-
- code_generator_class = NativeCodeGenerator
-
-
-class NativeTemplate(Template):
- environment_class = NativeEnvironment
-
- def render(self, *args, **kwargs):
- """Render the template to produce a native Python type. If the
- result is a single node, its value is returned. Otherwise, the
- nodes are concatenated as strings. If the result can be parsed
- with :func:`ast.literal_eval`, the parsed value is returned.
- Otherwise, the string is returned.
- """
- vars = dict(*args, **kwargs)
-
- try:
- return native_concat(self.root_render_func(self.new_context(vars)))
- except Exception:
- return self.environment.handle_exception()
-
-
-NativeEnvironment.template_class = NativeTemplate
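The resulting environment renders to native Python types rather than strings; a quick sketch:

    from jinja2.nativetypes import NativeEnvironment

    env = NativeEnvironment()
    result = env.from_string("{{ x + y }}").render(x=2, y=3)
    print(result, type(result))  # 5 <class 'int'>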
diff --git a/src/jinja2/nodes.py b/src/jinja2/nodes.py
deleted file mode 100644
index d5133f75..00000000
--- a/src/jinja2/nodes.py
+++ /dev/null
@@ -1,1052 +0,0 @@
-"""AST nodes generated by the parser for the compiler. Also provides
-some node tree helper functions used by the parser and compiler in order
-to normalize nodes.
-"""
-import operator
-from collections import deque
-
-from markupsafe import Markup
-
-_binop_to_func = {
- "*": operator.mul,
- "/": operator.truediv,
- "//": operator.floordiv,
- "**": operator.pow,
- "%": operator.mod,
- "+": operator.add,
- "-": operator.sub,
-}
-
-_uaop_to_func = {
- "not": operator.not_,
- "+": operator.pos,
- "-": operator.neg,
-}
-
-_cmpop_to_func = {
- "eq": operator.eq,
- "ne": operator.ne,
- "gt": operator.gt,
- "gteq": operator.ge,
- "lt": operator.lt,
- "lteq": operator.le,
- "in": lambda a, b: a in b,
- "notin": lambda a, b: a not in b,
-}
-
-
-class Impossible(Exception):
- """Raised if the node could not perform a requested action."""
-
-
-class NodeType(type):
- """A metaclass for nodes that handles the field and attribute
-    inheritance. Fields and attributes from the parent class are
- automatically forwarded to the child."""
-
- def __new__(mcs, name, bases, d):
- for attr in "fields", "attributes":
- storage = []
- storage.extend(getattr(bases[0] if bases else object, attr, ()))
- storage.extend(d.get(attr, ()))
- assert len(bases) <= 1, "multiple inheritance not allowed"
- assert len(storage) == len(set(storage)), "layout conflict"
- d[attr] = tuple(storage)
- d.setdefault("abstract", False)
- return type.__new__(mcs, name, bases, d)
-
-
-class EvalContext:
- """Holds evaluation time information. Custom attributes can be attached
- to it in extensions.
- """
-
- def __init__(self, environment, template_name=None):
- self.environment = environment
- if callable(environment.autoescape):
- self.autoescape = environment.autoescape(template_name)
- else:
- self.autoescape = environment.autoescape
- self.volatile = False
-
- def save(self):
- return self.__dict__.copy()
-
- def revert(self, old):
- self.__dict__.clear()
- self.__dict__.update(old)
-
-
-def get_eval_context(node, ctx):
- if ctx is None:
- if node.environment is None:
- raise RuntimeError(
- "if no eval context is passed, the node must have an"
- " attached environment."
- )
- return EvalContext(node.environment)
- return ctx
-
-
-class Node(metaclass=NodeType):
- """Baseclass for all Jinja nodes. There are a number of nodes available
- of different types. There are four major types:
-
- - :class:`Stmt`: statements
- - :class:`Expr`: expressions
- - :class:`Helper`: helper nodes
- - :class:`Template`: the outermost wrapper node
-
- All nodes have fields and attributes. Fields may be other nodes, lists,
- or arbitrary values. Fields are passed to the constructor as regular
- positional arguments, attributes as keyword arguments. Each node has
- two attributes: `lineno` (the line number of the node) and `environment`.
- The `environment` attribute is set at the end of the parsing process for
- all nodes automatically.
- """
-
- fields = ()
- attributes = ("lineno", "environment")
- abstract = True
-
- def __init__(self, *fields, **attributes):
- if self.abstract:
- raise TypeError("abstract nodes are not instantiable")
- if fields:
- if len(fields) != len(self.fields):
- if not self.fields:
- raise TypeError(f"{self.__class__.__name__!r} takes 0 arguments")
- raise TypeError(
- f"{self.__class__.__name__!r} takes 0 or {len(self.fields)}"
- f" argument{'s' if len(self.fields) != 1 else ''}"
- )
- for name, arg in zip(self.fields, fields):
- setattr(self, name, arg)
- for attr in self.attributes:
- setattr(self, attr, attributes.pop(attr, None))
- if attributes:
- raise TypeError(f"unknown attribute {next(iter(attributes))!r}")
-
- def iter_fields(self, exclude=None, only=None):
- """This method iterates over all fields that are defined and yields
- ``(key, value)`` tuples. Per default all fields are returned, but
- it's possible to limit that to some fields by providing the `only`
- parameter or to exclude some using the `exclude` parameter. Both
- should be sets or tuples of field names.
- """
- for name in self.fields:
- if (
- (exclude is only is None)
- or (exclude is not None and name not in exclude)
- or (only is not None and name in only)
- ):
- try:
- yield name, getattr(self, name)
- except AttributeError:
- pass
-
- def iter_child_nodes(self, exclude=None, only=None):
- """Iterates over all direct child nodes of the node. This iterates
-        over all fields and yields the values if they are nodes. If the value
- of a field is a list all the nodes in that list are returned.
- """
- for _, item in self.iter_fields(exclude, only):
- if isinstance(item, list):
- for n in item:
- if isinstance(n, Node):
- yield n
- elif isinstance(item, Node):
- yield item
-
- def find(self, node_type):
- """Find the first node of a given type. If no such node exists the
- return value is `None`.
- """
- for result in self.find_all(node_type):
- return result
-
- def find_all(self, node_type):
- """Find all the nodes of a given type. If the type is a tuple,
- the check is performed for any of the tuple items.
- """
- for child in self.iter_child_nodes():
- if isinstance(child, node_type):
- yield child
- yield from child.find_all(node_type)
-
- def set_ctx(self, ctx):
- """Reset the context of a node and all child nodes. Per default the
- parser will all generate nodes that have a 'load' context as it's the
- most common one. This method is used in the parser to set assignment
- targets and other nodes to a store context.
- """
- todo = deque([self])
- while todo:
- node = todo.popleft()
- if "ctx" in node.fields:
- node.ctx = ctx
- todo.extend(node.iter_child_nodes())
- return self
-
- def set_lineno(self, lineno, override=False):
- """Set the line numbers of the node and children."""
- todo = deque([self])
- while todo:
- node = todo.popleft()
- if "lineno" in node.attributes:
- if node.lineno is None or override:
- node.lineno = lineno
- todo.extend(node.iter_child_nodes())
- return self
-
- def set_environment(self, environment):
- """Set the environment for all nodes."""
- todo = deque([self])
- while todo:
- node = todo.popleft()
- node.environment = environment
- todo.extend(node.iter_child_nodes())
- return self
-
- def __eq__(self, other):
- if type(self) is not type(other):
- return NotImplemented
-
- return tuple(self.iter_fields()) == tuple(other.iter_fields())
-
- def __hash__(self):
- return hash(tuple(self.iter_fields()))
-
- def __repr__(self):
- args_str = ", ".join(f"{a}={getattr(self, a, None)!r}" for a in self.fields)
- return f"{self.__class__.__name__}({args_str})"
-
- def dump(self):
- def _dump(node):
- if not isinstance(node, Node):
- buf.append(repr(node))
- return
-
- buf.append(f"nodes.{node.__class__.__name__}(")
- if not node.fields:
- buf.append(")")
- return
- for idx, field in enumerate(node.fields):
- if idx:
- buf.append(", ")
- value = getattr(node, field)
- if isinstance(value, list):
- buf.append("[")
- for idx, item in enumerate(value):
- if idx:
- buf.append(", ")
- _dump(item)
- buf.append("]")
- else:
- _dump(value)
- buf.append(")")
-
- buf = []
- _dump(self)
- return "".join(buf)
-
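A small sketch of how the traversal helpers above are typically used on a parsed tree (the template source is arbitrary):

    from jinja2 import Environment, nodes

    env = Environment()
    ast = env.parse("{% for item in seq %}{{ item.name }}{% endfor %}")

    # find_all() walks child nodes recursively; Name nodes carry identifiers
    print(sorted({n.name for n in ast.find_all(nodes.Name)}))  # ['item', 'seq']
    print(ast.find(nodes.For) is not None)                     # True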
-
-class Stmt(Node):
- """Base node for all statements."""
-
- abstract = True
-
-
-class Helper(Node):
- """Nodes that exist in a specific context only."""
-
- abstract = True
-
-
-class Template(Node):
- """Node that represents a template. This must be the outermost node that
- is passed to the compiler.
- """
-
- fields = ("body",)
-
-
-class Output(Stmt):
- """A node that holds multiple expressions which are then printed out.
- This is used both for the `print` statement and the regular template data.
- """
-
- fields = ("nodes",)
-
-
-class Extends(Stmt):
- """Represents an extends statement."""
-
- fields = ("template",)
-
-
-class For(Stmt):
- """The for loop. `target` is the target for the iteration (usually a
- :class:`Name` or :class:`Tuple`), `iter` the iterable. `body` is a list
- of nodes that are used as loop-body, and `else_` a list of nodes for the
- `else` block. If no else node exists it has to be an empty list.
-
- For filtered nodes an expression can be stored as `test`, otherwise `None`.
- """
-
- fields = ("target", "iter", "body", "else_", "test", "recursive")
-
-
-class If(Stmt):
- """If `test` is true, `body` is rendered, else `else_`."""
-
- fields = ("test", "body", "elif_", "else_")
-
-
-class Macro(Stmt):
- """A macro definition. `name` is the name of the macro, `args` a list of
- arguments and `defaults` a list of defaults if there are any. `body` is
- a list of nodes for the macro body.
- """
-
- fields = ("name", "args", "defaults", "body")
-
-
-class CallBlock(Stmt):
- """Like a macro without a name but a call instead. `call` is called with
-    the unnamed macro this node holds as the `caller` argument.
- """
-
- fields = ("call", "args", "defaults", "body")
-
-
-class FilterBlock(Stmt):
- """Node for filter sections."""
-
- fields = ("body", "filter")
-
-
-class With(Stmt):
- """Specific node for with statements. In older versions of Jinja the
- with statement was implemented on the base of the `Scope` node instead.
-
- .. versionadded:: 2.9.3
- """
-
- fields = ("targets", "values", "body")
-
-
-class Block(Stmt):
- """A node that represents a block."""
-
- fields = ("name", "body", "scoped")
-
-
-class Include(Stmt):
- """A node that represents the include tag."""
-
- fields = ("template", "with_context", "ignore_missing")
-
-
-class Import(Stmt):
- """A node that represents the import tag."""
-
- fields = ("template", "target", "with_context")
-
-
-class FromImport(Stmt):
- """A node that represents the from import tag. It's important to not
- pass unsafe names to the name attribute. The compiler translates the
- attribute lookups directly into getattr calls and does *not* use the
- subscript callback of the interface. As exported variables may not
- start with double underscores (which the parser asserts) this is not a
- problem for regular Jinja code, but if this node is used in an extension
- extra care must be taken.
-
- The list of names may contain tuples if aliases are wanted.
- """
-
- fields = ("template", "names", "with_context")
-
-
-class ExprStmt(Stmt):
- """A statement that evaluates an expression and discards the result."""
-
- fields = ("node",)
-
-
-class Assign(Stmt):
- """Assigns an expression to a target."""
-
- fields = ("target", "node")
-
-
-class AssignBlock(Stmt):
- """Assigns a block to a target."""
-
- fields = ("target", "filter", "body")
-
-
-class Expr(Node):
- """Baseclass for all expressions."""
-
- abstract = True
-
- def as_const(self, eval_ctx=None):
- """Return the value of the expression as constant or raise
- :exc:`Impossible` if this was not possible.
-
- An :class:`EvalContext` can be provided, if none is given
- a default context is created which requires the nodes to have
- an attached environment.
-
- .. versionchanged:: 2.4
- the `eval_ctx` parameter was added.
- """
- raise Impossible()
-
- def can_assign(self):
- """Check if it's possible to assign something to this node."""
- return False
-
-
-class BinExpr(Expr):
- """Baseclass for all binary expressions."""
-
- fields = ("left", "right")
- operator = None
- abstract = True
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- # intercepted operators cannot be folded at compile time
- if (
- self.environment.sandboxed
- and self.operator in self.environment.intercepted_binops
- ):
- raise Impossible()
- f = _binop_to_func[self.operator]
- try:
- return f(self.left.as_const(eval_ctx), self.right.as_const(eval_ctx))
- except Exception:
- raise Impossible()
-
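To illustrate the constant folding that as_const() provides for the concrete operator nodes below (a sketch; the expression is arbitrary and the tree has its environment attached by env.parse()):

    from jinja2 import Environment

    env = Environment()
    # body[0] is the Output statement, nodes[0] the parsed Add expression
    expr = env.parse("{{ 1 + 2 * 3 }}").body[0].nodes[0]
    print(expr.as_const())  # 7, folded without rendering the template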
-
-class UnaryExpr(Expr):
- """Baseclass for all unary expressions."""
-
- fields = ("node",)
- operator = None
- abstract = True
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- # intercepted operators cannot be folded at compile time
- if (
- self.environment.sandboxed
- and self.operator in self.environment.intercepted_unops
- ):
- raise Impossible()
- f = _uaop_to_func[self.operator]
- try:
- return f(self.node.as_const(eval_ctx))
- except Exception:
- raise Impossible()
-
-
-class Name(Expr):
- """Looks up a name or stores a value in a name.
- The `ctx` of the node can be one of the following values:
-
- - `store`: store a value in the name
- - `load`: load that name
-    - `param`: like `store` but for a name defined as a function parameter.
- """
-
- fields = ("name", "ctx")
-
- def can_assign(self):
- return self.name not in ("true", "false", "none", "True", "False", "None")
-
-
-class NSRef(Expr):
- """Reference to a namespace value assignment"""
-
- fields = ("name", "attr")
-
- def can_assign(self):
- # We don't need any special checks here; NSRef assignments have a
- # runtime check to ensure the target is a namespace object which will
- # have been checked already as it is created using a normal assignment
- # which goes through a `Name` node.
- return True
-
-
-class Literal(Expr):
- """Baseclass for literals."""
-
- abstract = True
-
-
-class Const(Literal):
- """All constant values. The parser will return this node for simple
- constants such as ``42`` or ``"foo"`` but it can be used to store more
- complex values such as lists too. Only constants with a safe
-    representation (objects where ``eval(repr(x)) == x`` is true) are supported.
- """
-
- fields = ("value",)
-
- def as_const(self, eval_ctx=None):
- return self.value
-
- @classmethod
- def from_untrusted(cls, value, lineno=None, environment=None):
- """Return a const object if the value is representable as
- constant value in the generated code, otherwise it will raise
- an `Impossible` exception.
- """
- from .compiler import has_safe_repr
-
- if not has_safe_repr(value):
- raise Impossible()
- return cls(value, lineno=lineno, environment=environment)
-
-
-class TemplateData(Literal):
- """A constant template string."""
-
- fields = ("data",)
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- if eval_ctx.volatile:
- raise Impossible()
- if eval_ctx.autoescape:
- return Markup(self.data)
- return self.data
-
-
-class Tuple(Literal):
- """For loop unpacking and some other things like multiple arguments
-    for subscripts. As with :class:`Name`, `ctx` specifies whether the tuple
-    is used for loading or storing the names.
- """
-
- fields = ("items", "ctx")
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return tuple(x.as_const(eval_ctx) for x in self.items)
-
- def can_assign(self):
- for item in self.items:
- if not item.can_assign():
- return False
- return True
-
-
-class List(Literal):
- """Any list literal such as ``[1, 2, 3]``"""
-
- fields = ("items",)
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return [x.as_const(eval_ctx) for x in self.items]
-
-
-class Dict(Literal):
- """Any dict literal such as ``{1: 2, 3: 4}``. The items must be a list of
- :class:`Pair` nodes.
- """
-
- fields = ("items",)
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return dict(x.as_const(eval_ctx) for x in self.items)
-
-
-class Pair(Helper):
- """A key, value pair for dicts."""
-
- fields = ("key", "value")
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx)
-
-
-class Keyword(Helper):
- """A key, value pair for keyword arguments where key is a string."""
-
- fields = ("key", "value")
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return self.key, self.value.as_const(eval_ctx)
-
-
-class CondExpr(Expr):
- """A conditional expression (inline if expression). (``{{
- foo if bar else baz }}``)
- """
-
- fields = ("test", "expr1", "expr2")
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- if self.test.as_const(eval_ctx):
- return self.expr1.as_const(eval_ctx)
-
- # if we evaluate to an undefined object, we better do that at runtime
- if self.expr2 is None:
- raise Impossible()
-
- return self.expr2.as_const(eval_ctx)
-
-
-def args_as_const(node, eval_ctx):
- args = [x.as_const(eval_ctx) for x in node.args]
- kwargs = dict(x.as_const(eval_ctx) for x in node.kwargs)
-
- if node.dyn_args is not None:
- try:
- args.extend(node.dyn_args.as_const(eval_ctx))
- except Exception:
- raise Impossible()
-
- if node.dyn_kwargs is not None:
- try:
- kwargs.update(node.dyn_kwargs.as_const(eval_ctx))
- except Exception:
- raise Impossible()
-
- return args, kwargs
-
-
-class Filter(Expr):
- """This node applies a filter on an expression. `name` is the name of
- the filter, the rest of the fields are the same as for :class:`Call`.
-
- If the `node` of a filter is `None` the contents of the last buffer are
- filtered. Buffers are created by macros and filter blocks.
- """
-
- fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs")
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
-
- if eval_ctx.volatile or self.node is None:
- raise Impossible()
-
- filter_ = self.environment.filters.get(self.name)
-
- if filter_ is None or getattr(filter_, "contextfilter", False) is True:
- raise Impossible()
-
- # We cannot constant handle async filters, so we need to make sure
- # to not go down this path.
- if eval_ctx.environment.is_async and getattr(
- filter_, "asyncfiltervariant", False
- ):
- raise Impossible()
-
- args, kwargs = args_as_const(self, eval_ctx)
- args.insert(0, self.node.as_const(eval_ctx))
-
- if getattr(filter_, "evalcontextfilter", False) is True:
- args.insert(0, eval_ctx)
- elif getattr(filter_, "environmentfilter", False) is True:
- args.insert(0, self.environment)
-
- try:
- return filter_(*args, **kwargs)
- except Exception:
- raise Impossible()
-
-
-class Test(Expr):
- """Applies a test on an expression. `name` is the name of the test, the
- rest of the fields are the same as for :class:`Call`.
- """
-
- fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs")
-
- def as_const(self, eval_ctx=None):
- test = self.environment.tests.get(self.name)
-
- if test is None:
- raise Impossible()
-
- eval_ctx = get_eval_context(self, eval_ctx)
- args, kwargs = args_as_const(self, eval_ctx)
- args.insert(0, self.node.as_const(eval_ctx))
-
- try:
- return test(*args, **kwargs)
- except Exception:
- raise Impossible()
-
-
-class Call(Expr):
- """Calls an expression. `args` is a list of arguments, `kwargs` a list
- of keyword arguments (list of :class:`Keyword` nodes), and `dyn_args`
-    and `dyn_kwargs` have to be either `None` or a node that is used as
-    the node for dynamic positional (``*args``) or keyword (``**kwargs``)
- arguments.
- """
-
- fields = ("node", "args", "kwargs", "dyn_args", "dyn_kwargs")
-
-
-class Getitem(Expr):
- """Get an attribute or item from an expression and prefer the item."""
-
- fields = ("node", "arg", "ctx")
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- if self.ctx != "load":
- raise Impossible()
- try:
- return self.environment.getitem(
- self.node.as_const(eval_ctx), self.arg.as_const(eval_ctx)
- )
- except Exception:
- raise Impossible()
-
- def can_assign(self):
- return False
-
-
-class Getattr(Expr):
- """Get an attribute or item from an expression that is a ascii-only
- bytestring and prefer the attribute.
- """
-
- fields = ("node", "attr", "ctx")
-
- def as_const(self, eval_ctx=None):
- if self.ctx != "load":
- raise Impossible()
- try:
- eval_ctx = get_eval_context(self, eval_ctx)
- return self.environment.getattr(self.node.as_const(eval_ctx), self.attr)
- except Exception:
- raise Impossible()
-
- def can_assign(self):
- return False
-
-
-class Slice(Expr):
- """Represents a slice object. This must only be used as argument for
- :class:`Subscript`.
- """
-
- fields = ("start", "stop", "step")
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
-
- def const(obj):
- if obj is None:
- return None
- return obj.as_const(eval_ctx)
-
- return slice(const(self.start), const(self.stop), const(self.step))
-
-
-class Concat(Expr):
- """Concatenates the list of expressions provided after converting
- them to strings.
- """
-
- fields = ("nodes",)
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return "".join(str(x.as_const(eval_ctx)) for x in self.nodes)
-
-
-class Compare(Expr):
- """Compares an expression with some other expressions. `ops` must be a
- list of :class:`Operand`\\s.
- """
-
- fields = ("expr", "ops")
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- result = value = self.expr.as_const(eval_ctx)
-
- try:
- for op in self.ops:
- new_value = op.expr.as_const(eval_ctx)
- result = _cmpop_to_func[op.op](value, new_value)
-
- if not result:
- return False
-
- value = new_value
- except Exception:
- raise Impossible()
-
- return result
-
-
-class Operand(Helper):
- """Holds an operator and an expression."""
-
- fields = ("op", "expr")
-
-
-class Mul(BinExpr):
- """Multiplies the left with the right node."""
-
- operator = "*"
-
-
-class Div(BinExpr):
- """Divides the left by the right node."""
-
- operator = "/"
-
-
-class FloorDiv(BinExpr):
- """Divides the left by the right node and truncates conver the
- result into an integer by truncating.
- """
-
- operator = "//"
-
-
-class Add(BinExpr):
- """Add the left to the right node."""
-
- operator = "+"
-
-
-class Sub(BinExpr):
- """Subtract the right from the left node."""
-
- operator = "-"
-
-
-class Mod(BinExpr):
- """Left modulo right."""
-
- operator = "%"
-
-
-class Pow(BinExpr):
- """Left to the power of right."""
-
- operator = "**"
-
-
-class And(BinExpr):
- """Short circuited AND."""
-
- operator = "and"
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return self.left.as_const(eval_ctx) and self.right.as_const(eval_ctx)
-
-
-class Or(BinExpr):
- """Short circuited OR."""
-
- operator = "or"
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return self.left.as_const(eval_ctx) or self.right.as_const(eval_ctx)
-
-
-class Not(UnaryExpr):
- """Negate the expression."""
-
- operator = "not"
-
-
-class Neg(UnaryExpr):
- """Make the expression negative."""
-
- operator = "-"
-
-
-class Pos(UnaryExpr):
- """Make the expression positive (noop for most expressions)"""
-
- operator = "+"
-
-
-# Helpers for extensions
-
-
-class EnvironmentAttribute(Expr):
- """Loads an attribute from the environment object. This is useful for
- extensions that want to call a callback stored on the environment.
- """
-
- fields = ("name",)
-
-
-class ExtensionAttribute(Expr):
- """Returns the attribute of an extension bound to the environment.
- The identifier is the identifier of the :class:`Extension`.
-
- This node is usually constructed by calling the
- :meth:`~jinja2.ext.Extension.attr` method on an extension.
- """
-
- fields = ("identifier", "name")
-
-
-class ImportedName(Expr):
- """If created with an import name the import name is returned on node
- access. For example ``ImportedName('cgi.escape')`` returns the `escape`
- function from the cgi module on evaluation. Imports are optimized by the
- compiler so there is no need to assign them to local variables.
- """
-
- fields = ("importname",)
-
-
-class InternalName(Expr):
- """An internal name in the compiler. You cannot create these nodes
- yourself but the parser provides a
- :meth:`~jinja2.parser.Parser.free_identifier` method that creates
- a new identifier for you. This identifier is not available from the
-    template and is not treated specially by the compiler.
- """
-
- fields = ("name",)
-
- def __init__(self):
- raise TypeError(
- "Can't create internal names. Use the "
- "`free_identifier` method on a parser."
- )
-
-
-class MarkSafe(Expr):
- """Mark the wrapped expression as safe (wrap it as `Markup`)."""
-
- fields = ("expr",)
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- return Markup(self.expr.as_const(eval_ctx))
-
-
-class MarkSafeIfAutoescape(Expr):
- """Mark the wrapped expression as safe (wrap it as `Markup`) but
- only if autoescaping is active.
-
- .. versionadded:: 2.5
- """
-
- fields = ("expr",)
-
- def as_const(self, eval_ctx=None):
- eval_ctx = get_eval_context(self, eval_ctx)
- if eval_ctx.volatile:
- raise Impossible()
- expr = self.expr.as_const(eval_ctx)
- if eval_ctx.autoescape:
- return Markup(expr)
- return expr
-
-
-class ContextReference(Expr):
- """Returns the current template context. It can be used like a
- :class:`Name` node, with a ``'load'`` ctx and will return the
- current :class:`~jinja2.runtime.Context` object.
-
- Here an example that assigns the current template name to a
- variable named `foo`::
-
- Assign(Name('foo', ctx='store'),
- Getattr(ContextReference(), 'name'))
-
- This is basically equivalent to using the
- :func:`~jinja2.contextfunction` decorator when using the
- high-level API, which causes a reference to the context to be passed
- as the first argument to a function.
- """
-
-
-class DerivedContextReference(Expr):
- """Return the current template context including locals. Behaves
- exactly like :class:`ContextReference`, but includes local
- variables, such as from a ``for`` loop.
-
- .. versionadded:: 2.11
- """
-
-
-class Continue(Stmt):
- """Continue a loop."""
-
-
-class Break(Stmt):
- """Break a loop."""
-
-
-class Scope(Stmt):
- """An artificial scope."""
-
- fields = ("body",)
-
-
-class OverlayScope(Stmt):
- """An overlay scope for extensions. This is a largely unoptimized scope
- that however can be used to introduce completely arbitrary variables into
- a sub scope from a dictionary or dictionary like object. The `context`
- field has to evaluate to a dictionary object.
-
- Example usage::
-
- OverlayScope(context=self.call_method('get_context'),
- body=[...])
-
- .. versionadded:: 2.10
- """
-
- fields = ("context", "body")
-
-
-class EvalContextModifier(Stmt):
- """Modifies the eval context. For each option that should be modified,
- a :class:`Keyword` has to be added to the :attr:`options` list.
-
- Example to change the `autoescape` setting::
-
- EvalContextModifier(options=[Keyword('autoescape', Const(True))])
- """
-
- fields = ("options",)
-
-
-class ScopedEvalContextModifier(EvalContextModifier):
- """Modifies the eval context and reverts it later. Works exactly like
- :class:`EvalContextModifier` but will only modify the
- :class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`.
- """
-
- fields = ("body",)
-
-
-# make sure nobody creates custom nodes
-def _failing_new(*args, **kwargs):
- raise TypeError("can't create custom node types")
-
-
-NodeType.__new__ = staticmethod(_failing_new)
-del _failing_new
diff --git a/src/jinja2/optimizer.py b/src/jinja2/optimizer.py
deleted file mode 100644
index 39d059f1..00000000
--- a/src/jinja2/optimizer.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""The optimizer tries to constant fold expressions and modify the AST
-in place so that it should be faster to evaluate.
-
-Because the AST does not contain all the scoping information and the
-compiler has to find that out, we cannot do all the optimizations we
-want. For example, loop unrolling doesn't work because unrolled loops
-would have a different scope. The solution would be a second syntax tree
-that stored the scoping rules.
-"""
-from . import nodes
-from .visitor import NodeTransformer
-
-
-def optimize(node, environment):
- """The context hint can be used to perform an static optimization
- based on the context given."""
- optimizer = Optimizer(environment)
- return optimizer.visit(node)
-
-
-class Optimizer(NodeTransformer):
- def __init__(self, environment):
- self.environment = environment
-
- def generic_visit(self, node, *args, **kwargs):
- node = super().generic_visit(node, *args, **kwargs)
-
- # Do constant folding. Some other nodes besides Expr have
- # as_const, but folding them causes errors later on.
- if isinstance(node, nodes.Expr):
- try:
- return nodes.Const.from_untrusted(
- node.as_const(args[0] if args else None),
- lineno=node.lineno,
- environment=self.environment,
- )
- except nodes.Impossible:
- pass
-
- return node
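A brief sketch of the optimizer in action; it is normally invoked by the compiler, but can be driven directly on a parsed tree:

    from jinja2 import Environment, nodes
    from jinja2.optimizer import optimize

    env = Environment()
    ast = env.parse("{{ 1 + 2 }}")
    optimized = optimize(ast, env)

    # the Add expression has been folded into a single Const node
    print(isinstance(optimized.body[0].nodes[0], nodes.Const))  # True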
diff --git a/src/jinja2/parser.py b/src/jinja2/parser.py
deleted file mode 100644
index eedea7a0..00000000
--- a/src/jinja2/parser.py
+++ /dev/null
@@ -1,934 +0,0 @@
-"""Parse tokens from the lexer into nodes for the compiler."""
-from . import nodes
-from .exceptions import TemplateAssertionError
-from .exceptions import TemplateSyntaxError
-from .lexer import describe_token
-from .lexer import describe_token_expr
-
-_statement_keywords = frozenset(
- [
- "for",
- "if",
- "block",
- "extends",
- "print",
- "macro",
- "include",
- "from",
- "import",
- "set",
- "with",
- "autoescape",
- ]
-)
-_compare_operators = frozenset(["eq", "ne", "lt", "lteq", "gt", "gteq"])
-
-_math_nodes = {
- "add": nodes.Add,
- "sub": nodes.Sub,
- "mul": nodes.Mul,
- "div": nodes.Div,
- "floordiv": nodes.FloorDiv,
- "mod": nodes.Mod,
-}
-
-
-class Parser:
- """This is the central parsing class Jinja uses. It's passed to
- extensions and can be used to parse expressions or statements.
- """
-
- def __init__(self, environment, source, name=None, filename=None, state=None):
- self.environment = environment
- self.stream = environment._tokenize(source, name, filename, state)
- self.name = name
- self.filename = filename
- self.closed = False
- self.extensions = {}
- for extension in environment.iter_extensions():
- for tag in extension.tags:
- self.extensions[tag] = extension.parse
- self._last_identifier = 0
- self._tag_stack = []
- self._end_token_stack = []
-
- def fail(self, msg, lineno=None, exc=TemplateSyntaxError):
- """Convenience method that raises `exc` with the message, passed
- line number or last line number as well as the current name and
- filename.
- """
- if lineno is None:
- lineno = self.stream.current.lineno
- raise exc(msg, lineno, self.name, self.filename)
-
- def _fail_ut_eof(self, name, end_token_stack, lineno):
- expected = []
- for exprs in end_token_stack:
- expected.extend(map(describe_token_expr, exprs))
- if end_token_stack:
- currently_looking = " or ".join(
- map(repr, map(describe_token_expr, end_token_stack[-1]))
- )
- else:
- currently_looking = None
-
- if name is None:
- message = ["Unexpected end of template."]
- else:
- message = [f"Encountered unknown tag {name!r}."]
-
- if currently_looking:
- if name is not None and name in expected:
- message.append(
- "You probably made a nesting mistake. Jinja is expecting this tag,"
- f" but currently looking for {currently_looking}."
- )
- else:
- message.append(
- f"Jinja was looking for the following tags: {currently_looking}."
- )
-
- if self._tag_stack:
- message.append(
- "The innermost block that needs to be closed is"
- f" {self._tag_stack[-1]!r}."
- )
-
- self.fail(" ".join(message), lineno)
-
- def fail_unknown_tag(self, name, lineno=None):
- """Called if the parser encounters an unknown tag. Tries to fail
- with a human readable error message that could help to identify
- the problem.
- """
- return self._fail_ut_eof(name, self._end_token_stack, lineno)
-
- def fail_eof(self, end_tokens=None, lineno=None):
- """Like fail_unknown_tag but for end of template situations."""
- stack = list(self._end_token_stack)
- if end_tokens is not None:
- stack.append(end_tokens)
- return self._fail_ut_eof(None, stack, lineno)
-
- def is_tuple_end(self, extra_end_rules=None):
- """Are we at the end of a tuple?"""
- if self.stream.current.type in ("variable_end", "block_end", "rparen"):
- return True
- elif extra_end_rules is not None:
- return self.stream.current.test_any(extra_end_rules)
- return False
-
- def free_identifier(self, lineno=None):
- """Return a new free identifier as :class:`~jinja2.nodes.InternalName`."""
- self._last_identifier += 1
- rv = object.__new__(nodes.InternalName)
- nodes.Node.__init__(rv, f"fi{self._last_identifier}", lineno=lineno)
- return rv
-
- def parse_statement(self):
- """Parse a single statement."""
- token = self.stream.current
- if token.type != "name":
- self.fail("tag name expected", token.lineno)
- self._tag_stack.append(token.value)
- pop_tag = True
- try:
- if token.value in _statement_keywords:
- return getattr(self, "parse_" + self.stream.current.value)()
- if token.value == "call":
- return self.parse_call_block()
- if token.value == "filter":
- return self.parse_filter_block()
- ext = self.extensions.get(token.value)
- if ext is not None:
- return ext(self)
-
- # did not work out, remove the token we pushed by accident
- # from the stack so that the unknown tag fail function can
- # produce a proper error message.
- self._tag_stack.pop()
- pop_tag = False
- self.fail_unknown_tag(token.value, token.lineno)
- finally:
- if pop_tag:
- self._tag_stack.pop()
-
- def parse_statements(self, end_tokens, drop_needle=False):
- """Parse multiple statements into a list until one of the end tokens
- is reached. This is used to parse the body of statements as it also
- parses template data if appropriate. The parser checks first if the
- current token is a colon and skips it if there is one. Then it checks
-        for the block end and parses until one of the `end_tokens` is
- reached. Per default the active token in the stream at the end of
- the call is the matched end token. If this is not wanted `drop_needle`
- can be set to `True` and the end token is removed.
- """
- # the first token may be a colon for python compatibility
- self.stream.skip_if("colon")
-
- # in the future it would be possible to add whole code sections
- # by adding some sort of end of statement token and parsing those here.
- self.stream.expect("block_end")
- result = self.subparse(end_tokens)
-
- # we reached the end of the template too early, the subparser
- # does not check for this, so we do that now
- if self.stream.current.type == "eof":
- self.fail_eof(end_tokens)
-
- if drop_needle:
- next(self.stream)
- return result
-
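For context, parse_statements() and parse_expression() are the same entry points that extension tags use. A hypothetical tag handler follows (the "cache" tag, the helper method, and the caching behavior are made up for illustration and are not part of this module):

    from jinja2 import nodes
    from jinja2.ext import Extension

    class CacheExtension(Extension):
        tags = {"cache"}

        def parse(self, parser):
            lineno = next(parser.stream).lineno  # consume the 'cache' name token
            args = [parser.parse_expression()]   # one argument: the cache key
            body = parser.parse_statements(
                ("name:endcache",), drop_needle=True  # parse up to {% endcache %}
            )
            return nodes.CallBlock(
                self.call_method("_cache_support", args), [], [], body
            ).set_lineno(lineno)

        def _cache_support(self, key, caller):
            return caller()  # stub: render the body; no real caching here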
- def parse_set(self):
- """Parse an assign statement."""
- lineno = next(self.stream).lineno
- target = self.parse_assign_target(with_namespace=True)
- if self.stream.skip_if("assign"):
- expr = self.parse_tuple()
- return nodes.Assign(target, expr, lineno=lineno)
- filter_node = self.parse_filter(None)
- body = self.parse_statements(("name:endset",), drop_needle=True)
- return nodes.AssignBlock(target, filter_node, body, lineno=lineno)
-
- def parse_for(self):
- """Parse a for loop."""
- lineno = self.stream.expect("name:for").lineno
- target = self.parse_assign_target(extra_end_rules=("name:in",))
- self.stream.expect("name:in")
- iter = self.parse_tuple(
- with_condexpr=False, extra_end_rules=("name:recursive",)
- )
- test = None
- if self.stream.skip_if("name:if"):
- test = self.parse_expression()
- recursive = self.stream.skip_if("name:recursive")
- body = self.parse_statements(("name:endfor", "name:else"))
- if next(self.stream).value == "endfor":
- else_ = []
- else:
- else_ = self.parse_statements(("name:endfor",), drop_needle=True)
- return nodes.For(target, iter, body, else_, test, recursive, lineno=lineno)
-
- def parse_if(self):
- """Parse an if construct."""
- node = result = nodes.If(lineno=self.stream.expect("name:if").lineno)
- while 1:
- node.test = self.parse_tuple(with_condexpr=False)
- node.body = self.parse_statements(("name:elif", "name:else", "name:endif"))
- node.elif_ = []
- node.else_ = []
- token = next(self.stream)
- if token.test("name:elif"):
- node = nodes.If(lineno=self.stream.current.lineno)
- result.elif_.append(node)
- continue
- elif token.test("name:else"):
- result.else_ = self.parse_statements(("name:endif",), drop_needle=True)
- break
- return result
-
- def parse_with(self):
- node = nodes.With(lineno=next(self.stream).lineno)
- targets = []
- values = []
- while self.stream.current.type != "block_end":
- if targets:
- self.stream.expect("comma")
- target = self.parse_assign_target()
- target.set_ctx("param")
- targets.append(target)
- self.stream.expect("assign")
- values.append(self.parse_expression())
- node.targets = targets
- node.values = values
- node.body = self.parse_statements(("name:endwith",), drop_needle=True)
- return node
-
- def parse_autoescape(self):
- node = nodes.ScopedEvalContextModifier(lineno=next(self.stream).lineno)
- node.options = [nodes.Keyword("autoescape", self.parse_expression())]
- node.body = self.parse_statements(("name:endautoescape",), drop_needle=True)
- return nodes.Scope([node])
-
- def parse_block(self):
- node = nodes.Block(lineno=next(self.stream).lineno)
- node.name = self.stream.expect("name").value
- node.scoped = self.stream.skip_if("name:scoped")
-
- # common problem people encounter when switching from django
- # to jinja. we do not support hyphens in block names, so let's
- # raise a nicer error message in that case.
- if self.stream.current.type == "sub":
- self.fail(
- "Block names in Jinja have to be valid Python identifiers and may not"
- " contain hyphens, use an underscore instead."
- )
-
- node.body = self.parse_statements(("name:endblock",), drop_needle=True)
- self.stream.skip_if("name:" + node.name)
- return node
-
- def parse_extends(self):
- node = nodes.Extends(lineno=next(self.stream).lineno)
- node.template = self.parse_expression()
- return node
-
- def parse_import_context(self, node, default):
- if self.stream.current.test_any(
- "name:with", "name:without"
- ) and self.stream.look().test("name:context"):
- node.with_context = next(self.stream).value == "with"
- self.stream.skip()
- else:
- node.with_context = default
- return node
-
- def parse_include(self):
- node = nodes.Include(lineno=next(self.stream).lineno)
- node.template = self.parse_expression()
- if self.stream.current.test("name:ignore") and self.stream.look().test(
- "name:missing"
- ):
- node.ignore_missing = True
- self.stream.skip(2)
- else:
- node.ignore_missing = False
- return self.parse_import_context(node, True)
-
- def parse_import(self):
- node = nodes.Import(lineno=next(self.stream).lineno)
- node.template = self.parse_expression()
- self.stream.expect("name:as")
- node.target = self.parse_assign_target(name_only=True).name
- return self.parse_import_context(node, False)
-
- def parse_from(self):
- node = nodes.FromImport(lineno=next(self.stream).lineno)
- node.template = self.parse_expression()
- self.stream.expect("name:import")
- node.names = []
-
- def parse_context():
- if self.stream.current.value in (
- "with",
- "without",
- ) and self.stream.look().test("name:context"):
- node.with_context = next(self.stream).value == "with"
- self.stream.skip()
- return True
- return False
-
- while 1:
- if node.names:
- self.stream.expect("comma")
- if self.stream.current.type == "name":
- if parse_context():
- break
- target = self.parse_assign_target(name_only=True)
- if target.name.startswith("_"):
- self.fail(
- "names starting with an underline can not be imported",
- target.lineno,
- exc=TemplateAssertionError,
- )
- if self.stream.skip_if("name:as"):
- alias = self.parse_assign_target(name_only=True)
- node.names.append((target.name, alias.name))
- else:
- node.names.append(target.name)
- if parse_context() or self.stream.current.type != "comma":
- break
- else:
- self.stream.expect("name")
- if not hasattr(node, "with_context"):
- node.with_context = False
- return node
-
- def parse_signature(self, node):
- node.args = args = []
- node.defaults = defaults = []
- self.stream.expect("lparen")
- while self.stream.current.type != "rparen":
- if args:
- self.stream.expect("comma")
- arg = self.parse_assign_target(name_only=True)
- arg.set_ctx("param")
- if self.stream.skip_if("assign"):
- defaults.append(self.parse_expression())
- elif defaults:
- self.fail("non-default argument follows default argument")
- args.append(arg)
- self.stream.expect("rparen")
-
- def parse_call_block(self):
- node = nodes.CallBlock(lineno=next(self.stream).lineno)
- if self.stream.current.type == "lparen":
- self.parse_signature(node)
- else:
- node.args = []
- node.defaults = []
-
- node.call = self.parse_expression()
- if not isinstance(node.call, nodes.Call):
- self.fail("expected call", node.lineno)
- node.body = self.parse_statements(("name:endcall",), drop_needle=True)
- return node
-
- def parse_filter_block(self):
- node = nodes.FilterBlock(lineno=next(self.stream).lineno)
- node.filter = self.parse_filter(None, start_inline=True)
- node.body = self.parse_statements(("name:endfilter",), drop_needle=True)
- return node
-
- def parse_macro(self):
- node = nodes.Macro(lineno=next(self.stream).lineno)
- node.name = self.parse_assign_target(name_only=True).name
- self.parse_signature(node)
- node.body = self.parse_statements(("name:endmacro",), drop_needle=True)
- return node
-
- def parse_print(self):
- node = nodes.Output(lineno=next(self.stream).lineno)
- node.nodes = []
- while self.stream.current.type != "block_end":
- if node.nodes:
- self.stream.expect("comma")
- node.nodes.append(self.parse_expression())
- return node
-
- def parse_assign_target(
- self,
- with_tuple=True,
- name_only=False,
- extra_end_rules=None,
- with_namespace=False,
- ):
- """Parse an assignment target. As Jinja allows assignments to
- tuples, this function can parse all allowed assignment targets. By
- default assignments to tuples are parsed; that can be disabled by
- setting `with_tuple` to `False`. If only assignments to names are
- wanted, `name_only` can be set to `True`. The `extra_end_rules`
- parameter is forwarded to the tuple parsing function. If
- `with_namespace` is enabled, a namespace assignment may be parsed.
- """
- if with_namespace and self.stream.look().type == "dot":
- token = self.stream.expect("name")
- next(self.stream) # dot
- attr = self.stream.expect("name")
- target = nodes.NSRef(token.value, attr.value, lineno=token.lineno)
- elif name_only:
- token = self.stream.expect("name")
- target = nodes.Name(token.value, "store", lineno=token.lineno)
- else:
- if with_tuple:
- target = self.parse_tuple(
- simplified=True, extra_end_rules=extra_end_rules
- )
- else:
- target = self.parse_primary()
- target.set_ctx("store")
- if not target.can_assign():
- self.fail(
- f"can't assign to {target.__class__.__name__.lower()!r}", target.lineno
- )
- return target
-
- def parse_expression(self, with_condexpr=True):
- """Parse an expression. Per default all expressions are parsed, if
- the optional `with_condexpr` parameter is set to `False` conditional
- expressions are not parsed.
- """
- if with_condexpr:
- return self.parse_condexpr()
- return self.parse_or()
-
- def parse_condexpr(self):
- lineno = self.stream.current.lineno
- expr1 = self.parse_or()
- while self.stream.skip_if("name:if"):
- expr2 = self.parse_or()
- if self.stream.skip_if("name:else"):
- expr3 = self.parse_condexpr()
- else:
- expr3 = None
- expr1 = nodes.CondExpr(expr2, expr1, expr3, lineno=lineno)
- lineno = self.stream.current.lineno
- return expr1
-
- def parse_or(self):
- lineno = self.stream.current.lineno
- left = self.parse_and()
- while self.stream.skip_if("name:or"):
- right = self.parse_and()
- left = nodes.Or(left, right, lineno=lineno)
- lineno = self.stream.current.lineno
- return left
-
- def parse_and(self):
- lineno = self.stream.current.lineno
- left = self.parse_not()
- while self.stream.skip_if("name:and"):
- right = self.parse_not()
- left = nodes.And(left, right, lineno=lineno)
- lineno = self.stream.current.lineno
- return left
-
- def parse_not(self):
- if self.stream.current.test("name:not"):
- lineno = next(self.stream).lineno
- return nodes.Not(self.parse_not(), lineno=lineno)
- return self.parse_compare()
-
- def parse_compare(self):
- lineno = self.stream.current.lineno
- expr = self.parse_math1()
- ops = []
- while 1:
- token_type = self.stream.current.type
- if token_type in _compare_operators:
- next(self.stream)
- ops.append(nodes.Operand(token_type, self.parse_math1()))
- elif self.stream.skip_if("name:in"):
- ops.append(nodes.Operand("in", self.parse_math1()))
- elif self.stream.current.test("name:not") and self.stream.look().test(
- "name:in"
- ):
- self.stream.skip(2)
- ops.append(nodes.Operand("notin", self.parse_math1()))
- else:
- break
- lineno = self.stream.current.lineno
- if not ops:
- return expr
- return nodes.Compare(expr, ops, lineno=lineno)
-
- def parse_math1(self):
- lineno = self.stream.current.lineno
- left = self.parse_concat()
- while self.stream.current.type in ("add", "sub"):
- cls = _math_nodes[self.stream.current.type]
- next(self.stream)
- right = self.parse_concat()
- left = cls(left, right, lineno=lineno)
- lineno = self.stream.current.lineno
- return left
-
- def parse_concat(self):
- lineno = self.stream.current.lineno
- args = [self.parse_math2()]
- while self.stream.current.type == "tilde":
- next(self.stream)
- args.append(self.parse_math2())
- if len(args) == 1:
- return args[0]
- return nodes.Concat(args, lineno=lineno)
-
- def parse_math2(self):
- lineno = self.stream.current.lineno
- left = self.parse_pow()
- while self.stream.current.type in ("mul", "div", "floordiv", "mod"):
- cls = _math_nodes[self.stream.current.type]
- next(self.stream)
- right = self.parse_pow()
- left = cls(left, right, lineno=lineno)
- lineno = self.stream.current.lineno
- return left
-
- def parse_pow(self):
- lineno = self.stream.current.lineno
- left = self.parse_unary()
- while self.stream.current.type == "pow":
- next(self.stream)
- right = self.parse_unary()
- left = nodes.Pow(left, right, lineno=lineno)
- lineno = self.stream.current.lineno
- return left
-
- def parse_unary(self, with_filter=True):
- token_type = self.stream.current.type
- lineno = self.stream.current.lineno
- if token_type == "sub":
- next(self.stream)
- node = nodes.Neg(self.parse_unary(False), lineno=lineno)
- elif token_type == "add":
- next(self.stream)
- node = nodes.Pos(self.parse_unary(False), lineno=lineno)
- else:
- node = self.parse_primary()
- node = self.parse_postfix(node)
- if with_filter:
- node = self.parse_filter_expr(node)
- return node
-
- def parse_primary(self):
- token = self.stream.current
- if token.type == "name":
- if token.value in ("true", "false", "True", "False"):
- node = nodes.Const(token.value in ("true", "True"), lineno=token.lineno)
- elif token.value in ("none", "None"):
- node = nodes.Const(None, lineno=token.lineno)
- else:
- node = nodes.Name(token.value, "load", lineno=token.lineno)
- next(self.stream)
- elif token.type == "string":
- next(self.stream)
- buf = [token.value]
- lineno = token.lineno
- while self.stream.current.type == "string":
- buf.append(self.stream.current.value)
- next(self.stream)
- node = nodes.Const("".join(buf), lineno=lineno)
- elif token.type in ("integer", "float"):
- next(self.stream)
- node = nodes.Const(token.value, lineno=token.lineno)
- elif token.type == "lparen":
- next(self.stream)
- node = self.parse_tuple(explicit_parentheses=True)
- self.stream.expect("rparen")
- elif token.type == "lbracket":
- node = self.parse_list()
- elif token.type == "lbrace":
- node = self.parse_dict()
- else:
- self.fail(f"unexpected {describe_token(token)!r}", token.lineno)
- return node
-
- def parse_tuple(
- self,
- simplified=False,
- with_condexpr=True,
- extra_end_rules=None,
- explicit_parentheses=False,
- ):
- """Works like `parse_expression` but if multiple expressions are
- delimited by a comma a :class:`~jinja2.nodes.Tuple` node is created.
- This method can also return a plain expression instead of a tuple
- if no commas were found.
-
- The default parsing mode is a full tuple. If `simplified` is `True`,
- only names and literals are parsed. The `with_condexpr` parameter is
- forwarded to :meth:`parse_expression`.
-
- Because tuples do not require delimiters and may end in a bogus comma
- an extra hint is needed that marks the end of a tuple. For example
- for loops support tuples between `for` and `in`. In that case the
- `extra_end_rules` is set to ``['name:in']``.
-
- `explicit_parentheses` is true if the parsing was triggered by an
- expression in parentheses. This is used to figure out if an empty
- tuple is a valid expression or not.
- """
- lineno = self.stream.current.lineno
- if simplified:
- parse = self.parse_primary
- elif with_condexpr:
- parse = self.parse_expression
- else:
-
- def parse():
- return self.parse_expression(with_condexpr=False)
-
- args = []
- is_tuple = False
- while 1:
- if args:
- self.stream.expect("comma")
- if self.is_tuple_end(extra_end_rules):
- break
- args.append(parse())
- if self.stream.current.type == "comma":
- is_tuple = True
- else:
- break
- lineno = self.stream.current.lineno
-
- if not is_tuple:
- if args:
- return args[0]
-
- # if we don't have explicit parentheses, an empty tuple is
- # not a valid expression. Otherwise nothing (literally
- # nothing) in the spot of an expression would be parsed as
- # an empty tuple.
- if not explicit_parentheses:
- self.fail(
- "Expected an expression,"
- f" got {describe_token(self.stream.current)!r}"
- )
-
- return nodes.Tuple(args, "load", lineno=lineno)
-
- def parse_list(self):
- token = self.stream.expect("lbracket")
- items = []
- while self.stream.current.type != "rbracket":
- if items:
- self.stream.expect("comma")
- if self.stream.current.type == "rbracket":
- break
- items.append(self.parse_expression())
- self.stream.expect("rbracket")
- return nodes.List(items, lineno=token.lineno)
-
- def parse_dict(self):
- token = self.stream.expect("lbrace")
- items = []
- while self.stream.current.type != "rbrace":
- if items:
- self.stream.expect("comma")
- if self.stream.current.type == "rbrace":
- break
- key = self.parse_expression()
- self.stream.expect("colon")
- value = self.parse_expression()
- items.append(nodes.Pair(key, value, lineno=key.lineno))
- self.stream.expect("rbrace")
- return nodes.Dict(items, lineno=token.lineno)
-
- def parse_postfix(self, node):
- while 1:
- token_type = self.stream.current.type
- if token_type == "dot" or token_type == "lbracket":
- node = self.parse_subscript(node)
- # calls are valid both after postfix expressions (getattr
- # and getitem) as well as filters and tests
- elif token_type == "lparen":
- node = self.parse_call(node)
- else:
- break
- return node
-
- def parse_filter_expr(self, node):
- while 1:
- token_type = self.stream.current.type
- if token_type == "pipe":
- node = self.parse_filter(node)
- elif token_type == "name" and self.stream.current.value == "is":
- node = self.parse_test(node)
- # calls are valid both after postfix expressions (getattr
- # and getitem) as well as filters and tests
- elif token_type == "lparen":
- node = self.parse_call(node)
- else:
- break
- return node
-
- def parse_subscript(self, node):
- token = next(self.stream)
- if token.type == "dot":
- attr_token = self.stream.current
- next(self.stream)
- if attr_token.type == "name":
- return nodes.Getattr(
- node, attr_token.value, "load", lineno=token.lineno
- )
- elif attr_token.type != "integer":
- self.fail("expected name or number", attr_token.lineno)
- arg = nodes.Const(attr_token.value, lineno=attr_token.lineno)
- return nodes.Getitem(node, arg, "load", lineno=token.lineno)
- if token.type == "lbracket":
- args = []
- while self.stream.current.type != "rbracket":
- if args:
- self.stream.expect("comma")
- args.append(self.parse_subscribed())
- self.stream.expect("rbracket")
- if len(args) == 1:
- arg = args[0]
- else:
- arg = nodes.Tuple(args, "load", lineno=token.lineno)
- return nodes.Getitem(node, arg, "load", lineno=token.lineno)
- self.fail("expected subscript expression", token.lineno)
-
- def parse_subscribed(self):
- lineno = self.stream.current.lineno
-
- if self.stream.current.type == "colon":
- next(self.stream)
- args = [None]
- else:
- node = self.parse_expression()
- if self.stream.current.type != "colon":
- return node
- next(self.stream)
- args = [node]
-
- if self.stream.current.type == "colon":
- args.append(None)
- elif self.stream.current.type not in ("rbracket", "comma"):
- args.append(self.parse_expression())
- else:
- args.append(None)
-
- if self.stream.current.type == "colon":
- next(self.stream)
- if self.stream.current.type not in ("rbracket", "comma"):
- args.append(self.parse_expression())
- else:
- args.append(None)
- else:
- args.append(None)
-
- return nodes.Slice(lineno=lineno, *args)
-
- def parse_call(self, node):
- token = self.stream.expect("lparen")
- args = []
- kwargs = []
- dyn_args = dyn_kwargs = None
- require_comma = False
-
- def ensure(expr):
- if not expr:
- self.fail("invalid syntax for function call expression", token.lineno)
-
- while self.stream.current.type != "rparen":
- if require_comma:
- self.stream.expect("comma")
- # support for trailing comma
- if self.stream.current.type == "rparen":
- break
- if self.stream.current.type == "mul":
- ensure(dyn_args is None and dyn_kwargs is None)
- next(self.stream)
- dyn_args = self.parse_expression()
- elif self.stream.current.type == "pow":
- ensure(dyn_kwargs is None)
- next(self.stream)
- dyn_kwargs = self.parse_expression()
- else:
- if (
- self.stream.current.type == "name"
- and self.stream.look().type == "assign"
- ):
- # Parsing a kwarg
- ensure(dyn_kwargs is None)
- key = self.stream.current.value
- self.stream.skip(2)
- value = self.parse_expression()
- kwargs.append(nodes.Keyword(key, value, lineno=value.lineno))
- else:
- # Parsing an arg
- ensure(dyn_args is None and dyn_kwargs is None and not kwargs)
- args.append(self.parse_expression())
-
- require_comma = True
- self.stream.expect("rparen")
-
- if node is None:
- return args, kwargs, dyn_args, dyn_kwargs
- return nodes.Call(node, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno)
-
- def parse_filter(self, node, start_inline=False):
- while self.stream.current.type == "pipe" or start_inline:
- if not start_inline:
- next(self.stream)
- token = self.stream.expect("name")
- name = token.value
- while self.stream.current.type == "dot":
- next(self.stream)
- name += "." + self.stream.expect("name").value
- if self.stream.current.type == "lparen":
- args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
- else:
- args = []
- kwargs = []
- dyn_args = dyn_kwargs = None
- node = nodes.Filter(
- node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno
- )
- start_inline = False
- return node
-
- def parse_test(self, node):
- token = next(self.stream)
- if self.stream.current.test("name:not"):
- next(self.stream)
- negated = True
- else:
- negated = False
- name = self.stream.expect("name").value
- while self.stream.current.type == "dot":
- next(self.stream)
- name += "." + self.stream.expect("name").value
- dyn_args = dyn_kwargs = None
- kwargs = []
- if self.stream.current.type == "lparen":
- args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
- elif self.stream.current.type in (
- "name",
- "string",
- "integer",
- "float",
- "lparen",
- "lbracket",
- "lbrace",
- ) and not self.stream.current.test_any("name:else", "name:or", "name:and"):
- if self.stream.current.test("name:is"):
- self.fail("You cannot chain multiple tests with is")
- arg_node = self.parse_primary()
- arg_node = self.parse_postfix(arg_node)
- args = [arg_node]
- else:
- args = []
- node = nodes.Test(
- node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno
- )
- if negated:
- node = nodes.Not(node, lineno=token.lineno)
- return node
-
- def subparse(self, end_tokens=None):
- body = []
- data_buffer = []
- add_data = data_buffer.append
-
- if end_tokens is not None:
- self._end_token_stack.append(end_tokens)
-
- def flush_data():
- if data_buffer:
- lineno = data_buffer[0].lineno
- body.append(nodes.Output(data_buffer[:], lineno=lineno))
- del data_buffer[:]
-
- try:
- while self.stream:
- token = self.stream.current
- if token.type == "data":
- if token.value:
- add_data(nodes.TemplateData(token.value, lineno=token.lineno))
- next(self.stream)
- elif token.type == "variable_begin":
- next(self.stream)
- add_data(self.parse_tuple(with_condexpr=True))
- self.stream.expect("variable_end")
- elif token.type == "block_begin":
- flush_data()
- next(self.stream)
- if end_tokens is not None and self.stream.current.test_any(
- *end_tokens
- ):
- return body
- rv = self.parse_statement()
- if isinstance(rv, list):
- body.extend(rv)
- else:
- body.append(rv)
- self.stream.expect("block_end")
- else:
- raise AssertionError("internal parsing error")
-
- flush_data()
- finally:
- if end_tokens is not None:
- self._end_token_stack.pop()
-
- return body
-
- def parse(self):
- """Parse the whole template into a `Template` node."""
- result = nodes.Template(self.subparse(), lineno=1)
- result.set_environment(self.environment)
- return result
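
Taken together, the methods above turn a template source string into a `nodes.Template` tree. A minimal sketch of driving that from the public API, assuming a standard Jinja install:

from jinja2 import Environment
from jinja2.meta import find_undeclared_variables
from jinja2.nodes import Template as TemplateNode

env = Environment()
ast = env.parse("Hello {{ name | upper }}!")  # lexer + Parser.parse() -> AST
assert isinstance(ast, TemplateNode)
print(find_undeclared_variables(ast))         # {'name'}
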
diff --git a/src/jinja2/runtime.py b/src/jinja2/runtime.py
deleted file mode 100644
index 7b5925b1..00000000
--- a/src/jinja2/runtime.py
+++ /dev/null
@@ -1,919 +0,0 @@
-"""The runtime functions and state used by compiled templates."""
-import sys
-from collections import abc
-from itertools import chain
-from types import MethodType
-
-from markupsafe import escape # noqa: F401
-from markupsafe import Markup
-from markupsafe import soft_str
-
-from .exceptions import TemplateNotFound # noqa: F401
-from .exceptions import TemplateRuntimeError # noqa: F401
-from .exceptions import UndefinedError
-from .nodes import EvalContext
-from .utils import concat
-from .utils import evalcontextfunction
-from .utils import internalcode
-from .utils import missing
-from .utils import Namespace # noqa: F401
-from .utils import object_type_repr
-
-# these variables are exported to the template runtime
-exported = [
- "LoopContext",
- "TemplateReference",
- "Macro",
- "Markup",
- "TemplateRuntimeError",
- "missing",
- "concat",
- "escape",
- "markup_join",
- "str_join",
- "identity",
- "TemplateNotFound",
- "Namespace",
- "Undefined",
-]
-
-
-def identity(x):
- """Returns its argument. Useful for certain things in the
- environment.
- """
- return x
-
-
-def markup_join(seq):
- """Concatenation that escapes if necessary and converts to string."""
- buf = []
- iterator = map(soft_str, seq)
- for arg in iterator:
- buf.append(arg)
- if hasattr(arg, "__html__"):
- return Markup("").join(chain(buf, iterator))
- return concat(buf)
-
-
-def str_join(seq):
- """Simple args to string conversion and concatenation."""
- return concat(map(str, seq))
-
-
-def unicode_join(seq):
- import warnings
-
- warnings.warn(
- "This template must be recompiled with at least Jinja 3.0, or"
- " it will fail in 3.1.",
- DeprecationWarning,
- stacklevel=2,
- )
- return str_join(seq)
-
-
-def new_context(
- environment,
- template_name,
- blocks,
- vars=None,
- shared=None,
- globals=None,
- locals=None,
-):
- """Internal helper for context creation."""
- if vars is None:
- vars = {}
- if shared:
- parent = vars
- else:
- parent = dict(globals or (), **vars)
- if locals:
- # if the parent is shared a copy should be created because
- # we don't want to modify the dict passed
- if shared:
- parent = dict(parent)
- for key, value in locals.items():
- if value is not missing:
- parent[key] = value
- return environment.context_class(
- environment, parent, template_name, blocks, globals=globals
- )
-
-
-class TemplateReference:
- """The `self` in templates."""
-
- def __init__(self, context):
- self.__context = context
-
- def __getitem__(self, name):
- blocks = self.__context.blocks[name]
- return BlockReference(name, self.__context, blocks, 0)
-
- def __repr__(self):
- return f"<{self.__class__.__name__} {self.__context.name!r}>"
-
-
-def _get_func(x):
- return getattr(x, "__func__", x)
-
-
-class ContextMeta(type):
- def __new__(mcs, name, bases, d):
- rv = type.__new__(mcs, name, bases, d)
- if bases == ():
- return rv
-
- resolve = _get_func(rv.resolve)
- default_resolve = _get_func(Context.resolve)
- resolve_or_missing = _get_func(rv.resolve_or_missing)
- default_resolve_or_missing = _get_func(Context.resolve_or_missing)
-
- # If we have a changed resolve but no changed default or missing
- # resolve we invert the call logic.
- if (
- resolve is not default_resolve
- and resolve_or_missing is default_resolve_or_missing
- ):
- rv._legacy_resolve_mode = True
- elif (
- resolve is default_resolve
- and resolve_or_missing is default_resolve_or_missing
- ):
- rv._fast_resolve_mode = True
-
- return rv
-
-
-def resolve_or_missing(context, key, missing=missing):
- if key in context.vars:
- return context.vars[key]
- if key in context.parent:
- return context.parent[key]
- return missing
-
-
-@abc.Mapping.register
-class Context(metaclass=ContextMeta):
- """The template context holds the variables of a template. It stores the
- values passed to the template and also the names the template exports.
- Creating instances is neither supported nor useful as it's created
- automatically at various stages of the template evaluation and should not
- be created by hand.
-
- The context is immutable. Modifications on :attr:`parent` **must not**
- happen and modifications on :attr:`vars` are allowed from generated
- template code only. Template filters and global functions marked as
- :func:`contextfunction`\\s get the active context passed as first argument
- and are allowed to access the context read-only.
-
- The template context supports read-only dict operations (`get`,
- `keys`, `values`, `items`, `__getitem__`, `__contains__`).
- Additionally there is a :meth:`resolve`
- method that doesn't fail with a `KeyError` but returns an
- :class:`Undefined` object for missing variables.
- """
-
- # XXX: we want to eventually make this be a deprecation warning and
- # remove it.
- _legacy_resolve_mode = False
- _fast_resolve_mode = False
-
- def __init__(self, environment, parent, name, blocks, globals=None):
- self.parent = parent
- self.vars = {}
- self.environment = environment
- self.eval_ctx = EvalContext(self.environment, name)
- self.exported_vars = set()
- self.name = name
- self.globals_keys = set() if globals is None else set(globals)
-
- # create the initial mapping of blocks. Whenever template inheritance
- # takes place the runtime will update this mapping with the new blocks
- # from the template.
- self.blocks = {k: [v] for k, v in blocks.items()}
-
- # In case we detect the fast resolve mode we can set up an alias
- # here that bypasses the legacy code logic.
- if self._fast_resolve_mode:
- self.resolve_or_missing = MethodType(resolve_or_missing, self)
-
- def super(self, name, current):
- """Render a parent block."""
- try:
- blocks = self.blocks[name]
- index = blocks.index(current) + 1
- blocks[index]
- except LookupError:
- return self.environment.undefined(
- f"there is no parent block called {name!r}.", name="super"
- )
- return BlockReference(name, self, blocks, index)
-
- def get(self, key, default=None):
- """Returns an item from the template context, if it doesn't exist
- `default` is returned.
- """
- try:
- return self[key]
- except KeyError:
- return default
-
- def resolve(self, key):
- """Looks up a variable like `__getitem__` or `get` but returns an
- :class:`Undefined` object with the name of the name looked up.
- """
- if self._legacy_resolve_mode:
- rv = resolve_or_missing(self, key)
- else:
- rv = self.resolve_or_missing(key)
- if rv is missing:
- return self.environment.undefined(name=key)
- return rv
-
- def resolve_or_missing(self, key):
- """Resolves a variable like :meth:`resolve` but returns the
- special `missing` value if it cannot be found.
- """
- if self._legacy_resolve_mode:
- rv = self.resolve(key)
- if isinstance(rv, Undefined):
- rv = missing
- return rv
- return resolve_or_missing(self, key)
-
- def get_exported(self):
- """Get a new dict with the exported variables."""
- return {k: self.vars[k] for k in self.exported_vars}
-
- def get_all(self):
- """Return the complete context as dict including the exported
- variables. For optimization reasons this might not return an
- actual copy, so be careful when using it.
- """
- if not self.vars:
- return self.parent
- if not self.parent:
- return self.vars
- return dict(self.parent, **self.vars)
-
- @internalcode
- def call(__self, __obj, *args, **kwargs): # noqa: B902
- """Call the callable with the arguments and keyword arguments
- provided but inject the active context or environment as first
- argument if the callable is a :func:`contextfunction` or
- :func:`environmentfunction`.
- """
- if __debug__:
- __traceback_hide__ = True # noqa
-
- # Allow callable classes to take a context
- if hasattr(__obj, "__call__"): # noqa: B004
- fn = __obj.__call__
- for fn_type in (
- "contextfunction",
- "evalcontextfunction",
- "environmentfunction",
- ):
- if hasattr(fn, fn_type):
- __obj = fn
- break
-
- if callable(__obj):
- if getattr(__obj, "contextfunction", False) is True:
- args = (__self,) + args
- elif getattr(__obj, "evalcontextfunction", False) is True:
- args = (__self.eval_ctx,) + args
- elif getattr(__obj, "environmentfunction", False) is True:
- args = (__self.environment,) + args
- try:
- return __obj(*args, **kwargs)
- except StopIteration:
- return __self.environment.undefined(
- "value was undefined because a callable raised a"
- " StopIteration exception"
- )
-
- def derived(self, locals=None):
- """Internal helper function to create a derived context. This is
- used in situations where the system needs a new context in the same
- template that is independent.
- """
- context = new_context(
- self.environment, self.name, {}, self.get_all(), True, None, locals
- )
- context.eval_ctx = self.eval_ctx
- context.blocks.update((k, list(v)) for k, v in self.blocks.items())
- return context
-
- def _all(meth): # noqa: B902
- def proxy(self):
- return getattr(self.get_all(), meth)()
-
- proxy.__doc__ = getattr(dict, meth).__doc__
- proxy.__name__ = meth
- return proxy
-
- keys = _all("keys")
- values = _all("values")
- items = _all("items")
- del _all
-
- def __contains__(self, name):
- return name in self.vars or name in self.parent
-
- def __getitem__(self, key):
- """Lookup a variable or raise `KeyError` if the variable is
- undefined.
- """
- item = self.resolve_or_missing(key)
- if item is missing:
- raise KeyError(key)
- return item
-
- def __repr__(self):
- return f"<{self.__class__.__name__} {self.get_all()!r} of {self.name!r}>"
-
-
-class BlockReference:
- """One block on a template reference."""
-
- def __init__(self, name, context, stack, depth):
- self.name = name
- self._context = context
- self._stack = stack
- self._depth = depth
-
- @property
- def super(self):
- """Super the block."""
- if self._depth + 1 >= len(self._stack):
- return self._context.environment.undefined(
- f"there is no parent block called {self.name!r}.", name="super"
- )
- return BlockReference(self.name, self._context, self._stack, self._depth + 1)
-
- @internalcode
- def __call__(self):
- rv = concat(self._stack[self._depth](self._context))
- if self._context.eval_ctx.autoescape:
- rv = Markup(rv)
- return rv
-
-
-class LoopContext:
- """A wrapper iterable for dynamic ``for`` loops, with information
- about the loop and iteration.
- """
-
- #: Current iteration of the loop, starting at 0.
- index0 = -1
-
- _length = None
- _after = missing
- _current = missing
- _before = missing
- _last_changed_value = missing
-
- def __init__(self, iterable, undefined, recurse=None, depth0=0):
- """
- :param iterable: Iterable to wrap.
- :param undefined: :class:`Undefined` class to use for next and
- previous items.
- :param recurse: The function to render the loop body when the
- loop is marked recursive.
- :param depth0: Incremented when looping recursively.
- """
- self._iterable = iterable
- self._iterator = self._to_iterator(iterable)
- self._undefined = undefined
- self._recurse = recurse
- #: How many levels deep a recursive loop currently is, starting at 0.
- self.depth0 = depth0
-
- @staticmethod
- def _to_iterator(iterable):
- return iter(iterable)
-
- @property
- def length(self):
- """Length of the iterable.
-
- If the iterable is a generator or otherwise does not have a
- size, it is eagerly evaluated to get a size.
- """
- if self._length is not None:
- return self._length
-
- try:
- self._length = len(self._iterable)
- except TypeError:
- iterable = list(self._iterator)
- self._iterator = self._to_iterator(iterable)
- self._length = len(iterable) + self.index + (self._after is not missing)
-
- return self._length
-
- def __len__(self):
- return self.length
-
- @property
- def depth(self):
- """How many levels deep a recursive loop currently is, starting at 1."""
- return self.depth0 + 1
-
- @property
- def index(self):
- """Current iteration of the loop, starting at 1."""
- return self.index0 + 1
-
- @property
- def revindex0(self):
- """Number of iterations from the end of the loop, ending at 0.
-
- Requires calculating :attr:`length`.
- """
- return self.length - self.index
-
- @property
- def revindex(self):
- """Number of iterations from the end of the loop, ending at 1.
-
- Requires calculating :attr:`length`.
- """
- return self.length - self.index0
-
- @property
- def first(self):
- """Whether this is the first iteration of the loop."""
- return self.index0 == 0
-
- def _peek_next(self):
- """Return the next element in the iterable, or :data:`missing`
- if the iterable is exhausted. Only peeks one item ahead, caching
- the result in :attr:`_after` for use in subsequent checks. The
- cache is reset when :meth:`__next__` is called.
- """
- if self._after is not missing:
- return self._after
-
- self._after = next(self._iterator, missing)
- return self._after
-
- @property
- def last(self):
- """Whether this is the last iteration of the loop.
-
- Causes the iterable to advance early. See
- :func:`itertools.groupby` for issues this can cause.
- The :func:`groupby` filter avoids that issue.
- """
- return self._peek_next() is missing
-
- @property
- def previtem(self):
- """The item in the previous iteration. Undefined during the
- first iteration.
- """
- if self.first:
- return self._undefined("there is no previous item")
-
- return self._before
-
- @property
- def nextitem(self):
- """The item in the next iteration. Undefined during the last
- iteration.
-
- Causes the iterable to advance early. See
- :func:`itertools.groupby` for issues this can cause.
- The :func:`groupby` filter avoids that issue.
- """
- rv = self._peek_next()
-
- if rv is missing:
- return self._undefined("there is no next item")
-
- return rv
-
- def cycle(self, *args):
- """Return a value from the given args, cycling through based on
- the current :attr:`index0`.
-
- :param args: One or more values to cycle through.
- """
- if not args:
- raise TypeError("no items for cycling given")
-
- return args[self.index0 % len(args)]
-
- def changed(self, *value):
- """Return ``True`` if previously called with a different value
- (including when called for the first time).
-
- :param value: One or more values to compare to the last call.
- """
- if self._last_changed_value != value:
- self._last_changed_value = value
- return True
-
- return False
-
- def __iter__(self):
- return self
-
- def __next__(self):
- if self._after is not missing:
- rv = self._after
- self._after = missing
- else:
- rv = next(self._iterator)
-
- self.index0 += 1
- self._before = self._current
- self._current = rv
- return rv, self
-
- @internalcode
- def __call__(self, iterable):
- """When iterating over nested data, render the body of the loop
- recursively with the given inner iterable data.
-
- The loop must have the ``recursive`` marker for this to work.
- """
- if self._recurse is None:
- raise TypeError(
- "The loop must have the 'recursive' marker to be called recursively."
- )
-
- return self._recurse(iterable, self._recurse, depth=self.depth)
-
- def __repr__(self):
- return f"<{self.__class__.__name__} {self.index}/{self.length}>"
-
-
-class Macro:
- """Wraps a macro function."""
-
- def __init__(
- self,
- environment,
- func,
- name,
- arguments,
- catch_kwargs,
- catch_varargs,
- caller,
- default_autoescape=None,
- ):
- self._environment = environment
- self._func = func
- self._argument_count = len(arguments)
- self.name = name
- self.arguments = arguments
- self.catch_kwargs = catch_kwargs
- self.catch_varargs = catch_varargs
- self.caller = caller
- self.explicit_caller = "caller" in arguments
- if default_autoescape is None:
- default_autoescape = environment.autoescape
- self._default_autoescape = default_autoescape
-
- @internalcode
- @evalcontextfunction
- def __call__(self, *args, **kwargs):
- # This requires a bit of explanation. In the past we used to
- # decide largely based on compile-time information whether a macro is
- # safe or unsafe. While there was a volatile mode it was largely
- # unused for deciding on escaping. This turns out to be
- # problematic for macros because whether a macro is safe depends not
- # on the escape mode when it was defined, but rather when it was used.
- #
- # However, because we export macros from the module system and
- # there are historic callers that do not pass an eval context (and
- # will continue to not pass one), we need to perform an instance
- # check here.
- #
- # This is considered safe because an eval context is not a valid
- # argument to callables otherwise anyway. Worst case here is
- # that if no eval context is passed we fall back to the compile
- # time autoescape flag.
- if args and isinstance(args[0], EvalContext):
- autoescape = args[0].autoescape
- args = args[1:]
- else:
- autoescape = self._default_autoescape
-
- # try to consume the positional arguments
- arguments = list(args[: self._argument_count])
- off = len(arguments)
-
- # For information why this is necessary refer to the handling
- # of caller in the `macro_body` handler in the compiler.
- found_caller = False
-
- # if the number of arguments consumed is not the number of
- # arguments expected we start filling in keyword arguments
- # and defaults.
- if off != self._argument_count:
- for name in self.arguments[len(arguments) :]:
- try:
- value = kwargs.pop(name)
- except KeyError:
- value = missing
- if name == "caller":
- found_caller = True
- arguments.append(value)
- else:
- found_caller = self.explicit_caller
-
- # it's important that the order of these arguments does not change
- # unless it is also changed in the compiler's `function_scoping` method.
- # the order is caller, keyword arguments, positional arguments!
- if self.caller and not found_caller:
- caller = kwargs.pop("caller", None)
- if caller is None:
- caller = self._environment.undefined("No caller defined", name="caller")
- arguments.append(caller)
-
- if self.catch_kwargs:
- arguments.append(kwargs)
- elif kwargs:
- if "caller" in kwargs:
- raise TypeError(
- f"macro {self.name!r} was invoked with two values for the special"
- " caller argument. This is most likely a bug."
- )
- raise TypeError(
- f"macro {self.name!r} takes no keyword argument {next(iter(kwargs))!r}"
- )
- if self.catch_varargs:
- arguments.append(args[self._argument_count :])
- elif len(args) > self._argument_count:
- raise TypeError(
- f"macro {self.name!r} takes not more than"
- f" {len(self.arguments)} argument(s)"
- )
-
- return self._invoke(arguments, autoescape)
-
- def _invoke(self, arguments, autoescape):
- """This method is being swapped out by the async implementation."""
- rv = self._func(*arguments)
- if autoescape:
- rv = Markup(rv)
- return rv
-
- def __repr__(self):
- name = "anonymous" if self.name is None else repr(self.name)
- return f"<{self.__class__.__name__} {name}>"
-
-
-class Undefined:
- """The default undefined type. This undefined type can be printed and
- iterated over, but every other access will raise an :exc:`UndefinedError`:
-
- >>> foo = Undefined(name='foo')
- >>> str(foo)
- ''
- >>> not foo
- True
- >>> foo + 42
- Traceback (most recent call last):
- ...
- jinja2.exceptions.UndefinedError: 'foo' is undefined
- """
-
- __slots__ = (
- "_undefined_hint",
- "_undefined_obj",
- "_undefined_name",
- "_undefined_exception",
- )
-
- def __init__(self, hint=None, obj=missing, name=None, exc=UndefinedError):
- self._undefined_hint = hint
- self._undefined_obj = obj
- self._undefined_name = name
- self._undefined_exception = exc
-
- @property
- def _undefined_message(self):
- """Build a message about the undefined value based on how it was
- accessed.
- """
- if self._undefined_hint:
- return self._undefined_hint
-
- if self._undefined_obj is missing:
- return f"{self._undefined_name!r} is undefined"
-
- if not isinstance(self._undefined_name, str):
- return (
- f"{object_type_repr(self._undefined_obj)} has no"
- f" element {self._undefined_name!r}"
- )
-
- return (
- f"{object_type_repr(self._undefined_obj)!r} has no"
- f" attribute {self._undefined_name!r}"
- )
-
- @internalcode
- def _fail_with_undefined_error(self, *args, **kwargs):
- """Raise an :exc:`UndefinedError` when operations are performed
- on the undefined value.
- """
- raise self._undefined_exception(self._undefined_message)
-
- @internalcode
- def __getattr__(self, name):
- if name[:2] == "__":
- raise AttributeError(name)
- return self._fail_with_undefined_error()
-
- __add__ = __radd__ = __sub__ = __rsub__ = _fail_with_undefined_error
- __mul__ = __rmul__ = __div__ = __rdiv__ = _fail_with_undefined_error
- __truediv__ = __rtruediv__ = _fail_with_undefined_error
- __floordiv__ = __rfloordiv__ = _fail_with_undefined_error
- __mod__ = __rmod__ = _fail_with_undefined_error
- __pos__ = __neg__ = _fail_with_undefined_error
- __call__ = __getitem__ = _fail_with_undefined_error
- __lt__ = __le__ = __gt__ = __ge__ = _fail_with_undefined_error
- __int__ = __float__ = __complex__ = _fail_with_undefined_error
- __pow__ = __rpow__ = _fail_with_undefined_error
-
- def __eq__(self, other):
- return type(self) is type(other)
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
- def __hash__(self):
- return id(type(self))
-
- def __str__(self):
- return ""
-
- def __len__(self):
- return 0
-
- def __iter__(self):
- if 0:
- yield None
-
- def __bool__(self):
- return False
-
- def __repr__(self):
- return "Undefined"
-
-
-def make_logging_undefined(logger=None, base=None):
- """Given a logger object this returns a new undefined class that will
- log certain failures. It will log iterations and printing. If no
- logger is given a default logger is created.
-
- Example::
-
- logger = logging.getLogger(__name__)
- LoggingUndefined = make_logging_undefined(
- logger=logger,
- base=Undefined
- )
-
- .. versionadded:: 2.8
-
- :param logger: the logger to use. If not provided, a default logger
- is created.
- :param base: the base class to add logging functionality to. This
- defaults to :class:`Undefined`.
- """
- if logger is None:
- import logging
-
- logger = logging.getLogger(__name__)
- logger.addHandler(logging.StreamHandler(sys.stderr))
- if base is None:
- base = Undefined
-
- def _log_message(undef):
- logger.warning("Template variable warning: %s", undef._undefined_message)
-
- class LoggingUndefined(base):
- def _fail_with_undefined_error(self, *args, **kwargs):
- try:
- return super()._fail_with_undefined_error(*args, **kwargs)
- except self._undefined_exception as e:
- logger.error(f"Template variable error: %s", e)
- raise e
-
- def __str__(self):
- _log_message(self)
- return super().__str__()
-
- def __iter__(self):
- _log_message(self)
- return super().__iter__()
-
- def __bool__(self):
- _log_message(self)
- return super().__bool__()
-
- return LoggingUndefined
-
-
-class ChainableUndefined(Undefined):
- """An undefined that is chainable, where both ``__getattr__`` and
- ``__getitem__`` return itself rather than raising an
- :exc:`UndefinedError`.
-
- >>> foo = ChainableUndefined(name='foo')
- >>> str(foo.bar['baz'])
- ''
- >>> foo.bar['baz'] + 42
- Traceback (most recent call last):
- ...
- jinja2.exceptions.UndefinedError: 'foo' is undefined
-
- .. versionadded:: 2.11.0
- """
-
- __slots__ = ()
-
- def __html__(self):
- return self.__str__()
-
- def __getattr__(self, _):
- return self
-
- __getitem__ = __getattr__
-
-
-class DebugUndefined(Undefined):
- """An undefined that returns the debug info when printed.
-
- >>> foo = DebugUndefined(name='foo')
- >>> str(foo)
- '{{ foo }}'
- >>> not foo
- True
- >>> foo + 42
- Traceback (most recent call last):
- ...
- jinja2.exceptions.UndefinedError: 'foo' is undefined
- """
-
- __slots__ = ()
-
- def __str__(self):
- if self._undefined_hint:
- message = f"undefined value printed: {self._undefined_hint}"
-
- elif self._undefined_obj is missing:
- message = self._undefined_name
-
- else:
- message = (
- f"no such element: {object_type_repr(self._undefined_obj)}"
- f"[{self._undefined_name!r}]"
- )
-
- return f"{{{{ {message} }}}}"
-
-
-class StrictUndefined(Undefined):
- """An undefined that barks on print and iteration as well as boolean
- tests and all kinds of comparisons. In other words: you can do nothing
- with it except checking if it's defined using the `defined` test.
-
- >>> foo = StrictUndefined(name='foo')
- >>> str(foo)
- Traceback (most recent call last):
- ...
- jinja2.exceptions.UndefinedError: 'foo' is undefined
- >>> not foo
- Traceback (most recent call last):
- ...
- jinja2.exceptions.UndefinedError: 'foo' is undefined
- >>> foo + 42
- Traceback (most recent call last):
- ...
- jinja2.exceptions.UndefinedError: 'foo' is undefined
- """
-
- __slots__ = ()
- __iter__ = __str__ = __len__ = Undefined._fail_with_undefined_error
- __eq__ = __ne__ = __bool__ = __hash__ = Undefined._fail_with_undefined_error
-
-
-# Remove slots attributes, after the metaclass is applied they are
-# unneeded and contain wrong data for subclasses.
-del (
- Undefined.__slots__,
- ChainableUndefined.__slots__,
- DebugUndefined.__slots__,
- StrictUndefined.__slots__,
-)
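
A minimal sketch contrasting the undefined types defined in this module, assuming a standard Jinja install:

from jinja2 import ChainableUndefined, DebugUndefined, Environment, StrictUndefined

print(Environment().from_string("[{{ missing }}]").render())  # [] -- default Undefined prints as ''

print(Environment(undefined=DebugUndefined).from_string("[{{ missing }}]").render())  # [{{ missing }}]

print(Environment(undefined=ChainableUndefined)
      .from_string("[{{ missing.attr['key'] }}]").render())  # [] -- chained access stays undefined

try:
    Environment(undefined=StrictUndefined).from_string("{{ missing }}").render()
except Exception as exc:
    print(type(exc).__name__)  # UndefinedError
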
diff --git a/src/jinja2/sandbox.py b/src/jinja2/sandbox.py
deleted file mode 100644
index 5c6d0946..00000000
--- a/src/jinja2/sandbox.py
+++ /dev/null
@@ -1,419 +0,0 @@
-"""A sandbox layer that ensures unsafe operations cannot be performed.
-Useful when the template itself comes from an untrusted source.
-"""
-import operator
-import types
-from _string import formatter_field_name_split
-from collections import abc
-from collections import deque
-from string import Formatter
-
-from markupsafe import EscapeFormatter
-from markupsafe import Markup
-
-from .environment import Environment
-from .exceptions import SecurityError
-
-#: maximum number of items a range may produce
-MAX_RANGE = 100000
-
-#: Unsafe function attributes.
-UNSAFE_FUNCTION_ATTRIBUTES = set()
-
-#: Unsafe method attributes. Function attributes are unsafe for methods too.
-UNSAFE_METHOD_ATTRIBUTES = set()
-
-#: unsafe generator attributes.
-UNSAFE_GENERATOR_ATTRIBUTES = {"gi_frame", "gi_code"}
-
-#: unsafe attributes on coroutines
-UNSAFE_COROUTINE_ATTRIBUTES = {"cr_frame", "cr_code"}
-
-#: unsafe attributes on async generators
-UNSAFE_ASYNC_GENERATOR_ATTRIBUTES = {"ag_code", "ag_frame"}
-
-_mutable_spec = (
- (
- abc.MutableSet,
- frozenset(
- [
- "add",
- "clear",
- "difference_update",
- "discard",
- "pop",
- "remove",
- "symmetric_difference_update",
- "update",
- ]
- ),
- ),
- (
- abc.MutableMapping,
- frozenset(["clear", "pop", "popitem", "setdefault", "update"]),
- ),
- (
- abc.MutableSequence,
- frozenset(["append", "reverse", "insert", "sort", "extend", "remove"]),
- ),
- (
- deque,
- frozenset(
- [
- "append",
- "appendleft",
- "clear",
- "extend",
- "extendleft",
- "pop",
- "popleft",
- "remove",
- "rotate",
- ]
- ),
- ),
-)
-
-
-def inspect_format_method(callable):
- if not isinstance(
- callable, (types.MethodType, types.BuiltinMethodType)
- ) or callable.__name__ not in ("format", "format_map"):
- return None
- obj = callable.__self__
- if isinstance(obj, str):
- return obj
-
-
-def safe_range(*args):
- """A range that can't generate ranges with a length of more than
- MAX_RANGE items.
- """
- rng = range(*args)
-
- if len(rng) > MAX_RANGE:
- raise OverflowError(
- "Range too big. The sandbox blocks ranges larger than"
- f" MAX_RANGE ({MAX_RANGE})."
- )
-
- return rng
-
-
-def unsafe(f):
- """Marks a function or method as unsafe.
-
- .. code-block: python
-
- @unsafe
- def delete(self):
- pass
- """
- f.unsafe_callable = True
- return f
-
-
-def is_internal_attribute(obj, attr):
- """Test if the attribute given is an internal python attribute. For
- example this function returns `True` for the `func_code` attribute of
- python objects. This is useful if the environment method
- :meth:`~SandboxedEnvironment.is_safe_attribute` is overridden.
-
- >>> from jinja2.sandbox import is_internal_attribute
- >>> is_internal_attribute(str, "mro")
- True
- >>> is_internal_attribute(str, "upper")
- False
- """
- if isinstance(obj, types.FunctionType):
- if attr in UNSAFE_FUNCTION_ATTRIBUTES:
- return True
- elif isinstance(obj, types.MethodType):
- if attr in UNSAFE_FUNCTION_ATTRIBUTES or attr in UNSAFE_METHOD_ATTRIBUTES:
- return True
- elif isinstance(obj, type):
- if attr == "mro":
- return True
- elif isinstance(obj, (types.CodeType, types.TracebackType, types.FrameType)):
- return True
- elif isinstance(obj, types.GeneratorType):
- if attr in UNSAFE_GENERATOR_ATTRIBUTES:
- return True
- elif hasattr(types, "CoroutineType") and isinstance(obj, types.CoroutineType):
- if attr in UNSAFE_COROUTINE_ATTRIBUTES:
- return True
- elif hasattr(types, "AsyncGeneratorType") and isinstance(
- obj, types.AsyncGeneratorType
- ):
- if attr in UNSAFE_ASYNC_GENERATOR_ATTRIBUTES:
- return True
- return attr.startswith("__")
-
-
-def modifies_known_mutable(obj, attr):
- """This function checks if an attribute on a builtin mutable object
- (list, dict, set or deque) or the corresponding ABCs would modify it
- if called.
-
- >>> modifies_known_mutable({}, "clear")
- True
- >>> modifies_known_mutable({}, "keys")
- False
- >>> modifies_known_mutable([], "append")
- True
- >>> modifies_known_mutable([], "index")
- False
-
- If called with an unsupported object, ``False`` is returned.
-
- >>> modifies_known_mutable("foo", "upper")
- False
- """
- for typespec, unsafe in _mutable_spec:
- if isinstance(obj, typespec):
- return attr in unsafe
- return False
-
-
-class SandboxedEnvironment(Environment):
- """The sandboxed environment. It works like the regular environment but
- tells the compiler to generate sandboxed code. Additionally subclasses of
- this environment may override the methods that tell the runtime what
- attributes or functions are safe to access.
-
- If the template tries to access insecure code a :exc:`SecurityError` is
- raised. However, other exceptions may also occur during rendering, so
- the caller has to ensure that all exceptions are caught.
- """
-
- sandboxed = True
-
- #: default callback table for the binary operators. A copy of this is
- #: available on each instance of a sandboxed environment as
- #: :attr:`binop_table`
- default_binop_table = {
- "+": operator.add,
- "-": operator.sub,
- "*": operator.mul,
- "/": operator.truediv,
- "//": operator.floordiv,
- "**": operator.pow,
- "%": operator.mod,
- }
-
- #: default callback table for the unary operators. A copy of this is
- #: available on each instance of a sandboxed environment as
- #: :attr:`unop_table`
- default_unop_table = {"+": operator.pos, "-": operator.neg}
-
- #: a set of binary operators that should be intercepted. Each operator
- #: that is added to this set (empty by default) is delegated to the
- #: :meth:`call_binop` method that will perform the operator. The default
- #: operator callback is specified by :attr:`binop_table`.
- #:
- #: The following binary operators are interceptable:
- #: ``//``, ``%``, ``+``, ``*``, ``-``, ``/``, and ``**``
- #:
- #: The default operation from the operator table corresponds to the
- #: builtin function. Intercepted calls are always slower than the native
- #: operator call, so make sure only to intercept the ones you are
- #: interested in.
- #:
- #: .. versionadded:: 2.6
- intercepted_binops = frozenset()
-
- #: a set of unary operators that should be intercepted. Each operator
- #: that is added to this set (empty by default) is delegated to the
- #: :meth:`call_unop` method that will perform the operator. The default
- #: operator callback is specified by :attr:`unop_table`.
- #:
- #: The following unary operators are interceptable: ``+``, ``-``
- #:
- #: The default operation from the operator table corresponds to the
- #: builtin function. Intercepted calls are always slower than the native
- #: operator call, so make sure only to intercept the ones you are
- #: interested in.
- #:
- #: .. versionadded:: 2.6
- intercepted_unops = frozenset()
-
- def intercept_unop(self, operator):
- """Called during template compilation with the name of a unary
- operator to check if it should be intercepted at runtime. If this
- method returns `True`, :meth:`call_unop` is executed for this unary
- operator. The default implementation of :meth:`call_unop` will use
- the :attr:`unop_table` dictionary to perform the operator with the
- same logic as the builtin one.
-
- The following unary operators are interceptable: ``+`` and ``-``
-
- Intercepted calls are always slower than the native operator call,
- so make sure only to intercept the ones you are interested in.
-
- .. versionadded:: 2.6
- """
- return False
-
- def __init__(self, *args, **kwargs):
- Environment.__init__(self, *args, **kwargs)
- self.globals["range"] = safe_range
- self.binop_table = self.default_binop_table.copy()
- self.unop_table = self.default_unop_table.copy()
-
- def is_safe_attribute(self, obj, attr, value):
- """The sandboxed environment will call this method to check if the
- attribute of an object is safe to access. By default all attributes
- starting with an underscore are considered private as well as the
- special attributes of internal python objects as returned by the
- :func:`is_internal_attribute` function.
- """
- return not (attr.startswith("_") or is_internal_attribute(obj, attr))
-
- def is_safe_callable(self, obj):
- """Check if an object is safely callable. Per default a function is
- considered safe unless the `unsafe_callable` attribute exists and is
- True. Override this method to alter the behavior, but this won't
- affect the `unsafe` decorator from this module.
- """
- return not (
- getattr(obj, "unsafe_callable", False) or getattr(obj, "alters_data", False)
- )
-
- def call_binop(self, context, operator, left, right):
- """For intercepted binary operator calls (:meth:`intercepted_binops`)
- this function is executed instead of the builtin operator. This can
- be used to fine tune the behavior of certain operators.
-
- .. versionadded:: 2.6
- """
- return self.binop_table[operator](left, right)
-
- def call_unop(self, context, operator, arg):
- """For intercepted unary operator calls (:meth:`intercepted_unops`)
- this function is executed instead of the builtin operator. This can
- be used to fine tune the behavior of certain operators.
-
- .. versionadded:: 2.6
- """
- return self.unop_table[operator](arg)
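
A minimal sketch of intercepting a binary operator as described above, assuming a standard Jinja install; the subclass name is made up:

from jinja2.exceptions import SecurityError
from jinja2.sandbox import SandboxedEnvironment

class PowerFreeEnvironment(SandboxedEnvironment):
    # Route every use of ** through call_binop so it can be rejected.
    intercepted_binops = frozenset(["**"])

    def call_binop(self, context, operator, left, right):
        if operator == "**":
            raise SecurityError("the power operator is not allowed")
        return super().call_binop(context, operator, left, right)

env = PowerFreeEnvironment()
print(env.from_string("{{ 3 * 3 }}").render())  # 9
# env.from_string("{{ 2 ** 10 }}").render() would raise SecurityError
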
-
- def getitem(self, obj, argument):
- """Subscribe an object from sandboxed code."""
- try:
- return obj[argument]
- except (TypeError, LookupError):
- if isinstance(argument, str):
- try:
- attr = str(argument)
- except Exception:
- pass
- else:
- try:
- value = getattr(obj, attr)
- except AttributeError:
- pass
- else:
- if self.is_safe_attribute(obj, argument, value):
- return value
- return self.unsafe_undefined(obj, argument)
- return self.undefined(obj=obj, name=argument)
-
- def getattr(self, obj, attribute):
- """Subscribe an object from sandboxed code and prefer the
- attribute. The attribute passed *must* be a bytestring.
- """
- try:
- value = getattr(obj, attribute)
- except AttributeError:
- try:
- return obj[attribute]
- except (TypeError, LookupError):
- pass
- else:
- if self.is_safe_attribute(obj, attribute, value):
- return value
- return self.unsafe_undefined(obj, attribute)
- return self.undefined(obj=obj, name=attribute)
-
- def unsafe_undefined(self, obj, attribute):
- """Return an undefined object for unsafe attributes."""
- return self.undefined(
- f"access to attribute {attribute!r} of"
- f" {obj.__class__.__name__!r} object is unsafe.",
- name=attribute,
- obj=obj,
- exc=SecurityError,
- )
-
- def format_string(self, s, args, kwargs, format_func=None):
- """If a format call is detected, then this is routed through this
- method so that our safety sandbox can be used for it.
- """
- if isinstance(s, Markup):
- formatter = SandboxedEscapeFormatter(self, s.escape)
- else:
- formatter = SandboxedFormatter(self)
-
- if format_func is not None and format_func.__name__ == "format_map":
- if len(args) != 1 or kwargs:
- raise TypeError(
- "format_map() takes exactly one argument"
- f" {len(args) + (kwargs is not None)} given"
- )
-
- kwargs = args[0]
- args = None
-
- rv = formatter.vformat(s, args, kwargs)
- return type(s)(rv)
-
- def call(__self, __context, __obj, *args, **kwargs): # noqa: B902
- """Call an object from sandboxed code."""
- fmt = inspect_format_method(__obj)
- if fmt is not None:
- return __self.format_string(fmt, args, kwargs, __obj)
-
- # the double prefixes are to avoid double keyword argument
- # errors when proxying the call.
- if not __self.is_safe_callable(__obj):
- raise SecurityError(f"{__obj!r} is not safely callable")
- return __context.call(__obj, *args, **kwargs)
-
-
-class ImmutableSandboxedEnvironment(SandboxedEnvironment):
- """Works exactly like the regular `SandboxedEnvironment` but does not
- permit modifications on the builtin mutable objects `list`, `set`, and
- `dict` by using the :func:`modifies_known_mutable` function.
- """
-
- def is_safe_attribute(self, obj, attr, value):
- if not SandboxedEnvironment.is_safe_attribute(self, obj, attr, value):
- return False
- return not modifies_known_mutable(obj, attr)
-
-
-class SandboxedFormatterMixin:
- def __init__(self, env):
- self._env = env
-
- def get_field(self, field_name, args, kwargs):
- first, rest = formatter_field_name_split(field_name)
- obj = self.get_value(first, args, kwargs)
- for is_attr, i in rest:
- if is_attr:
- obj = self._env.getattr(obj, i)
- else:
- obj = self._env.getitem(obj, i)
- return obj, first
-
-
-class SandboxedFormatter(SandboxedFormatterMixin, Formatter):
- def __init__(self, env):
- SandboxedFormatterMixin.__init__(self, env)
- Formatter.__init__(self)
-
-
-class SandboxedEscapeFormatter(SandboxedFormatterMixin, EscapeFormatter):
- def __init__(self, env, escape):
- SandboxedFormatterMixin.__init__(self, env)
- EscapeFormatter.__init__(self, escape)
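
A usage sketch of the two environments defined in this module: the plain sandbox blocks unsafe attribute access, and the immutable variant additionally refuses calls that would mutate built-in containers::

    from jinja2.sandbox import ImmutableSandboxedEnvironment, SecurityError

    env = ImmutableSandboxedEnvironment()
    template = env.from_string("{{ items.append(4) }}")
    try:
        template.render(items=[1, 2, 3])
    except SecurityError as exc:
        # list.append is a known mutating method, so the sandbox rejects it
        print("blocked:", exc)
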
diff --git a/src/jinja2/tests.py b/src/jinja2/tests.py
deleted file mode 100644
index bc763268..00000000
--- a/src/jinja2/tests.py
+++ /dev/null
@@ -1,211 +0,0 @@
-"""Built-in template tests used with the ``is`` operator."""
-import operator
-import re
-from collections import abc
-from numbers import Number
-
-from .runtime import Undefined
-
-number_re = re.compile(r"^-?\d+(\.\d+)?$")
-regex_type = type(number_re)
-test_callable = callable
-
-
-def test_odd(value):
- """Return true if the variable is odd."""
- return value % 2 == 1
-
-
-def test_even(value):
- """Return true if the variable is even."""
- return value % 2 == 0
-
-
-def test_divisibleby(value, num):
- """Check if a variable is divisible by a number."""
- return value % num == 0
-
-
-def test_defined(value):
- """Return true if the variable is defined:
-
- .. sourcecode:: jinja
-
- {% if variable is defined %}
- value of variable: {{ variable }}
- {% else %}
- variable is not defined
- {% endif %}
-
- See the :func:`default` filter for a simple way to set undefined
- variables.
- """
- return not isinstance(value, Undefined)
-
-
-def test_undefined(value):
- """Like :func:`defined` but the other way round."""
- return isinstance(value, Undefined)
-
-
-def test_none(value):
- """Return true if the variable is none."""
- return value is None
-
-
-def test_boolean(value):
- """Return true if the object is a boolean value.
-
- .. versionadded:: 2.11
- """
- return value is True or value is False
-
-
-def test_false(value):
- """Return true if the object is False.
-
- .. versionadded:: 2.11
- """
- return value is False
-
-
-def test_true(value):
- """Return true if the object is True.
-
- .. versionadded:: 2.11
- """
- return value is True
-
-
-# NOTE: The existing 'number' test matches booleans and floats
-def test_integer(value):
- """Return true if the object is an integer.
-
- .. versionadded:: 2.11
- """
- return isinstance(value, int) and value is not True and value is not False
-
-
-# NOTE: The existing 'number' test matches booleans and integers
-def test_float(value):
- """Return true if the object is a float.
-
- .. versionadded:: 2.11
- """
- return isinstance(value, float)
-
-
-def test_lower(value):
- """Return true if the variable is lowercased."""
- return str(value).islower()
-
-
-def test_upper(value):
- """Return true if the variable is uppercased."""
- return str(value).isupper()
-
-
-def test_string(value):
- """Return true if the object is a string."""
- return isinstance(value, str)
-
-
-def test_mapping(value):
- """Return true if the object is a mapping (dict etc.).
-
- .. versionadded:: 2.6
- """
- return isinstance(value, abc.Mapping)
-
-
-def test_number(value):
- """Return true if the variable is a number."""
- return isinstance(value, Number)
-
-
-def test_sequence(value):
- """Return true if the variable is a sequence. Sequences are variables
- that have a length and support item access.
- """
- try:
- len(value)
- value.__getitem__
- except Exception:
- return False
- return True
-
-
-def test_sameas(value, other):
- """Check if an object points to the same memory address as another
- object:
-
- .. sourcecode:: jinja
-
- {% if foo.attribute is sameas false %}
- the foo attribute really is the `False` singleton
- {% endif %}
- """
- return value is other
-
-
-def test_iterable(value):
- """Check if it's possible to iterate over an object."""
- try:
- iter(value)
- except TypeError:
- return False
- return True
-
-
-def test_escaped(value):
- """Check if the value is escaped."""
- return hasattr(value, "__html__")
-
-
-def test_in(value, seq):
- """Check if value is in seq.
-
- .. versionadded:: 2.10
- """
- return value in seq
-
-
-TESTS = {
- "odd": test_odd,
- "even": test_even,
- "divisibleby": test_divisibleby,
- "defined": test_defined,
- "undefined": test_undefined,
- "none": test_none,
- "boolean": test_boolean,
- "false": test_false,
- "true": test_true,
- "integer": test_integer,
- "float": test_float,
- "lower": test_lower,
- "upper": test_upper,
- "string": test_string,
- "mapping": test_mapping,
- "number": test_number,
- "sequence": test_sequence,
- "iterable": test_iterable,
- "callable": test_callable,
- "sameas": test_sameas,
- "escaped": test_escaped,
- "in": test_in,
- "==": operator.eq,
- "eq": operator.eq,
- "equalto": operator.eq,
- "!=": operator.ne,
- "ne": operator.ne,
- ">": operator.gt,
- "gt": operator.gt,
- "greaterthan": operator.gt,
- "ge": operator.ge,
- ">=": operator.ge,
- "<": operator.lt,
- "lt": operator.lt,
- "lessthan": operator.lt,
- "<=": operator.le,
- "le": operator.le,
-}
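
Custom tests are registered the same way the built-ins in TESTS are, keyed by name on Environment.tests. A small sketch (the "prime" test is illustrative, not part of this module)::

    from jinja2 import Environment

    def test_prime(n):
        # trial division; good enough for a demo
        return n > 1 and all(n % i for i in range(2, int(n ** 0.5) + 1))

    env = Environment()
    env.tests["prime"] = test_prime
    print(env.from_string("{{ 7 is prime }}, {{ 8 is prime }}").render())
    # True, False
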
diff --git a/src/jinja2/utils.py b/src/jinja2/utils.py
deleted file mode 100644
index 8ee02958..00000000
--- a/src/jinja2/utils.py
+++ /dev/null
@@ -1,666 +0,0 @@
-import json
-import os
-import re
-from collections import abc
-from collections import deque
-from random import choice
-from random import randrange
-from threading import Lock
-from urllib.parse import quote_from_bytes
-
-from markupsafe import escape
-from markupsafe import Markup
-
-_word_split_re = re.compile(r"(\s+)")
-_lead_pattern = "|".join(map(re.escape, ("(", "<", "&lt;")))
-_trail_pattern = "|".join(map(re.escape, (".", ",", ")", ">", "\n", "&gt;")))
-_punctuation_re = re.compile(
- fr"^(?P<lead>(?:{_lead_pattern})*)(?P<middle>.*?)(?P<trail>(?:{_trail_pattern})*)$"
-)
-_simple_email_re = re.compile(r"^\S+@[a-zA-Z0-9._-]+\.[a-zA-Z0-9._-]+$")
-_striptags_re = re.compile(r"(<!--.*?-->|<[^>]*>)")
-_entity_re = re.compile(r"&([^;]+);")
-_letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
-_digits = "0123456789"
-
-# special singleton representing missing values for the runtime
-missing = type("MissingType", (), {"__repr__": lambda x: "missing"})()
-
-# internal code
-internal_code = set()
-
-concat = "".join
-
-_slash_escape = "\\/" not in json.dumps("/")
-
-
-def contextfunction(f):
- """This decorator can be used to mark a function or method as a context callable.
- A context callable is passed the active :class:`Context` as first argument when
- called from the template. This is useful if a function wants to get access
- to the context or functions provided on the context object. For example
- a function that returns a sorted list of template variables the current
- template exports could look like this::
-
- @contextfunction
- def get_exported_names(context):
- return sorted(context.exported_vars)
- """
- f.contextfunction = True
- return f
-
-
-def evalcontextfunction(f):
- """This decorator can be used to mark a function or method as an eval
- context callable. This is similar to the :func:`contextfunction`
- but instead of passing the context, an evaluation context object is
- passed. For more information about the eval context, see
- :ref:`eval-context`.
-
- .. versionadded:: 2.4
- """
- f.evalcontextfunction = True
- return f
-
-
-def environmentfunction(f):
- """This decorator can be used to mark a function or method as environment
- callable. This decorator works exactly like the :func:`contextfunction`
- decorator, except that the first argument is the active :class:`Environment`
- instead of the context.
- """
- f.environmentfunction = True
- return f
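
A sketch of how one of these markers is used in practice: the decorated callable is placed in the environment's globals and receives the active context automatically when called from a template (the function name is illustrative)::

    from jinja2 import Environment, contextfunction

    @contextfunction
    def greet(context):
        # read a template variable through the active context
        return "hello " + context.get("name", "world")

    env = Environment()
    env.globals["greet"] = greet
    print(env.from_string("{{ greet() }}").render(name="Jinja"))
    # hello Jinja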
-
-
-def internalcode(f):
- """Marks the function as internally used"""
- internal_code.add(f.__code__)
- return f
-
-
-def is_undefined(obj):
- """Check if the object passed is undefined. This does nothing more than
- performing an instance check against :class:`Undefined` but looks nicer.
- This can be used for custom filters or tests that want to react to
- undefined variables. For example a custom default filter can look like
- this::
-
- def default(var, default=''):
- if is_undefined(var):
- return default
- return var
- """
- from .runtime import Undefined
-
- return isinstance(obj, Undefined)
-
-
-def consume(iterable):
- """Consumes an iterable without doing anything with it."""
- for _ in iterable:
- pass
-
-
-def clear_caches():
- """Jinja keeps internal caches for environments and lexers. These are
- used so that Jinja doesn't have to recreate environments and lexers all
- the time. Normally you don't have to care about that but if you are
- measuring memory consumption you may want to clean the caches.
- """
- from .environment import _spontaneous_environments
- from .lexer import _lexer_cache
-
- _spontaneous_environments.clear()
- _lexer_cache.clear()
-
-
-def import_string(import_name, silent=False):
- """Imports an object based on a string. This is useful if you want to
- use import paths as endpoints or something similar. An import path can
- be specified either in dotted notation (``xml.sax.saxutils.escape``)
- or with a colon as object delimiter (``xml.sax.saxutils:escape``).
-
- If `silent` is True, the return value will be `None` if the import
- fails.
-
- :return: imported object
- """
- try:
- if ":" in import_name:
- module, obj = import_name.split(":", 1)
- elif "." in import_name:
- module, _, obj = import_name.rpartition(".")
- else:
- return __import__(import_name)
- return getattr(__import__(module, None, None, [obj]), obj)
- except (ImportError, AttributeError):
- if not silent:
- raise
-
-
-def open_if_exists(filename, mode="rb"):
- """Returns an open file object for the filename if that file exists,
- otherwise ``None``.
- """
- if not os.path.isfile(filename):
- return None
-
- return open(filename, mode)
-
-
-def object_type_repr(obj):
- """Returns the name of the object's type. For some recognized
- singletons the name of the object is returned instead. (For
- example for `None` and `Ellipsis`).
- """
- if obj is None:
- return "None"
- elif obj is Ellipsis:
- return "Ellipsis"
-
- cls = type(obj)
-
- if cls.__module__ == "builtins":
- return f"{cls.__name__} object"
-
- return f"{cls.__module__}.{cls.__name__} object"
-
-
-def pformat(obj):
- """Format an object using :func:`pprint.pformat`.
- """
- from pprint import pformat
-
- return pformat(obj)
-
-
-def urlize(text, trim_url_limit=None, rel=None, target=None):
- """Converts any URLs in text into clickable links. Works on http://,
- https:// and www. links. Links can have trailing punctuation (periods,
- commas, close-parens) and leading punctuation (opening parens) and
- it'll still do the right thing.
-
- If trim_url_limit is not None, the URLs in link text will be limited
- to trim_url_limit characters.
-
- If rel is not None, its value is added as a rel attribute to the
- generated links.
-
- If target is not None, a target attribute will be added to the link.
- """
-
- def trim_url(x, limit=trim_url_limit):
- if limit is not None:
- return x[:limit] + ("..." if len(x) >= limit else "")
-
- return x
-
- words = _word_split_re.split(str(escape(text)))
- rel_attr = f' rel="{escape(rel)}"' if rel else ""
- target_attr = f' target="{escape(target)}"' if target else ""
-
- for i, word in enumerate(words):
- match = _punctuation_re.match(word)
- if match:
- lead, middle, trail = match.groups()
- if middle.startswith("www.") or (
- "@" not in middle
- and not middle.startswith("http://")
- and not middle.startswith("https://")
- and len(middle) > 0
- and middle[0] in _letters + _digits
- and (
- middle.endswith(".org")
- or middle.endswith(".net")
- or middle.endswith(".com")
- )
- ):
- middle = (
- f'<a href="http://{middle}"{rel_attr}{target_attr}>'
- f"{trim_url(middle)}</a>"
- )
- if middle.startswith("http://") or middle.startswith("https://"):
- middle = (
- f'<a href="{middle}"{rel_attr}{target_attr}>{trim_url(middle)}</a>'
- )
- if (
- "@" in middle
- and not middle.startswith("www.")
- and ":" not in middle
- and _simple_email_re.match(middle)
- ):
- middle = f'<a href="mailto:{middle}">{middle}</a>'
- if lead + middle + trail != word:
- words[i] = lead + middle + trail
- return "".join(words)
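
An illustrative call; in templates this helper is normally reached through the urlize filter::

    from jinja2.utils import urlize

    print(urlize("docs at https://palletsprojects.com today", target="_blank"))
    # docs at <a href="https://palletsprojects.com" target="_blank">https://palletsprojects.com</a> today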
-
-
-def generate_lorem_ipsum(n=5, html=True, min=20, max=100):
- """Generate some lorem ipsum for the template."""
- from .constants import LOREM_IPSUM_WORDS
-
- words = LOREM_IPSUM_WORDS.split()
- result = []
-
- for _ in range(n):
- next_capitalized = True
- last_comma = last_fullstop = 0
- word = None
- last = None
- p = []
-
- # each paragraph contains between min and max words (20 to 100 by default).
- for idx, _ in enumerate(range(randrange(min, max))):
- while True:
- word = choice(words)
- if word != last:
- last = word
- break
- if next_capitalized:
- word = word.capitalize()
- next_capitalized = False
- # add commas
- if idx - randrange(3, 8) > last_comma:
- last_comma = idx
- last_fullstop += 2
- word += ","
- # add end of sentences
- if idx - randrange(10, 20) > last_fullstop:
- last_comma = last_fullstop = idx
- word += "."
- next_capitalized = True
- p.append(word)
-
- # ensure that the paragraph ends with a dot.
- p = " ".join(p)
- if p.endswith(","):
- p = p[:-1] + "."
- elif not p.endswith("."):
- p += "."
- result.append(p)
-
- if not html:
- return "\n\n".join(result)
- return Markup("\n".join(f"<p>{escape(x)}</p>" for x in result))
-
-
-def url_quote(obj, charset="utf-8", for_qs=False):
- """Quote a string for use in a URL using the given charset.
-
- This function is misnamed; it is a wrapper around
- :func:`urllib.parse.quote`.
-
- :param obj: String or bytes to quote. Other types are converted to
- string then encoded to bytes using the given charset.
- :param charset: Encode text to bytes using this charset.
- :param for_qs: Quote "/" and use "+" for spaces.
- """
- if not isinstance(obj, bytes):
- if not isinstance(obj, str):
- obj = str(obj)
-
- obj = obj.encode(charset)
-
- safe = b"" if for_qs else b"/"
- rv = quote_from_bytes(obj, safe)
-
- if for_qs:
- rv = rv.replace("%20", "+")
-
- return rv
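
A quick sketch of the two modes described above::

    from jinja2.utils import url_quote

    print(url_quote("a b/c"))               # a%20b/c  ("/" is left unquoted)
    print(url_quote("a b/c", for_qs=True))  # a+b%2Fc  ("/" quoted, spaces become "+")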
-
-
-def unicode_urlencode(obj, charset="utf-8", for_qs=False):
- import warnings
-
- warnings.warn(
- "'unicode_urlencode' has been renamed to 'url_quote'. The old"
- " name will be removed in version 3.1.",
- DeprecationWarning,
- stacklevel=2,
- )
- return url_quote(obj, charset=charset, for_qs=for_qs)
-
-
-@abc.MutableMapping.register
-class LRUCache:
- """A simple LRU Cache implementation."""
-
- # this is fast for small capacities (something below 1000) but doesn't
- # scale. But as long as it's only used as storage for templates this
- # won't do any harm.
-
- def __init__(self, capacity):
- self.capacity = capacity
- self._mapping = {}
- self._queue = deque()
- self._postinit()
-
- def _postinit(self):
- # alias all queue methods for faster lookup
- self._popleft = self._queue.popleft
- self._pop = self._queue.pop
- self._remove = self._queue.remove
- self._wlock = Lock()
- self._append = self._queue.append
-
- def __getstate__(self):
- return {
- "capacity": self.capacity,
- "_mapping": self._mapping,
- "_queue": self._queue,
- }
-
- def __setstate__(self, d):
- self.__dict__.update(d)
- self._postinit()
-
- def __getnewargs__(self):
- return (self.capacity,)
-
- def copy(self):
- """Return a shallow copy of the instance."""
- rv = self.__class__(self.capacity)
- rv._mapping.update(self._mapping)
- rv._queue.extend(self._queue)
- return rv
-
- def get(self, key, default=None):
- """Return an item from the cache dict or `default`"""
- try:
- return self[key]
- except KeyError:
- return default
-
- def setdefault(self, key, default=None):
- """Set `default` if the key is not in the cache, otherwise
- leave it unchanged. Return the value of this key.
- """
- try:
- return self[key]
- except KeyError:
- self[key] = default
- return default
-
- def clear(self):
- """Clear the cache."""
- self._wlock.acquire()
- try:
- self._mapping.clear()
- self._queue.clear()
- finally:
- self._wlock.release()
-
- def __contains__(self, key):
- """Check if a key exists in this cache."""
- return key in self._mapping
-
- def __len__(self):
- """Return the current size of the cache."""
- return len(self._mapping)
-
- def __repr__(self):
- return f"<{self.__class__.__name__} {self._mapping!r}>"
-
- def __getitem__(self, key):
- """Get an item from the cache. Moves the item up so that it then
- has the highest priority.
-
- Raise a `KeyError` if it does not exist.
- """
- self._wlock.acquire()
- try:
- rv = self._mapping[key]
- if self._queue[-1] != key:
- try:
- self._remove(key)
- except ValueError:
- # if something removed the key from the container
- # when we read, ignore the ValueError that we would
- # get otherwise.
- pass
- self._append(key)
- return rv
- finally:
- self._wlock.release()
-
- def __setitem__(self, key, value):
- """Sets the value for an item. Moves the item up so that it
- then has the highest priority.
- """
- self._wlock.acquire()
- try:
- if key in self._mapping:
- self._remove(key)
- elif len(self._mapping) == self.capacity:
- del self._mapping[self._popleft()]
- self._append(key)
- self._mapping[key] = value
- finally:
- self._wlock.release()
-
- def __delitem__(self, key):
- """Remove an item from the cache dict.
- Raise a `KeyError` if it does not exist.
- """
- self._wlock.acquire()
- try:
- del self._mapping[key]
- try:
- self._remove(key)
- except ValueError:
- pass
- finally:
- self._wlock.release()
-
- def items(self):
- """Return a list of items."""
- result = [(key, self._mapping[key]) for key in list(self._queue)]
- result.reverse()
- return result
-
- def values(self):
- """Return a list of all values."""
- return [x[1] for x in self.items()]
-
- def keys(self):
- """Return a list of all keys ordered by most recent usage."""
- return list(self)
-
- def __iter__(self):
- return reversed(tuple(self._queue))
-
- def __reversed__(self):
- """Iterate over the keys in the cache dict, oldest items
- coming first.
- """
- return iter(tuple(self._queue))
-
- __copy__ = copy
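
A minimal usage sketch of the cache above; with a capacity of two, adding a third key evicts the least recently used one::

    from jinja2.utils import LRUCache

    cache = LRUCache(2)
    cache["a"] = 1
    cache["b"] = 2
    cache["a"]        # touching "a" makes it the most recently used key
    cache["c"] = 3    # capacity reached, so "b" is evicted
    print("b" in cache, cache.keys())
    # False ['c', 'a']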
-
-
-def select_autoescape(
- enabled_extensions=("html", "htm", "xml"),
- disabled_extensions=(),
- default_for_string=True,
- default=False,
-):
- """Intelligently sets the initial value of autoescaping based on the
- filename of the template. This is the recommended way to configure
- autoescaping if you do not want to write a custom function yourself.
-
- If you want to enable it for all templates created from strings or
- for all templates with `.html` and `.xml` extensions::
-
- from jinja2 import Environment, select_autoescape
- env = Environment(autoescape=select_autoescape(
- enabled_extensions=('html', 'xml'),
- default_for_string=True,
- ))
-
- Example configuration to turn it on at all times except if the template
- ends with `.txt`::
-
- from jinja2 import Environment, select_autoescape
- env = Environment(autoescape=select_autoescape(
- disabled_extensions=('txt',),
- default_for_string=True,
- default=True,
- ))
-
- `enabled_extensions` is an iterable of all the extensions that
- autoescaping should be enabled for. Likewise `disabled_extensions` is
- an iterable of extensions it should be disabled for. If a template is
- loaded from a string then the default from `default_for_string` is used.
- If nothing matches then the initial value of autoescaping is set to the
- value of `default`.
-
- For security reasons this function operates case-insensitively.
-
- .. versionadded:: 2.9
- """
- enabled_patterns = tuple(f".{x.lstrip('.').lower()}" for x in enabled_extensions)
- disabled_patterns = tuple(f".{x.lstrip('.').lower()}" for x in disabled_extensions)
-
- def autoescape(template_name):
- if template_name is None:
- return default_for_string
- template_name = template_name.lower()
- if template_name.endswith(enabled_patterns):
- return True
- if template_name.endswith(disabled_patterns):
- return False
- return default
-
- return autoescape
-
-
-def htmlsafe_json_dumps(obj, dumper=None, **kwargs):
- """Works exactly like :func:`dumps` but is safe for use in ``<script>``
- tags. It accepts the same arguments and returns a JSON string. Note that
- this is available in templates through the ``|tojson`` filter which will
- also mark the result as safe. Due to how this function escapes certain
- characters this is safe even if used outside of ``<script>`` tags.
-
- The following characters are escaped in strings:
-
- - ``<``
- - ``>``
- - ``&``
- - ``'``
-
- This makes it safe to embed such strings in any place in HTML with the
- notable exception of double quoted attributes. In that case single
- quote your attributes or HTML escape it in addition.
- """
- if dumper is None:
- dumper = json.dumps
- rv = (
- dumper(obj, **kwargs)
- .replace("<", "\\u003c")
- .replace(">", "\\u003e")
- .replace("&", "\\u0026")
- .replace("'", "\\u0027")
- )
- return Markup(rv)
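
A short sketch of the escaping; in templates this is normally reached through the tojson filter::

    from jinja2.utils import htmlsafe_json_dumps

    print(htmlsafe_json_dumps({"msg": "<script>alert('x')</script>"}))
    # {"msg": "\u003cscript\u003ealert(\u0027x\u0027)\u003c/script\u003e"}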
-
-
-class Cycler:
- """Cycle through values by yielding them one at a time, then restarting
- once the end is reached. Available as ``cycler`` in templates.
-
- Similar to ``loop.cycle``, but can be used outside loops or across
- multiple loops. For example, render a list of folders and files in a
- list, alternately giving them "odd" and "even" classes.
-
- .. code-block:: html+jinja
-
- {% set row_class = cycler("odd", "even") %}
- <ul class="browser">
- {% for folder in folders %}
- <li class="folder {{ row_class.next() }}">{{ folder }}
- {% endfor %}
- {% for file in files %}
- <li class="file {{ row_class.next() }}">{{ file }}
- {% endfor %}
- </ul>
-
- :param items: Each positional argument will be yielded in the order
- given for each cycle.
-
- .. versionadded:: 2.1
- """
-
- def __init__(self, *items):
- if not items:
- raise RuntimeError("at least one item has to be provided")
- self.items = items
- self.pos = 0
-
- def reset(self):
- """Resets the current item to the first item."""
- self.pos = 0
-
- @property
- def current(self):
- """Return the current item. Equivalent to the item that will be
- returned next time :meth:`next` is called.
- """
- return self.items[self.pos]
-
- def next(self):
- """Return the current item, then advance :attr:`current` to the
- next item.
- """
- rv = self.current
- self.pos = (self.pos + 1) % len(self.items)
- return rv
-
- __next__ = next
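
The class also works outside templates; a tiny sketch::

    from jinja2.utils import Cycler

    row_class = Cycler("odd", "even")
    print([next(row_class) for _ in range(5)])
    # ['odd', 'even', 'odd', 'even', 'odd']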
-
-
-class Joiner:
- """A joining helper for templates."""
-
- def __init__(self, sep=", "):
- self.sep = sep
- self.used = False
-
- def __call__(self):
- if not self.used:
- self.used = True
- return ""
- return self.sep
-
-
-class Namespace:
- """A namespace object that can hold arbitrary attributes. It may be
- initialized from a dictionary or with keyword arguments."""
-
- def __init__(*args, **kwargs): # noqa: B902
- self, args = args[0], args[1:]
- self.__attrs = dict(*args, **kwargs)
-
- def __getattribute__(self, name):
- # __class__ is needed for the awaitable check in async mode
- if name in {"_Namespace__attrs", "__class__"}:
- return object.__getattribute__(self, name)
- try:
- return self.__attrs[name]
- except KeyError:
- raise AttributeError(name)
-
- def __setitem__(self, name, value):
- self.__attrs[name] = value
-
- def __repr__(self):
- return f"<Namespace {self.__attrs!r}>"
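
In templates this object backs {% set ns = namespace(...) %}; attribute writes from template code go through __setitem__, so a direct usage sketch looks like this::

    from jinja2.utils import Namespace

    ns = Namespace(count=0, label="files")
    ns["count"] = ns.count + 1   # reads go through __getattribute__, writes through __setitem__
    print(ns.count, ns.label)    # 1 files
    print(ns)                    # <Namespace {'count': 1, 'label': 'files'}>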
-
-
-# does this python version support async for in and async generators?
-try:
- exec("async def _():\n async for _ in ():\n yield _")
- have_async_gen = True
-except SyntaxError:
- have_async_gen = False
diff --git a/src/jinja2/visitor.py b/src/jinja2/visitor.py
deleted file mode 100644
index 590fa9eb..00000000
--- a/src/jinja2/visitor.py
+++ /dev/null
@@ -1,79 +0,0 @@
-"""API for traversing the AST nodes. Implemented by the compiler and
-meta introspection.
-"""
-from .nodes import Node
-
-
-class NodeVisitor:
- """Walks the abstract syntax tree and calls visitor functions for every
- node found. The visitor functions may return values which will be
- forwarded by the `visit` method.
-
- Per default the visitor functions for the nodes are ``'visit_'`` +
- class name of the node. So a `TryFinally` node visit function would
- be `visit_TryFinally`. This behavior can be changed by overriding
- the `get_visitor` function. If no visitor function exists for a node
- (return value `None`) the `generic_visit` visitor is used instead.
- """
-
- def get_visitor(self, node):
- """Return the visitor function for this node or `None` if no visitor
- exists for this node. In that case the generic visit function is
- used instead.
- """
- return getattr(self, f"visit_{node.__class__.__name__}", None)
-
- def visit(self, node, *args, **kwargs):
- """Visit a node."""
- f = self.get_visitor(node)
- if f is not None:
- return f(node, *args, **kwargs)
- return self.generic_visit(node, *args, **kwargs)
-
- def generic_visit(self, node, *args, **kwargs):
- """Called if no explicit visitor function exists for a node."""
- for node in node.iter_child_nodes():
- self.visit(node, *args, **kwargs)
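
A sketch of a concrete visitor built on the class above: collect every variable name referenced by a template (jinja2.meta offers similar ready-made introspection)::

    from jinja2 import Environment
    from jinja2.visitor import NodeVisitor

    class NameCollector(NodeVisitor):
        def __init__(self):
            self.names = set()

        def visit_Name(self, node):
            # Name nodes carry the referenced identifier
            self.names.add(node.name)
            self.generic_visit(node)

    env = Environment()
    ast = env.parse("{{ user.name }} has {{ count }} items")
    collector = NameCollector()
    collector.visit(ast)
    print(sorted(collector.names))
    # ['count', 'user']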
-
-
-class NodeTransformer(NodeVisitor):
- """Walks the abstract syntax tree and allows modifications of nodes.
-
- The `NodeTransformer` will walk the AST and use the return value of the
- visitor functions to replace or remove the old node. If the return
- value of the visitor function is `None`, the node will be removed
- from the previous location; otherwise it's replaced with the return
- value. The return value may be the original node in which case no
- replacement takes place.
- """
-
- def generic_visit(self, node, *args, **kwargs):
- for field, old_value in node.iter_fields():
- if isinstance(old_value, list):
- new_values = []
- for value in old_value:
- if isinstance(value, Node):
- value = self.visit(value, *args, **kwargs)
- if value is None:
- continue
- elif not isinstance(value, Node):
- new_values.extend(value)
- continue
- new_values.append(value)
- old_value[:] = new_values
- elif isinstance(old_value, Node):
- new_node = self.visit(old_value, *args, **kwargs)
- if new_node is None:
- delattr(node, field)
- else:
- setattr(node, field, new_node)
- return node
-
- def visit_list(self, node, *args, **kwargs):
- """As transformers may return lists in some places this method
- can be used to enforce a list as return value.
- """
- rv = self.visit(node, *args, **kwargs)
- if not isinstance(rv, list):
- rv = [rv]
- return rv
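
A sketch of a transformer built on the class above: upper-case every string constant of a parsed template before compiling it. Passing the modified AST back through from_string is assumed to work here because Environment.compile accepts nodes as well as source text::

    from jinja2 import Environment
    from jinja2.visitor import NodeTransformer

    class UpperConsts(NodeTransformer):
        def visit_Const(self, node):
            # rewrite string literals in place and keep the node
            if isinstance(node.value, str):
                node.value = node.value.upper()
            return node

    env = Environment()
    ast = env.parse("{{ 'hello ' ~ name }}")
    UpperConsts().visit(ast)
    print(env.from_string(ast).render(name="world"))
    # HELLO world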