mirror of https://gitlab.com/MoonTestUse1/AdministrationItDepartmens.git synced 2025-08-14 00:25:46 +02:00

Everything at once

Author: MoonTestUse1
Date: 2024-12-31 02:37:57 +06:00
Parent: 8e53bb6cb2
Commit: d5780b2eab
3258 changed files with 1087440 additions and 268 deletions

ext/autohandler.py

@@ -0,0 +1,70 @@
# ext/autohandler.py
# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""adds autohandler functionality to Mako templates.
requires that the TemplateLookup class is used with templates.
usage::
<%!
from mako.ext.autohandler import autohandler
%>
<%inherit file="${autohandler(template, context)}"/>
or with custom autohandler filename::
<%!
from mako.ext.autohandler import autohandler
%>
<%inherit file="${autohandler(template, context, name='somefilename')}"/>
"""
import os
import posixpath
import re
def autohandler(template, context, name="autohandler"):
lookup = context.lookup
_template_uri = template.module._template_uri
if not lookup.filesystem_checks:
try:
return lookup._uri_cache[(autohandler, _template_uri, name)]
except KeyError:
pass
tokens = re.findall(r"([^/]+)", posixpath.dirname(_template_uri)) + [name]
while len(tokens):
path = "/" + "/".join(tokens)
if path != _template_uri and _file_exists(lookup, path):
if not lookup.filesystem_checks:
return lookup._uri_cache.setdefault(
(autohandler, _template_uri, name), path
)
else:
return path
if len(tokens) == 1:
break
tokens[-2:] = [name]
if not lookup.filesystem_checks:
return lookup._uri_cache.setdefault(
(autohandler, _template_uri, name), None
)
else:
return None
def _file_exists(lookup, path):
psub = re.sub(r"^/", "", path)
for d in lookup.directories:
if os.path.exists(d + "/" + psub):
return True
else:
return False
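
A minimal sketch of how the helper above is wired up on the lookup side; the directory and template paths below are illustrative, not part of this commit.

# Hypothetical usage sketch for mako.ext.autohandler (paths are illustrative).
from mako.lookup import TemplateLookup

lookup = TemplateLookup(directories=["/path/to/templates"])

# A template such as /some/page.html that begins with
#     <%! from mako.ext.autohandler import autohandler %>
#     <%inherit file="${autohandler(template, context)}"/>
# will inherit from the nearest 'autohandler' file found while walking up
# from its own directory toward the lookup root.
template = lookup.get_template("/some/page.html")
print(template.render())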

ext/babelplugin.py

@@ -0,0 +1,57 @@
# ext/babelplugin.py
# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""gettext message extraction via Babel: https://pypi.org/project/Babel/"""
from babel.messages.extract import extract_python
from mako.ext.extract import MessageExtractor
class BabelMakoExtractor(MessageExtractor):
def __init__(self, keywords, comment_tags, options):
self.keywords = keywords
self.options = options
self.config = {
"comment-tags": " ".join(comment_tags),
"encoding": options.get(
"input_encoding", options.get("encoding", None)
),
}
super().__init__()
def __call__(self, fileobj):
return self.process_file(fileobj)
def process_python(self, code, code_lineno, translator_strings):
comment_tags = self.config["comment-tags"]
for (
lineno,
funcname,
messages,
python_translator_comments,
) in extract_python(code, self.keywords, comment_tags, self.options):
yield (
code_lineno + (lineno - 1),
funcname,
messages,
translator_strings + python_translator_comments,
)
def extract(fileobj, keywords, comment_tags, options):
"""Extract messages from Mako templates.
:param fileobj: the file-like object the messages should be extracted from
:param keywords: a list of keywords (i.e. function names) that should be
recognized as translation functions
:param comment_tags: a list of translator tags to search for and include
in the results
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)`` tuples
:rtype: ``iterator``
"""
extractor = BabelMakoExtractor(keywords, comment_tags, options)
yield from extractor(fileobj)
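
A short sketch of calling extract() above directly on a template file object, outside of a Babel mapping configuration; the filename, keyword list, and options are illustrative.

# Hypothetical direct invocation of the Babel extractor (illustrative values).
with open("messages.html", "rb") as fileobj:
    for lineno, funcname, messages, comments in extract(
        fileobj,
        ["_", "gettext", "ngettext"],      # keywords, as in the docstring
        ["TRANSLATORS:"],                  # translator comment tags
        {"input_encoding": "utf-8"},
    ):
        print(lineno, funcname, messages, comments)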

ext/beaker_cache.py

@@ -0,0 +1,82 @@
# ext/beaker_cache.py
# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Provide a :class:`.CacheImpl` for the Beaker caching system."""
from mako import exceptions
from mako.cache import CacheImpl
try:
from beaker import cache as beaker_cache
except:
has_beaker = False
else:
has_beaker = True
_beaker_cache = None
class BeakerCacheImpl(CacheImpl):
"""A :class:`.CacheImpl` provided for the Beaker caching system.
This plugin is used by default, based on the default
value of ``'beaker'`` for the ``cache_impl`` parameter of the
:class:`.Template` or :class:`.TemplateLookup` classes.
"""
def __init__(self, cache):
if not has_beaker:
raise exceptions.RuntimeException(
"Can't initialize Beaker plugin; Beaker is not installed."
)
global _beaker_cache
if _beaker_cache is None:
if "manager" in cache.template.cache_args:
_beaker_cache = cache.template.cache_args["manager"]
else:
_beaker_cache = beaker_cache.CacheManager()
super().__init__(cache)
def _get_cache(self, **kw):
expiretime = kw.pop("timeout", None)
if "dir" in kw:
kw["data_dir"] = kw.pop("dir")
elif self.cache.template.module_directory:
kw["data_dir"] = self.cache.template.module_directory
if "manager" in kw:
kw.pop("manager")
if kw.get("type") == "memcached":
kw["type"] = "ext:memcached"
if "region" in kw:
region = kw.pop("region")
cache = _beaker_cache.get_cache_region(self.cache.id, region, **kw)
else:
cache = _beaker_cache.get_cache(self.cache.id, **kw)
cache_args = {"starttime": self.cache.starttime}
if expiretime:
cache_args["expiretime"] = expiretime
return cache, cache_args
def get_or_create(self, key, creation_function, **kw):
cache, kw = self._get_cache(**kw)
return cache.get(key, createfunc=creation_function, **kw)
def put(self, key, value, **kw):
cache, kw = self._get_cache(**kw)
cache.put(key, value, **kw)
def get(self, key, **kw):
cache, kw = self._get_cache(**kw)
return cache.get(key, **kw)
def invalidate(self, key, **kw):
cache, kw = self._get_cache(**kw)
cache.remove_value(key, **kw)
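
A small sketch of what selecting this plugin looks like from the template side, assuming Beaker is installed; the template source and cache arguments are illustrative.

# Hypothetical usage of the Beaker cache backend (requires Beaker).
from mako.template import Template

t = Template(
    "<%page cached='True' cache_timeout='60'/>\nvalue: ${x}",
    cache_impl="beaker",            # the default, shown explicitly here
    cache_args={"type": "memory"},  # forwarded to _get_cache() above
)
print(t.render(x="hello"))          # a second render within 60s hits the cache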

ext/extract.py

@@ -0,0 +1,129 @@
# ext/extract.py
# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from io import BytesIO
from io import StringIO
import re
from mako import lexer
from mako import parsetree
class MessageExtractor:
use_bytes = True
def process_file(self, fileobj):
template_node = lexer.Lexer(
fileobj.read(), input_encoding=self.config["encoding"]
).parse()
yield from self.extract_nodes(template_node.get_children())
def extract_nodes(self, nodes):
translator_comments = []
in_translator_comments = False
input_encoding = self.config["encoding"] or "ascii"
comment_tags = list(
filter(None, re.split(r"\s+", self.config["comment-tags"]))
)
for node in nodes:
child_nodes = None
if (
in_translator_comments
and isinstance(node, parsetree.Text)
and not node.content.strip()
):
# Ignore whitespace within translator comments
continue
if isinstance(node, parsetree.Comment):
value = node.text.strip()
if in_translator_comments:
translator_comments.extend(
self._split_comment(node.lineno, value)
)
continue
for comment_tag in comment_tags:
if value.startswith(comment_tag):
in_translator_comments = True
translator_comments.extend(
self._split_comment(node.lineno, value)
)
continue
if isinstance(node, parsetree.DefTag):
code = node.function_decl.code
child_nodes = node.nodes
elif isinstance(node, parsetree.BlockTag):
code = node.body_decl.code
child_nodes = node.nodes
elif isinstance(node, parsetree.CallTag):
code = node.code.code
child_nodes = node.nodes
elif isinstance(node, parsetree.PageTag):
code = node.body_decl.code
elif isinstance(node, parsetree.CallNamespaceTag):
code = node.expression
child_nodes = node.nodes
elif isinstance(node, parsetree.ControlLine):
if node.isend:
in_translator_comments = False
continue
code = node.text
elif isinstance(node, parsetree.Code):
in_translator_comments = False
code = node.code.code
elif isinstance(node, parsetree.Expression):
code = node.code.code
else:
continue
# Comments don't apply unless they immediately precede the message
if (
translator_comments
and translator_comments[-1][0] < node.lineno - 1
):
translator_comments = []
translator_strings = [
comment[1] for comment in translator_comments
]
if isinstance(code, str) and self.use_bytes:
code = code.encode(input_encoding, "backslashreplace")
used_translator_comments = False
# We add extra newline to work around a pybabel bug
# (see python-babel/babel#274, parse_encoding dies if the first
# input string of the input is non-ascii)
# Also, because we added it, we have to subtract one from
# node.lineno
if self.use_bytes:
code = BytesIO(b"\n" + code)
else:
code = StringIO("\n" + code)
for message in self.process_python(
code, node.lineno - 1, translator_strings
):
yield message
used_translator_comments = True
if used_translator_comments:
translator_comments = []
in_translator_comments = False
if child_nodes:
yield from self.extract_nodes(child_nodes)
@staticmethod
def _split_comment(lineno, comment):
"""Return the multiline comment at lineno split into a list of
comment line numbers and the accompanying comment line"""
return [
(lineno + index, line)
for index, line in enumerate(comment.splitlines())
]
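
The class above is a base; the Babel and lingua plugins supply process_python(). A toy subclass, purely illustrative, shows the contract: each embedded Python snippet arrives as a file-like object together with its template line number and any preceding translator comments.

# Illustrative-only subclass of MessageExtractor; not part of Mako.
class PrintingExtractor(MessageExtractor):
    use_bytes = False
    config = {"encoding": "utf-8", "comment-tags": ""}

    def process_python(self, code, code_lineno, translator_strings):
        # Just report the snippet instead of extracting gettext calls.
        yield code_lineno, None, code.getvalue().strip(), translator_strings

with open("page.html") as f:   # hypothetical Mako template
    for lineno, funcname, snippet, comments in PrintingExtractor().process_file(f):
        print(lineno, snippet, comments)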

ext/linguaplugin.py

@@ -0,0 +1,57 @@
# ext/linguaplugin.py
# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import contextlib
import io
from lingua.extractors import Extractor
from lingua.extractors import get_extractor
from lingua.extractors import Message
from mako.ext.extract import MessageExtractor
class LinguaMakoExtractor(Extractor, MessageExtractor):
"""Mako templates"""
use_bytes = False
extensions = [".mako"]
default_config = {"encoding": "utf-8", "comment-tags": ""}
def __call__(self, filename, options, fileobj=None):
self.options = options
self.filename = filename
self.python_extractor = get_extractor("x.py")
if fileobj is None:
ctx = open(filename, "r")
else:
ctx = contextlib.nullcontext(fileobj)
with ctx as file_:
yield from self.process_file(file_)
def process_python(self, code, code_lineno, translator_strings):
source = code.getvalue().strip()
if source.endswith(":"):
if source in ("try:", "else:") or source.startswith("except"):
source = "" # Ignore try/except and else
elif source.startswith("elif"):
source = source[2:] # Replace "elif" with "if"
source += "pass"
code = io.StringIO(source)
for msg in self.python_extractor(
self.filename, self.options, code, code_lineno - 1
):
if translator_strings:
msg = Message(
msg.msgctxt,
msg.msgid,
msg.msgid_plural,
msg.flags,
" ".join(translator_strings + [msg.comment]),
msg.tcomment,
msg.location,
)
yield msg
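
lingua discovers extractors through the 'lingua.extractors' entry point and selects this one by the .mako extension; a packaging sketch of such a registration follows (the setuptools call and package name are illustrative, though Mako's own setup.cfg declares an equivalent entry point).

# Hypothetical setuptools registration sketch for the lingua extractor.
from setuptools import setup

setup(
    name="my-mako-package",
    entry_points={
        "lingua.extractors": [
            "mako = mako.ext.linguaplugin:LinguaMakoExtractor",
        ],
    },
)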

ext/preprocessors.py

@@ -0,0 +1,20 @@
# ext/preprocessors.py
# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""preprocessing functions, used with the 'preprocessor'
argument on Template, TemplateLookup"""
import re
def convert_comments(text):
"""preprocess old style comments.
example:
from mako.ext.preprocessors import convert_comments
t = Template(..., preprocessor=convert_comments)"""
return re.sub(r"(?<=\n)\s*#[^#]", "##", text)
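
A short sketch of plugging convert_comments into a template; the template text is illustrative.

# Hypothetical usage of the convert_comments preprocessor.
from mako.template import Template
from mako.ext.preprocessors import convert_comments

source = "Hello\n# old-style comment\nWorld\n"
t = Template(source, preprocessor=convert_comments)
print(t.render())   # the '#' line is rewritten to '##' and stripped by the lexer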

ext/pygmentplugin.py

@@ -0,0 +1,150 @@
# ext/pygmentplugin.py
# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from pygments import highlight
from pygments.formatters.html import HtmlFormatter
from pygments.lexer import bygroups
from pygments.lexer import DelegatingLexer
from pygments.lexer import include
from pygments.lexer import RegexLexer
from pygments.lexer import using
from pygments.lexers.agile import Python3Lexer
from pygments.lexers.agile import PythonLexer
from pygments.lexers.web import CssLexer
from pygments.lexers.web import HtmlLexer
from pygments.lexers.web import JavascriptLexer
from pygments.lexers.web import XmlLexer
from pygments.token import Comment
from pygments.token import Keyword
from pygments.token import Name
from pygments.token import Operator
from pygments.token import Other
from pygments.token import String
from pygments.token import Text
class MakoLexer(RegexLexer):
name = "Mako"
aliases = ["mako"]
filenames = ["*.mao"]
tokens = {
"root": [
(
r"(\s*)(\%)(\s*end(?:\w+))(\n|\Z)",
bygroups(Text, Comment.Preproc, Keyword, Other),
),
(
r"(\s*)(\%(?!%))([^\n]*)(\n|\Z)",
bygroups(Text, Comment.Preproc, using(PythonLexer), Other),
),
(
r"(\s*)(##[^\n]*)(\n|\Z)",
bygroups(Text, Comment.Preproc, Other),
),
(r"""(?s)<%doc>.*?</%doc>""", Comment.Preproc),
(
r"(<%)([\w\.\:]+)",
bygroups(Comment.Preproc, Name.Builtin),
"tag",
),
(
r"(</%)([\w\.\:]+)(>)",
bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc),
),
(r"<%(?=([\w\.\:]+))", Comment.Preproc, "ondeftags"),
(
r"(?s)(<%(?:!?))(.*?)(%>)",
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
),
(
r"(\$\{)(.*?)(\})",
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
),
(
r"""(?sx)
(.+?) # anything, followed by:
(?:
(?<=\n)(?=%(?!%)|\#\#) | # an eval or comment line
(?=\#\*) | # multiline comment
(?=</?%) | # a python block
# call start or end
(?=\$\{) | # a substitution
(?<=\n)(?=\s*%) |
# - don't consume
(\\\n) | # an escaped newline
\Z # end of string
)
""",
bygroups(Other, Operator),
),
(r"\s+", Text),
],
"ondeftags": [
(r"<%", Comment.Preproc),
(r"(?<=<%)(include|inherit|namespace|page)", Name.Builtin),
include("tag"),
],
"tag": [
(r'((?:\w+)\s*=)\s*(".*?")', bygroups(Name.Attribute, String)),
(r"/?\s*>", Comment.Preproc, "#pop"),
(r"\s+", Text),
],
"attr": [
('".*?"', String, "#pop"),
("'.*?'", String, "#pop"),
(r"[^\s>]+", String, "#pop"),
],
}
class MakoHtmlLexer(DelegatingLexer):
name = "HTML+Mako"
aliases = ["html+mako"]
def __init__(self, **options):
super().__init__(HtmlLexer, MakoLexer, **options)
class MakoXmlLexer(DelegatingLexer):
name = "XML+Mako"
aliases = ["xml+mako"]
def __init__(self, **options):
super().__init__(XmlLexer, MakoLexer, **options)
class MakoJavascriptLexer(DelegatingLexer):
name = "JavaScript+Mako"
aliases = ["js+mako", "javascript+mako"]
def __init__(self, **options):
super().__init__(JavascriptLexer, MakoLexer, **options)
class MakoCssLexer(DelegatingLexer):
name = "CSS+Mako"
aliases = ["css+mako"]
def __init__(self, **options):
super().__init__(CssLexer, MakoLexer, **options)
pygments_html_formatter = HtmlFormatter(
cssclass="syntax-highlighted", linenos=True
)
def syntax_highlight(filename="", language=None):
mako_lexer = MakoLexer()
python_lexer = Python3Lexer()
if filename.startswith("memory:") or language == "mako":
return lambda string: highlight(
string, mako_lexer, pygments_html_formatter
)
return lambda string: highlight(
string, python_lexer, pygments_html_formatter
)
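
A brief sketch of using the syntax_highlight() helper above to render a template source string as HTML; the template text is illustrative, and the CSS for the 'syntax-highlighted' class comes from the formatter's style definitions.

# Hypothetical usage of syntax_highlight() (illustrative template source).
from pygments.formatters.html import HtmlFormatter

highlighter = syntax_highlight(language="mako")
html = highlighter("% if x:\n    ${x}\n% endif\n")
css = HtmlFormatter(cssclass="syntax-highlighted").get_style_defs()
print(html)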

ext/turbogears.py

@@ -0,0 +1,61 @@
# ext/turbogears.py
# Copyright 2006-2024 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from mako import compat
from mako.lookup import TemplateLookup
from mako.template import Template
class TGPlugin:
"""TurboGears compatible Template Plugin."""
def __init__(self, extra_vars_func=None, options=None, extension="mak"):
self.extra_vars_func = extra_vars_func
self.extension = extension
if not options:
options = {}
# Pull the options out and initialize the lookup
lookup_options = {}
for k, v in options.items():
if k.startswith("mako."):
lookup_options[k[5:]] = v
elif k in ["directories", "filesystem_checks", "module_directory"]:
lookup_options[k] = v
self.lookup = TemplateLookup(**lookup_options)
self.tmpl_options = {}
# transfer lookup args to template args, based on those available
# in getargspec
for kw in compat.inspect_getargspec(Template.__init__)[0]:
if kw in lookup_options:
self.tmpl_options[kw] = lookup_options[kw]
def load_template(self, templatename, template_string=None):
"""Loads a template from a file or a string"""
if template_string is not None:
return Template(template_string, **self.tmpl_options)
# Translate TG dot notation to normal / template path
if "/" not in templatename:
templatename = (
"/" + templatename.replace(".", "/") + "." + self.extension
)
# Lookup template
return self.lookup.get_template(templatename)
def render(
self, info, format="html", fragment=False, template=None # noqa
):
if isinstance(template, str):
template = self.load_template(template)
# Load extra vars func if provided
if self.extra_vars_func:
info.update(self.extra_vars_func())
return template.render(**info)
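
A closing sketch of how the plugin above might be instantiated outside of TurboGears; the directory, option keys, and dotted template name are illustrative.

# Hypothetical standalone use of TGPlugin (illustrative paths and options).
plugin = TGPlugin(
    options={
        "directories": ["/path/to/templates"],
        "mako.input_encoding": "utf-8",   # the 'mako.' prefix is stripped above
    },
    extension="mak",
)
# TG dot notation 'site.index' resolves to '/site/index.mak' in the lookup.
output = plugin.render({"title": "Hello"}, template="site.index")
print(output)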