Removed old PHP code, migrated to Python and Flask
Update .dockerignore, .env, and 503 more files...
website/__init__.py (new file, empty)
website/content/__init__.py (new file, 119 lines)
@@ -0,0 +1,119 @@
import os
from pathlib import Path
from typing import Any

from locked_dict.locked_dict import LockedDict
import yaml

from .metadata import ContentMetadata
from .project import ContentProject
from .tool import ContentTool, ContentToolData


__CONTENT_ARTICLES: LockedDict = LockedDict()
__CONTENT_PROJECTS: LockedDict[str, ContentProject] = LockedDict()
__CONTENT_TOOLS: LockedDict[str, ContentTool] = LockedDict()


def get_articles() -> LockedDict:
    return __CONTENT_ARTICLES


def get_projects() -> LockedDict[str, ContentProject]:
    return __CONTENT_PROJECTS


def get_projects_by_tags(tags: list[str]) -> dict[Any, ContentProject]:
    return {
        project_key: project_value for project_key, project_value in __CONTENT_PROJECTS.items()
        if any(tag in project_value.metadata.general.tags for tag in tags)
    }


def sanitize_input_tags(input_tags: str) -> list[str]:
    tags: list[str] = input_tags.split(";")
    for tag in tags:
        if not tag.isalnum() or len(tag) == 0:
            raise ValueError(f"Non-alphanumeric or empty tag was given: '{tag}'")
    return tags


def get_tools() -> LockedDict:
    return __CONTENT_TOOLS


def get_tools_by_tags(tags: list[str]) -> dict[Any, ContentTool]:
    return {
        tool_key: tool_value for tool_key, tool_value in __CONTENT_TOOLS.items()
        if any(tag in tool_value.metadata.general.tags for tag in tags)
    }


def reload_content_items() -> None:
    global __CONTENT_ARTICLES
    global __CONTENT_PROJECTS
    global __CONTENT_TOOLS

    __CONTENT_ARTICLES = LockedDict()
    __CONTENT_PROJECTS = LockedDict()
    __CONTENT_TOOLS = LockedDict()

    # Articles: only the folder structure is walked for now, loading is not implemented yet.
    for article_folder in os.listdir(os.path.join(os.getcwd(), "data/articles")):
        article_folder_path = os.path.join(os.getcwd(), "data/articles", article_folder)
        if not os.path.isdir(article_folder_path):
            continue
        pass

    # Projects: one YAML metadata file per project, plus a matching Jinja page template.
    for project_item in os.listdir(os.path.join(os.getcwd(), "data/projects")):
        project_item_path = os.path.join(os.getcwd(), "data/projects/", project_item)
        if not os.path.isfile(project_item_path) or project_item.startswith("."):
            continue

        project_id = Path(project_item_path).stem
        project_page_path = os.path.join(os.getcwd(), f"templates/projects/{project_id}.jinja")

        if not all(os.path.isfile(project_file) for project_file in
                   [project_item_path, project_page_path]):
            print(f"Unable to load project '{project_item}' due to missing files!")
            continue

        try:
            with open(project_item_path) as project_yaml_file:
                __CONTENT_PROJECTS[project_id] = ContentProject(
                    id=project_id,
                    metadata=ContentMetadata(**yaml.safe_load(project_yaml_file)),
                    # strings=json.load(open(project_strings_path))  # Deprecated
                )
            print(f"Loaded project '{project_id}'")
        except Exception as e:
            print(f"Unable to load project '{project_id}' due to an exception!")
            print(e)

    # Tools: same layout as projects, plus a "data" section for scripts and stylesheets.
    for tool_item in os.listdir(os.path.join(os.getcwd(), "data/tools")):
        tool_item_path = os.path.join(os.getcwd(), "data/tools", tool_item)
        if not os.path.isfile(tool_item_path) or tool_item.startswith("."):
            continue

        tool_id = Path(tool_item_path).stem
        tool_page_path = os.path.join(os.getcwd(), f"templates/tools/{tool_id}.jinja")

        if not all(os.path.isfile(tool_file) for tool_file in
                   [tool_item_path, tool_page_path]):
            print(f"Unable to load tool '{tool_id}' due to missing files!")
            continue

        try:
            with open(tool_item_path) as tool_yaml_file:
                raw_tool_data = yaml.safe_load(tool_yaml_file)
            __CONTENT_TOOLS[tool_id] = ContentTool(
                id=tool_id,
                metadata=ContentMetadata(**raw_tool_data["metadata"]),
                data=ContentToolData(**raw_tool_data["data"]),
            )
            print(f"Loaded tool '{tool_id}'")
        except Exception as e:
            print(f"Unable to load tool '{tool_id}' due to an exception!")
            print(e)
            continue

    # FIXME: Check if the required files exist too!
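For context, a minimal usage sketch of this module (not part of the commit): it assumes the data/ and templates/ layout checked above is present, and the tag string is an invented example.

# Hypothetical usage sketch; the tag string is an example value.
from website import content

content.reload_content_items()

try:
    tags = content.sanitize_input_tags("python;flask")
except ValueError:
    tags = []

for project_id, project in content.get_projects_by_tags(tags).items():
    print(project_id, project.metadata.general.title_key)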
website/content/metadata.py (new file, 67 lines)
@@ -0,0 +1,67 @@
from dataclasses import dataclass, field
from typing import Optional


@dataclass
class ContentHeadMetadata:
    title_key: str
    description_key: str


@dataclass
class ContentOpengraphMetadata:
    title_key: str
    description_key: str
    type: Optional[str] = field(default=None)
    url: Optional[str] = field(default=None)
    image_url: Optional[str] = field(default=None)
    image_type: Optional[str] = field(default=None)


@dataclass
class ContentTwitterMetadata:
    title_key: str
    description_key: str


@dataclass
class ContentIndexMetadata:
    priority: int
    enable: bool
    title_key: str
    preamble_key: str
    image_alt_key: str
    image_url: str = field(default="/resources/NibblePoker/images/placeholder.png")


@dataclass
class ContentGeneralMetadata:
    icon: str
    title_key: str
    subtitle_key: str
    tags: list[str]


@dataclass
class ContentMetadata:
    head: ContentHeadMetadata
    opengraph: ContentOpengraphMetadata
    twitter: ContentTwitterMetadata
    index: ContentIndexMetadata
    general: ContentGeneralMetadata

    def __post_init__(self):
        # The nested sections arrive as plain dicts from the parsed YAML and are
        # promoted to their dataclasses here.
        self.head: dict
        self.head = ContentHeadMetadata(**self.head)

        self.opengraph: dict
        self.opengraph = ContentOpengraphMetadata(**self.opengraph)

        self.twitter: dict
        self.twitter = ContentTwitterMetadata(**self.twitter)

        self.index: dict
        self.index = ContentIndexMetadata(**self.index)

        self.general: dict
        self.general = ContentGeneralMetadata(**self.general)
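To make the expected input shape concrete, here is a sketch of the nested dictionaries that ContentMetadata accepts after yaml.safe_load(); the keys mirror the dataclass fields above, while the values are invented for illustration.

# Illustrative only: keys mirror the dataclasses above, values are made up.
from website.content.metadata import ContentMetadata

metadata = ContentMetadata(
    head={"title_key": "demo.head.title", "description_key": "demo.head.description"},
    opengraph={"title_key": "demo.og.title", "description_key": "demo.og.description"},
    twitter={"title_key": "demo.twitter.title", "description_key": "demo.twitter.description"},
    index={
        "priority": 10,
        "enable": True,
        "title_key": "demo.index.title",
        "preamble_key": "demo.index.preamble",
        "image_alt_key": "demo.index.image_alt",
    },
    general={"icon": "cube", "title_key": "demo.title", "subtitle_key": "demo.subtitle", "tags": ["demo"]},
)

assert metadata.index.image_url.endswith("placeholder.png")  # the default image_url was applied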
website/content/project.py (new file, 9 lines)
@@ -0,0 +1,9 @@
from dataclasses import dataclass

from .metadata import ContentMetadata


@dataclass
class ContentProject:
    id: str
    metadata: ContentMetadata
website/content/tool.py (new file, 16 lines)
@@ -0,0 +1,16 @@
from dataclasses import dataclass, field

from .metadata import ContentMetadata


@dataclass
class ContentToolData:
    scripts: list[str] = field(default_factory=list)
    stylesheets: list[str] = field(default_factory=list)


@dataclass
class ContentTool:
    id: str
    metadata: ContentMetadata
    data: ContentToolData
website/contributors.py (new file, 55 lines)
@@ -0,0 +1,55 @@
from dataclasses import dataclass, field
from typing import Optional, List

import yaml


@dataclass
class ContributorConfig:
    root_image_path: str = "/"
    root_sound_path: str = "/"


@dataclass
class ContributorEntry:
    name: str
    image: str
    image_hover: Optional[str] = None
    sound_entry: Optional[str] = None
    sound_hover: Optional[str] = None
    sound_exit: Optional[str] = None
    achievements: list[str] = field(default_factory=list)


@dataclass
class ContributorsIndex:
    config: ContributorConfig = field(default_factory=ContributorConfig)
    regular: List[ContributorEntry] = field(default_factory=list)
    spiritual: List[ContributorEntry] = field(default_factory=list)

    def __post_init__(self):
        contributor_entry: dict

        # The YAML parser hands the nested sections over as plain dicts/lists;
        # promote them to their dataclasses here.
        if isinstance(self.config, dict):
            self.config = ContributorConfig(**self.config)

        if self.regular is None:
            self.regular = list()
        else:
            self.regular = [ContributorEntry(**contributor_entry) for contributor_entry in self.regular]

        if self.spiritual is None:
            self.spiritual = list()
        else:
            self.spiritual = [ContributorEntry(**contributor_entry) for contributor_entry in self.spiritual]


__CONTRIBUTORS_DATA: ContributorsIndex = ContributorsIndex()


def reload_contributors_data(definition_file: str) -> None:
    global __CONTRIBUTORS_DATA

    with open(definition_file, 'r') as f:
        __CONTRIBUTORS_DATA = ContributorsIndex(**yaml.safe_load(f))


def get_contributors_data() -> ContributorsIndex:
    return __CONTRIBUTORS_DATA
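A short usage sketch (not part of the commit); the YAML path is an assumption, only the functions and fields come from the file above.

# Hypothetical usage; "data/contributors.yml" is an assumed path.
from website import contributors

contributors.reload_contributors_data("data/contributors.yml")

index = contributors.get_contributors_data()
for entry in index.regular:
    print(entry.name, entry.achievements)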
website/domains.py (new file, 1 line)
@@ -0,0 +1 @@
ALLOWED_DOMAINS = ["nibblepoker.lu", "nibblepoker.com", "nibblepoker.ovh"]
website/l10n/__init__.py (new file, 72 lines)
@@ -0,0 +1,72 @@
from typing import Optional

from locked_dict.locked_dict import LockedDict


class Localizer:
    # Nested mapping: language -> domain -> string key -> localized text.
    _langs_data: LockedDict[str, LockedDict[str, LockedDict[str, str]]]
    _default_lang: str
    _allowed_langs: list[str]

    def __init__(self, default_lang: str, allowed_langs: Optional[list[str]]):
        self._langs_data = LockedDict()
        self._default_lang = default_lang

        self._allowed_langs = allowed_langs
        if self._allowed_langs is None:
            self._allowed_langs = list()
            self._allowed_langs.append(self._default_lang)

    def add_lang(self, lang: str):
        if lang not in self._langs_data.keys():
            self._langs_data[lang] = LockedDict()

    def add_domain(self, lang: str, domain: str, domain_data: Optional[dict[str, str]], strip_prefix: bool = False):
        if domain not in self._langs_data[lang].keys():
            self._langs_data[lang][domain] = LockedDict()

        if domain_data is None:
            return

        if strip_prefix:
            # Turning "domain.some.key" into "some.key" for every entry.
            new_domain_data = dict()
            for key, value in domain_data.items():
                if key.startswith(f"{domain}."):
                    new_domain_data[key[len(f"{domain}."):]] = value
            domain_data = new_domain_data

        self._langs_data[lang][domain].update(domain_data)

    def _localize_internal(self, lang: str, domain: str, key: str, args: list[str] = None) -> Optional[str]:
        if lang not in self._allowed_langs:
            return None

        if lang not in self._langs_data.keys():
            return None

        lang_data = self._langs_data[lang]
        if domain not in lang_data.keys():
            return None

        domain_data = lang_data[domain]
        if key not in domain_data.keys():
            return None

        localized_text = domain_data[key]
        if args is not None:
            # Substituting positional "%0", "%1", ... placeholders.
            for arg_index, arg_value in enumerate(args):
                localized_text = localized_text.replace(f"%{arg_index}", arg_value)

        return localized_text

    def localize(self, lang: str, domain: str, key: str, args: list[str] = None) -> str:
        localized_string = self._localize_internal(lang, domain, key, args)

        if localized_string is None and lang != self._default_lang:
            localized_string = self._localize_internal(self._default_lang, domain, key, args)

        if localized_string is None:
            # Falling back to the raw "domain.key" identifier.
            return domain + "." + key

        return localized_string
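A self-contained sketch of the Localizer behaviour above: prefix stripping, "%0" placeholder substitution, fallback to the default language, and the raw "domain.key" identifier when nothing matches. The strings are invented.

# Illustrative only; the language data is made up.
from website.l10n import Localizer

l10n = Localizer("en", ["en", "fr"])
l10n.add_lang("en")
l10n.add_domain("en", "index", {"index.greeting": "Hello %0!"}, strip_prefix=True)

print(l10n.localize("en", "index", "greeting", ["world"]))  # "Hello world!"
print(l10n.localize("fr", "index", "greeting", ["world"]))  # falls back to "en"
print(l10n.localize("en", "index", "missing"))              # "index.missing"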
website/l10n/utils.py (new file, 168 lines)
@@ -0,0 +1,168 @@
import json
import os.path
from pathlib import Path
from typing import Optional

import yaml

from . import Localizer

DEFAULT_LANG = "en"
ALLOWED_LANGS = ["en", "fr"]

L10N = Localizer(DEFAULT_LANG, ALLOWED_LANGS)


def reload_strings(strings_root: str) -> None:
    global L10N

    for allowed_lang in ALLOWED_LANGS:
        print(f"Adding lang '{allowed_lang}'...")
        L10N.add_lang(allowed_lang)

    for lang_dir in os.listdir(strings_root):
        lang_dir_path = os.path.join(strings_root, lang_dir)

        if not os.path.isdir(lang_dir_path):
            print(f"Ignoring lang non-folder '{lang_dir}'...")
            continue

        if lang_dir not in ALLOWED_LANGS:
            print(f"Ignoring lang folder '{lang_dir}'...")
            continue

        for lang_domain in os.listdir(lang_dir_path):
            if lang_domain.startswith("_"):
                continue

            lang_domain_path = os.path.join(os.getcwd(), strings_root, lang_dir, lang_domain)

            if not os.path.isfile(lang_domain_path):
                continue

            domain_key = str(Path(lang_domain).with_suffix(''))

            if lang_domain.endswith(".json"):
                print(f"Loading JSON lang data from '{lang_domain_path}'...")
                with open(lang_domain_path, "r", encoding="utf-8") as domain_file:
                    L10N.add_domain(lang_dir, domain_key, json.load(domain_file))

            elif lang_domain.endswith(".yml"):
                print(f"Loading YAML lang data from '{lang_domain_path}'...")
                with open(lang_domain_path, "r", encoding="utf-8") as domain_file:
                    L10N.add_domain(lang_dir, domain_key, yaml.safe_load(domain_file))


def localize(strings_key: str, domain: str, language: str, args: list[str] = None) -> str:
    global L10N
    # print(f"l10n({strings_key}, {domain}, {language})")
    return L10N.localize(language, domain, strings_key, args)


def get_user_lang(url_lang: Optional[str], header_langs: Optional[str], simplify_entries: bool = True) -> str:
    if url_lang is not None:
        return url_lang

    if header_langs is None:
        return DEFAULT_LANG

    processed_header_langs: list[tuple[str, float]] = list()
    processed_header_langs.append((DEFAULT_LANG, 0.01))

    for header_lang in header_langs.split(","):
        header_lang_parts: list[str] = header_lang.split(";")

        # Giving entries without a "q=<float>" part a default weight of 0.1.
        if len(header_lang_parts) == 1:
            header_lang_parts.append("0.1")

        if len(header_lang_parts) != 2:
            continue

        # Simplifying complex entries from "en-US" to "en".
        # Duplicates are ignored since they won't matter after sorting.
        if simplify_entries and "-" in header_lang_parts[0]:
            header_lang_parts[0] = header_lang_parts[0].split("-")[0]

        # Only allowing supported languages.
        if header_lang_parts[0] not in ALLOWED_LANGS:
            continue

        # Parsing the language's weight.
        try:
            weight = float(header_lang_parts[1].replace("q=", ""))
        except ValueError:
            continue

        processed_header_langs.append((header_lang_parts[0], weight))

    # Returning the preferred language.
    return max(processed_header_langs, key=lambda x: x[1])[0]


def l10n_url_abs(url: str, raw_lang: Optional[str] = None) -> str:
    if raw_lang is None:
        return f"/{url}".replace("//", "/")
    else:
        return f"/{raw_lang}/{url}".replace("//", "/")


def l10n_url_switch(url: str, new_lang: Optional[str] = None) -> str:
    for allowed_lang in ALLOWED_LANGS:
        url = url.replace(f"/{allowed_lang}/", "/")

    if new_lang is not None:
        url = "/" + new_lang + url

    return url.replace("//", "/")


# Legacy JSON-based implementation, kept for reference:
# STRINGS = dict()
# STRINGS[DEFAULT_LANG] = dict()
# STRINGS["_compile_date"] = "1970-01-01T00:00:00.000000+00:00Z"
#
#
# def reload_strings(strings_file: str) -> None:
#     global STRINGS
#     STRINGS = dict()
#     STRINGS[DEFAULT_LANG] = dict()
#     STRINGS["_compile_date"] = "1970-01-01T00:00:00.000000+00:00Z"
#     STRINGS = json.loads(open(strings_file, "r").read())
#
#
# def localize_internal(string_key: str, language: str, lang_data: dict, fallback: Optional[str]) -> Optional[str]:
#     if language not in ALLOWED_LANGS:
#         return fallback
#
#     if language not in lang_data.keys():
#         language = DEFAULT_LANG
#
#     localized_string = lang_data[language].get(string_key)
#     if localized_string is None and language != DEFAULT_LANG:
#         localized_string = lang_data[DEFAULT_LANG].get(string_key)
#
#     if localized_string is None:
#         return fallback
#     return localized_string
#
#
# def localize(strings_key: str, language: str, extra_lang_data: Optional[dict] = None) -> str:
#     localized_string = None
#
#     if extra_lang_data is not None:
#         localized_string = localize_internal(strings_key, language, extra_lang_data, None)
#
#     if localized_string is None:
#         localized_string = localize_internal(strings_key, language, STRINGS, None)
#
#     if localized_string is None:
#         return f"${strings_key}"
#     return localized_string
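A quick sketch of get_user_lang() on typical inputs; the header values are examples.

# Illustrative calls; header values are examples.
from website.l10n.utils import get_user_lang

print(get_user_lang("fr", None))                          # "fr": the URL language wins
print(get_user_lang(None, "fr-FR,fr;q=0.9,en-US;q=0.8"))  # "fr": highest weight after simplification
print(get_user_lang(None, None))                          # "en": DEFAULT_LANG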
website/renderers/__init__.py (new file, empty)
website/renderers/button.py (new file, 10 lines)
@@ -0,0 +1,10 @@
from flask import render_template


def render_button(inner_html: str, disabled: bool = False) -> str:
    return render_template(
        "elements/button.jinja",
        button_inner_html=inner_html,
        button_disabled=disabled,
        button_extra_classes=""
    )
website/renderers/headings.py (new file, 37 lines)
@@ -0,0 +1,37 @@
from typing import Optional

from flask import render_template


def render_heading(inner_html: str, level: int = 1, icon: Optional[str] = None, right_html: Optional[str] = None,
                   anchor_id: Optional[str] = None, background_class: str = "bkgd-grid") -> str:
    return render_template(
        "elements/heading.jinja",
        heading_inner_html=inner_html,
        heading_level=level + 1,
        heading_icon=icon,
        heading_right_html=right_html,
        heading_anchor_id=anchor_id,
        heading_background_class=background_class,
    )


def render_h1(inner_html: str, icon: Optional[str] = None, right_html: Optional[str] = None,
              anchor_id: Optional[str] = None, background_class: str = "bkgd-grid") -> str:
    return render_heading(
        inner_html, 1, icon, right_html, anchor_id, background_class
    )


def render_h2(inner_html: str, icon: Optional[str] = None, right_html: Optional[str] = None,
              anchor_id: Optional[str] = None, background_class: str = "bkgd-grid") -> str:
    return render_heading(
        inner_html, 2, icon, right_html, anchor_id, background_class
    )


def render_h3(inner_html: str, icon: Optional[str] = None, right_html: Optional[str] = None,
              anchor_id: Optional[str] = None, background_class: str = "bkgd-grid") -> str:
    return render_heading(
        inner_html, 3, icon, right_html, anchor_id, background_class
    )
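These helpers wrap Flask's render_template(), so they only work inside an application context with an elements/heading.jinja template available; a hedged sketch of how they could be called, where the app setup and the argument values are assumptions:

# Hypothetical sketch: assumes a Flask app whose template folder provides
# elements/heading.jinja, as render_heading() expects.
from flask import Flask

from website.renderers.headings import render_h2

app = Flask(__name__, template_folder="templates")

with app.app_context():
    html = render_h2("Downloads", icon="download", anchor_id="downloads")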
website/renderers/lists.py (new file, empty)
website/renderers/paragraph.py (new file, 8 lines)
@@ -0,0 +1,8 @@
from flask import render_template


def render_paragraph(inner_html: str) -> str:
    return render_template(
        "elements/paragraph.jinja",
        paragraph_inner_html=inner_html,
    )
website/renderers/splide.py (new file, 8 lines)
@@ -0,0 +1,8 @@
from flask import render_template


def render_splide(inner_html_panes: list[str]) -> str:
    return render_template(
        "elements/splide.jinja",
        splide_inner_html_panes=inner_html_panes,
    )
website/sidebar.py (new file, 35 lines)
@@ -0,0 +1,35 @@
from dataclasses import dataclass, field
from typing import Optional

import yaml


@dataclass
class SidebarEntry:
    title_key: str
    icon: str
    active_id: str
    abs_href: Optional[str] = field(default=None)
    raw_href: Optional[str] = field(default=None)


__SIDEBAR_ENTRIES: list[Optional[SidebarEntry]] = list()


def reload_sidebar_entries(definition_file: str) -> None:
    global __SIDEBAR_ENTRIES

    __SIDEBAR_ENTRIES = list()

    with open(definition_file, 'r') as f:
        raw_sidebar_entries = yaml.safe_load(f)

    for raw_sidebar_entry in raw_sidebar_entries:
        try:
            __SIDEBAR_ENTRIES.append(SidebarEntry(**raw_sidebar_entry))
        except Exception:
            # Entries that cannot be parsed are kept as None placeholders.
            __SIDEBAR_ENTRIES.append(None)


def get_sidebar_entries() -> list[Optional[SidebarEntry]]:
    return __SIDEBAR_ENTRIES
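A sketch of the YAML shape reload_sidebar_entries() consumes, mirroring the SidebarEntry fields above; the entry itself is invented.

# Hypothetical sketch; the YAML content mirrors SidebarEntry's fields.
import tempfile

from website import sidebar

with tempfile.NamedTemporaryFile("w", suffix=".yml", delete=False) as f:
    f.write(
        "- title_key: sidebar.home\n"
        "  icon: house\n"
        "  active_id: home\n"
        "  abs_href: /\n"
    )

sidebar.reload_sidebar_entries(f.name)
print(sidebar.get_sidebar_entries())  # [SidebarEntry(title_key='sidebar.home', ...)]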
website/sitemap.py (new file, 24 lines)
@@ -0,0 +1,24 @@
import yaml

from .l10n.utils import ALLOWED_LANGS

__SITEMAP_ENTRIES: list[str] = list()


def reload_sitemap_entries(definition_file: str) -> None:
    global __SITEMAP_ENTRIES

    __SITEMAP_ENTRIES = list()

    with open(definition_file, 'r') as f:
        raw_sitemap_entries = yaml.safe_load(f)

    for sitemap_entry in raw_sitemap_entries:
        __SITEMAP_ENTRIES.append(sitemap_entry)

        for allowed_lang in ALLOWED_LANGS:
            __SITEMAP_ENTRIES.append(("/" + allowed_lang + "/" + sitemap_entry).replace("//", "/"))


def get_sitemap_entries() -> list[str]:
    return __SITEMAP_ENTRIES
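A sketch showing how each sitemap entry gets duplicated per allowed language; the entries are examples.

# Hypothetical sketch; entries are examples.
import tempfile

from website import sitemap

with tempfile.NamedTemporaryFile("w", suffix=".yml", delete=False) as f:
    f.write("- /\n- /about\n")

sitemap.reload_sitemap_entries(f.name)
print(sitemap.get_sitemap_entries())
# ['/', '/en/', '/fr/', '/about', '/en/about', '/fr/about']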