Compare commits

..

12 commits

Author SHA1 Message Date
5e83b58b32 🚧 api: new "cached" decorator implementation 2026-03-21 20:56:07 +01:00
0073e72f9c api: add dummy webdav server
- webdav content inside "api" subproject
- ship api.conf for development
2026-02-28 03:01:32 +01:00
f36192dc74 🐛 missed one class method usage 2026-02-28 02:55:32 +01:00
febcf63dba api: add dummy webdav server 2026-02-27 02:45:29 +01:00
3a64668d89 🚧 api: building redis cache decorator 2026-02-25 23:00:09 +01:00
21defd1e3d Merge branch 'develop' into feature/webdav-rework 2026-02-25 22:55:03 +01:00
75b3f01651 🚧 api: begin building own redis cache decorator 2026-02-25 22:51:37 +01:00
1ca9a2083e api: admin/credentials: only allow certain "name" values 2026-02-25 22:50:24 +01:00
7451205bf4 ⬆️ ui: upgrade bulma-toast 2026-02-22 16:51:34 +00:00
049ae8fc56 🚧 webdav rework
- use instance instead of class methods
- prettier cache keys
2026-02-22 16:39:25 +00:00
7b65d8c9b5 🧹 production script: use pydantic constrained integers 2026-02-22 16:38:50 +00:00
09b9886ee7 Merge tag '0.2.0' into develop
 re-scaffolded both projects

- api: "poetry", "isort", "black", "flake8" -> astral.sh tooling
- ui: "webpack" -> "vite"
2026-02-22 13:31:53 +00:00
37 changed files with 492 additions and 97 deletions

View file

@ -1,6 +1,20 @@
name: advent22
services:
webdav:
image: sfuhrm/docker-nginx-webdav
environment:
USERNAME: wduser
PASSWORD: 53cr3t!
volumes:
- ../api/.devcontainer/webdav:/media/data
healthcheck:
test: ["CMD", "sh", "-c", "wget --spider -S -U 'HEALTHCHECK' 'http://127.0.0.1' 2>&1 | grep -E 'HTTP/1\\.[01] (200|401)'"]
interval: 30s
timeout: 10s
start_period: 5s
retries: 3
api:
image: mcr.microsoft.com/devcontainers/python:3-3.14-trixie
volumes:

View file

@ -7,7 +7,7 @@
"dockerComposeFile": "../../.devcontainer/docker_compose.yml",
"service": "api",
"workspaceFolder": "/workspaces/advent22/api",
"runServices": ["api"],
"runServices": ["api", "webdav"],
// Features to add to the dev container. More info: https://containers.dev/features.
"features": {

View file

@ -0,0 +1,27 @@
calendar = "calendar.toml"
random_seed = "foo_bar"
[admin]
username = "admin"
password = "42"
[solution]
value = "ergophobia"
[site]
title = "Advent22 UI"
subtitle = "Lorem ipsum dolor sit amet"
content = "This supports *markdown* **and** Emoji! 😂"
footer = "Pictures from [Lorem picsum](https://picsum.photos)"
[puzzle]
skip_empty = true
extra_days = [5]
begin_day = 1
begin_month = 12
close_after = 90
[image]
size = 1000
border = 60

BIN
api/.devcontainer/webdav/advent22/files/Gorditas-Bold_50.ttf (Stored with Git LFS) Normal file

Binary file not shown.

Binary file not shown.

View file

@ -0,0 +1,15 @@
background = "things.jpg"
favicon = "flower.jpg"
doors = [
{ day = 1, x1 = 59, y1 = 176, x2 = 219, y2 = 400 },
{ day = 2, x1 = 418, y1 = 414, x2 = 540, y2 = 462 },
{ day = 3, x1 = 640, y1 = 159, x2 = 902, y2 = 338 },
{ day = 4, x1 = 698, y1 = 395, x2 = 888, y2 = 841 },
{ day = 5, x1 = 66, y1 = 484, x2 = 196, y2 = 810 },
{ day = 6, x1 = 284, y1 = 166, x2 = 358, y2 = 454 },
{ day = 7, x1 = 266, y1 = 512, x2 = 360, y2 = 807 },
{ day = 8, x1 = 413, y1 = 183, x2 = 583, y2 = 297 },
{ day = 9, x1 = 550, y1 = 465, x2 = 617, y2 = 785 },
{ day = 10, x1 = 425, y1 = 532, x2 = 469, y2 = 778 },
{ day = 11, x1 = 388, y1 = 358, x2 = 610, y2 = 392 },
]

BIN
api/.devcontainer/webdav/advent22/files/flower.jpg (Stored with Git LFS) Normal file

Binary file not shown.

BIN
api/.devcontainer/webdav/advent22/files/things.jpg (Stored with Git LFS) Normal file

Binary file not shown.

BIN
api/.devcontainer/webdav/advent22/images_auto/creek.jpg (Stored with Git LFS) Normal file

Binary file not shown.

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 858 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 825 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.4 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 MiB

BIN
api/.devcontainer/webdav/advent22/images_auto/shore.jpg (Stored with Git LFS) Normal file

Binary file not shown.

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 MiB

BIN
api/.devcontainer/webdav/advent22/images_manual/3.jpg (Stored with Git LFS) Normal file

Binary file not shown.

4
api/.gitignore vendored
View file

@ -1,3 +1,5 @@
api.*.conf
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
@ -151,5 +153,3 @@ cython_debug/
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
api.conf

View file

@ -21,7 +21,7 @@
"${workspaceFolder}/advent22_api"
],
"env": {
"ADVENT22__WEBDAV__CACHE_TTL": "30"
"ADVENT22__REDIS__CACHE_TTL": "30"
},
"justMyCode": true
}

View file

@ -5,7 +5,7 @@ from fastapi import Depends
from pydantic import BaseModel
from .config import Config, get_config
from .dav.webdav import WebDAV
from .settings import WEBDAV
class DoorSaved(BaseModel):
@ -37,7 +37,7 @@ class CalendarConfig(BaseModel):
Kalender Konfiguration ändern
"""
await WebDAV.write_str(
await WEBDAV.write_str(
path=f"files/{cfg.calendar}",
content=tomli_w.dumps(self.model_dump()),
)
@ -50,5 +50,5 @@ async def get_calendar_config(
Kalender Konfiguration lesen
"""
txt = await WebDAV.read_str(path=f"files/{cfg.calendar}")
txt = await WEBDAV.read_str(path=f"files/{cfg.calendar}")
return CalendarConfig.model_validate(tomllib.loads(txt))

View file

@ -3,8 +3,7 @@ import tomllib
from markdown import markdown
from pydantic import BaseModel, ConfigDict, field_validator
from .dav.webdav import WebDAV
from .settings import SETTINGS, Credentials
from .settings import SETTINGS, WEBDAV, Credentials
from .transformed_string import TransformedString
@ -77,5 +76,5 @@ async def get_config() -> Config:
Globale Konfiguration lesen
"""
txt = await WebDAV.read_str(path=SETTINGS.webdav.config_filename)
txt = await WEBDAV.read_str(path=SETTINGS.webdav.config_filename)
return Config.model_validate(tomllib.loads(txt))

View file

@ -1,8 +1,8 @@
from itertools import chain
from json import JSONDecodeError
from typing import Callable, Hashable
from typing import Any, Callable
import requests
from cachetools.keys import hashkey
from CacheToolsUtils import RedisCache as __RedisCache
from redis.typing import EncodableT, ResponseT
from webdav3.client import Client as __WebDAVclient
@ -11,12 +11,18 @@ from webdav3.client import Client as __WebDAVclient
def davkey(
name: str,
slice: slice = slice(1, None),
) -> Callable[..., tuple[Hashable, ...]]:
def func(*args, **kwargs) -> tuple[Hashable, ...]:
) -> Callable[..., str]:
def func(*args: Any, **kwargs: Any) -> str:
"""Return a cache key for use with cached methods."""
key = hashkey(name, *args[slice], **kwargs)
return hashkey(*(str(key_item) for key_item in key))
call_args = chain(
# positional args
(f"{arg!r}" for arg in args[slice]),
# keyword args
(f"{k}:{v!r}" for k, v in kwargs.items()),
)
return f"{name}({', '.join(call_args)})"
return func

View file

@ -1,108 +1,128 @@
import logging
import re
from dataclasses import dataclass
from io import BytesIO
from typing import Any, Callable
from asyncify import asyncify
from cachetools import cachedmethod
from fastapi import BackgroundTasks
from redis import Redis
from ..settings import SETTINGS
from .helpers import RedisCache, WebDAVclient, davkey
from ...redis_cache import JobsQueue, RedisCache, cached
from .helpers import WebDAVclient
_logger = logging.getLogger(__name__)
@dataclass(kw_only=True, frozen=True, slots=True)
class Settings:
url: str
username: str = "johndoe"
password: str = "s3cr3t!"
class FastAPIQueue:
_tasks: BackgroundTasks
def enqueue(self, task: Callable, *args: Any, **kwargs: Any) -> None:
self._tasks.add_task(task, args=args, kwargs=kwargs)
class WebDAV:
_webdav_client = WebDAVclient(
{
"webdav_hostname": SETTINGS.webdav.url,
"webdav_login": SETTINGS.webdav.auth.username,
"webdav_password": SETTINGS.webdav.auth.password,
}
)
_webdav_client: WebDAVclient
_cache: RedisCache
_cache = RedisCache(
cache=Redis(
host=SETTINGS.redis.host,
port=SETTINGS.redis.port,
db=SETTINGS.redis.db,
protocol=SETTINGS.redis.protocol,
),
ttl=SETTINGS.webdav.cache_ttl,
)
def __init__(
self,
settings: Settings,
redis: Redis,
tasks: JobsQueue,
ttl_sec: int,
) -> None:
try:
self._webdav_client = WebDAVclient(
{
"webdav_hostname": settings.url,
"webdav_login": settings.username,
"webdav_password": settings.password,
}
)
assert self._webdav_client.check() is True
except AssertionError:
raise RuntimeError("WebDAV connection failed!")
self._cache = RedisCache(redis=redis, tasks=tasks, ttl_fresh=ttl_sec)
@classmethod
@asyncify
@cachedmethod(cache=lambda cls: cls._cache, key=davkey("list_files"))
def list_files(
cls,
directory: str = "",
*,
regex: re.Pattern[str] = re.compile(""),
) -> list[str]:
@cached(lambda self: self._cache)
def _list_files(self, directory: str = "") -> list[str]:
"""
List files in directory `directory` matching RegEx `regex`
"""
_logger.debug(f"list_files {directory!r}")
ls = cls._webdav_client.list(directory)
return self._webdav_client.list(directory)
async def list_files(
self,
directory: str = "",
*,
regex: re.Pattern[str] = re.compile(""),
) -> list[str]:
_logger.debug(f"list_files {directory!r} ({regex!r})")
ls = await self._list_files(directory)
return [path for path in ls if regex.search(path)]
@classmethod
@asyncify
@cachedmethod(cache=lambda cls: cls._cache, key=davkey("exists"))
def exists(cls, path: str) -> bool:
@cached(lambda self: self._cache)
def exists(self, path: str) -> bool:
"""
`True` iff there is a WebDAV resource at `path`
"""
_logger.debug(f"file_exists {path!r}")
return cls._webdav_client.check(path)
return self._webdav_client.check(path)
@classmethod
@asyncify
@cachedmethod(cache=lambda cls: cls._cache, key=davkey("read_bytes"))
def read_bytes(cls, path: str) -> bytes:
@cached(lambda self: self._cache)
def read_bytes(self, path: str) -> bytes:
"""
Load WebDAV file from `path` as bytes
"""
_logger.debug(f"read_bytes {path!r}")
buffer = BytesIO()
cls._webdav_client.download_from(buffer, path)
self._webdav_client.download_from(buffer, path)
buffer.seek(0)
return buffer.read()
@classmethod
async def read_str(cls, path: str, encoding="utf-8") -> str:
async def read_str(self, path: str, encoding="utf-8") -> str:
"""
Load WebDAV file from `path` as string
"""
_logger.debug(f"read_str {path!r}")
return (await cls.read_bytes(path)).decode(encoding=encoding).strip()
return (await self.read_bytes(path)).decode(encoding=encoding).strip()
@classmethod
@asyncify
def write_bytes(cls, path: str, buffer: bytes) -> None:
def write_bytes(self, path: str, buffer: bytes) -> None:
"""
Write bytes from `buffer` into WebDAV file at `path`
"""
_logger.debug(f"write_bytes {path!r}")
cls._webdav_client.upload_to(buffer, path)
self._webdav_client.upload_to(buffer, path)
# invalidate cache entry
# explicit slice as there is no "cls" argument
del cls._cache[davkey("read_bytes", slice(0, None))(path)]
# begin slice at 0 (there is no "self" argument)
# del self._cache[davkey("read_bytes", slice(0, None))(path)]
@classmethod
async def write_str(cls, path: str, content: str, encoding="utf-8") -> None:
async def write_str(self, path: str, content: str, encoding="utf-8") -> None:
"""
Write string from `content` into WebDAV file at `path`
"""
_logger.debug(f"write_str {path!r}")
await cls.write_bytes(path, content.encode(encoding=encoding))
await self.write_bytes(path, content.encode(encoding=encoding))

View file

@ -12,7 +12,6 @@ from PIL.ImageFont import FreeTypeFont
from .advent_image import _XY, AdventImage
from .calendar_config import CalendarConfig, get_calendar_config
from .config import Config, get_config
from .dav.webdav import WebDAV
from .helpers import (
RE_TTF,
EventDates,
@ -23,6 +22,7 @@ from .helpers import (
load_image,
set_len,
)
from .settings import WEBDAV
RE_NUM = re.compile(r"/(\d+)\.", flags=re.IGNORECASE)
@ -143,7 +143,7 @@ class TTFont:
@property
async def font(self) -> FreeTypeFont:
return ImageFont.truetype(
font=BytesIO(await WebDAV.read_bytes(self.file_name)),
font=BytesIO(await WEBDAV.read_bytes(self.file_name)),
size=100,
)

View file

@ -11,7 +11,7 @@ from PIL.Image import Image, Resampling
from pydantic import BaseModel
from .config import get_config
from .dav.webdav import WebDAV
from .settings import WEBDAV
T = TypeVar("T")
RE_IMG = re.compile(r"\.(gif|jpe?g|tiff?|png|bmp)$", flags=re.IGNORECASE)
@ -94,7 +94,7 @@ def list_helper(
async def _list_helper() -> list[str]:
return [
f"{directory}/{file}"
for file in await WebDAV.list_files(directory=directory, regex=regex)
for file in await WEBDAV.list_files(directory=directory, regex=regex)
]
return _list_helper
@ -110,10 +110,10 @@ async def load_image(file_name: str) -> Image:
Versuche, Bild aus Datei zu laden
"""
if not await WebDAV.exists(file_name):
if not await WEBDAV.exists(file_name):
raise RuntimeError(f"DAV-File {file_name} does not exist!")
return PILImage.open(BytesIO(await WebDAV.read_bytes(file_name)))
return PILImage.open(BytesIO(await WEBDAV.read_bytes(file_name)))
class ImageData(BaseModel):

View file

@ -1,5 +1,9 @@
from pydantic import BaseModel, Field
from pydantic_settings import BaseSettings, SettingsConfigDict
from redis import ConnectionError, Redis
from .dav.webdav import Settings as WebDAVSettings
from .dav.webdav import WebDAV
class Credentials(BaseModel):
@ -22,7 +26,6 @@ class DavSettings(BaseModel):
password="password",
)
cache_ttl: int = 60 * 10
config_filename: str = "config.toml"
@property
@ -39,10 +42,12 @@ class RedisSettings(BaseModel):
Connection to a redis server.
"""
cache_ttl: int = 60 * 10
host: str = "localhost"
port: int = 6379
db: int = 0
protocol: int = 3
protocol_version: int = 3
class Settings(BaseSettings):
@ -98,3 +103,26 @@ class Settings(BaseSettings):
SETTINGS = Settings()
try:
_REDIS = Redis(
host=SETTINGS.redis.host,
port=SETTINGS.redis.port,
db=SETTINGS.redis.db,
protocol=SETTINGS.redis.protocol_version,
)
_REDIS.ping()
except ConnectionError:
raise RuntimeError("Redis connection failed!")
WEBDAV = WebDAV(
WebDAVSettings(
url=SETTINGS.webdav.url,
username=SETTINGS.webdav.auth.username,
password=SETTINGS.webdav.auth.password,
),
_REDIS,
SETTINGS.redis.cache_ttl,
)

View file

@ -2,14 +2,14 @@ import os
from granian import Granian
from granian.constants import Interfaces, Loops
from pydantic import BaseModel, Field
from pydantic import BaseModel, PositiveInt
from pydantic_settings import BaseSettings, SettingsConfigDict
class WorkersSettings(BaseModel):
per_core: int = Field(1, ge=1)
max: int | None = Field(None, ge=1)
exact: int | None = Field(None, ge=1)
per_core: PositiveInt = 1
max: PositiveInt | None = None
exact: PositiveInt | None = None
@property
def count(self) -> int:

View file

@ -0,0 +1,7 @@
"""Public re-exports for the redis_cache package."""

from .cached import JobsQueue, RedisCache, cached

# Explicit public API of this package.
__all__ = [
"JobsQueue",
"RedisCache",
"cached",
]

View file

@ -0,0 +1,186 @@
import functools
import json
import time
from dataclasses import dataclass
from typing import Any, Awaitable, Callable, Protocol
from redis import Redis
from .helpers import build_cache_key
class JobsQueue(Protocol):
def enqueue(self, task: Callable, *args: Any, **kwargs: Any) -> None: ...
@dataclass(frozen=True, kw_only=True, slots=True)
class RedisCache:
"""
Container for Redis-backed caching configuration.
Attributes:
redis: Redis client instance.
tasks: Background job queue used to refresh stale entries.
prefix: Optional key prefix (defaults to "cache").
ttl_fresh: TTL in seconds for the freshness marker (how long a value is considered fresh).
ttl_stale: TTL in seconds for the cached payload. If None, payload does not expire.
ttl_stale: TTL in seconds for the stampede protection lock.
"""
redis: Redis
tasks: JobsQueue
prefix: str | None = "cache"
ttl_fresh: int = 600
ttl_stale: int | None = None
ttl_lock: int = 5
@dataclass(frozen=True, kw_only=True, slots=True)
class CachedValue[T]:
"""Wrapper for cached content plus freshness flag."""
content: T
fresh: bool
def cached(cache: RedisCache):
"""
Decorator factory to cache function results in Redis with a freshness marker.
On miss, uses a short-lock (SETNX) to ensure only one process recomputes.
When value is stale, returns the stale value and queues a background recompute.
Accepts either:
- a RedisCache instance, or
- a callable taking the instance (self) and returning RedisCache.
If a callable is given, the cache is resolved at call-time using the method's `self`.
"""
def decorator[T](func: Callable[..., T]) -> Callable[..., CachedValue[T]]:
# Keys used in Redis:
# - "<key>:val" -> JSON-serialized value
# - "<key>:fresh" -> existence means fresh (string "1"), TTL = ttl_fresh
# - "<key>:lock" -> short-lived lock to prevent stampede
def _redis_val_key(k: str) -> str:
return f"{k}:val"
def _redis_fresh_key(k: str) -> str:
return f"{k}:fresh"
def _redis_lock_key(k: str) -> str:
return f"{k}:lock"
def _serialize(v: Any) -> str:
return json.dumps(v, default=str)
def _deserialize(s: bytes | str | None) -> Any:
if s is None:
return None
return json.loads(s)
def recompute_value(*args: Any, **kwargs: Any) -> None:
"""
Recompute the function result and store in Redis.
This function is intentionally designed to be enqueued into a background job queue.
"""
# Compute outside of any Redis lock to avoid holding locks during heavy computation.
result = func(*args, **kwargs)
full_key = build_cache_key(func, cache.prefix, *args, **kwargs)
val_key = _redis_val_key(full_key)
fresh_key = _redis_fresh_key(full_key)
# Store payload (with optional ttl_stale)
payload = _serialize(result)
if cache.ttl_stale is None:
# No expiry for payload
cache.redis.set(val_key, payload)
else:
cache.redis.setex(val_key, cache.ttl_stale, payload)
# Create freshness marker with ttl_fresh
cache.redis.setex(fresh_key, cache.ttl_fresh, "1")
# Ensure lock removed if present (best-effort)
try:
cache.redis.delete(_redis_lock_key(full_key))
except Exception:
# swallow: background job should not crash for Redis delete errors
pass
@functools.wraps(func)
def wrapper(*args: Any, **kwargs: Any) -> CachedValue[T]:
"""
Attempt to return cached value.
If missing entirely, try to acquire a short lock and recompute synchronously;
otherwise wait briefly for the recompute or return a fallback (None).
If present but stale, return the stale value and enqueue background refresh.
"""
full_key = build_cache_key(func, cache.prefix, *args, **kwargs)
val_key = _redis_val_key(full_key)
fresh_key = _redis_fresh_key(full_key)
lock_key = _redis_lock_key(full_key)
# Try to get payload and freshness marker
raw_payload = cache.redis.get(val_key)
is_fresh = cache.redis.exists(fresh_key) == 1
if raw_payload is None:
# Cache miss. Try to acquire lock to recompute synchronously.
if cache.redis.setnx(lock_key, "1") == 1:
# Ensure lock TTL so it doesn't persist forever
cache.redis.expire(lock_key, cache.ttl_lock)
try:
# Recompute synchronously and store
value = func(*args, **kwargs)
payload = _serialize(value)
if cache.ttl_stale is not None:
cache.redis.setex(val_key, cache.ttl_stale, payload)
else:
cache.redis.set(val_key, payload)
cache.redis.setex(fresh_key, cache.ttl_fresh, "1")
return CachedValue(content=value, fresh=True)
finally:
# Release lock (best-effort)
try:
cache.redis.delete(lock_key)
except Exception:
pass
else:
# Another process is recomputing. Wait briefly for it to finish.
# Do not wait indefinitely; poll a couple times with small backoff.
wait_deadline = time.time() + cache.ttl_lock
while time.time() < wait_deadline:
time.sleep(0.05)
raw_payload = cache.redis.get(val_key)
if raw_payload is not None:
break
if raw_payload is None:
# Still missing after waiting: compute synchronously as fallback to avoid returning None.
value = func(*args, **kwargs)
payload = _serialize(value)
if cache.ttl_stale is None:
cache.redis.set(val_key, payload)
else:
cache.redis.setex(val_key, cache.ttl_stale, payload)
cache.redis.setex(fresh_key, cache.ttl_fresh, "1")
return CachedValue(content=value, fresh=True)
# If we reach here, raw_payload is present (either from the start or after waiting)
assert not isinstance(raw_payload, Awaitable)
deserialized = _deserialize(raw_payload)
# If fresh marker missing => stale
if not is_fresh:
# Schedule background refresh; do not block caller.
cache.tasks.enqueue(recompute_value, *args, **kwargs)
return CachedValue(content=deserialized, fresh=is_fresh)
return wrapper
return decorator

View file

@ -0,0 +1,63 @@
import inspect
import json
from typing import Any, Callable
def stable_repr(val: Any) -> str:
"""Stable JSON representation for cache key components."""
return json.dumps(val, sort_keys=True, default=str)
def get_canonical_name(item: Any) -> str:
"""Return canonical module.qualname for functions / callables."""
module = getattr(
item,
"__module__",
item.__class__.__module__,
)
qualname = getattr(
item,
"__qualname__",
getattr(item, "__name__", item.__class__.__name__),
)
return f"{module}.{qualname}"
def build_cache_key(
func: Callable,
prefix: str | None,
*args: Any,
**kwargs: Any,
) -> str:
"""
Build a deterministic cache key for func called with args/kwargs.
For bound methods, skips the first parameter if it's named 'self' or 'cls'.
"""
sig = inspect.signature(func)
bound = sig.bind_partial(*args, **kwargs)
bound.apply_defaults()
params = list(sig.parameters.values())
arguments = list(bound.arguments.items())
# Detect methods: if first parameter name is 'self' or 'cls' and it's provided in bound args,
# skip it when building the key.
if params:
first_name = params[0].name
if first_name in ("self", "cls") and first_name in bound.arguments:
arguments = arguments[1:]
arguments_fmt = [
f"{name}={stable_repr(val)}"
for name, val in sorted(arguments, key=lambda kv: kv[0])
]
key_parts = [
get_canonical_name(func),
*arguments_fmt,
]
if prefix is not None:
key_parts = [prefix] + key_parts
return ":".join(key_parts)

View file

@ -1,6 +1,7 @@
from datetime import date
from enum import Enum
from fastapi import APIRouter, Depends
from fastapi import APIRouter, Depends, HTTPException, status
from pydantic import BaseModel
from advent22_api.core.helpers import EventDates
@ -59,7 +60,6 @@ class AdminConfigModel(BaseModel):
class __WebDAV(BaseModel):
url: str
cache_ttl: int
config_file: str
solution: __Solution
@ -113,7 +113,7 @@ async def get_config_model(
"redis": SETTINGS.redis,
"webdav": {
"url": SETTINGS.webdav.url,
"cache_ttl": SETTINGS.webdav.cache_ttl,
"cache_ttl": SETTINGS.redis.cache_ttl,
"config_file": SETTINGS.webdav.config_filename,
},
}
@ -174,16 +174,21 @@ async def put_doors(
await cal_cfg.change(cfg)
class CredentialsName(str, Enum):
DAV = "dav"
UI = "ui"
@router.get("/credentials/{name}")
async def get_credentials(
name: str,
name: CredentialsName,
_: None = Depends(require_admin),
cfg: Config = Depends(get_config),
) -> Credentials:
if name == "dav":
if name == CredentialsName.DAV:
return SETTINGS.webdav.auth
elif name == "ui":
elif name == CredentialsName.UI:
return cfg.admin
else:
return Credentials()
raise HTTPException(status.HTTP_400_BAD_REQUEST)

5
api/api.conf Normal file
View file

@ -0,0 +1,5 @@
ADVENT22__WEBDAV__PROTOCOL="http"
ADVENT22__WEBDAV__HOST="webdav"
ADVENT22__WEBDAV__PATH=""
ADVENT22__WEBDAV__AUTH__USERNAME="wduser"
ADVENT22__WEBDAV__AUTH__PASSWORD="53cr3t!"

View file

@ -36,7 +36,7 @@
"animate.css": "^4.1.1",
"axios": "^1.13.5",
"bulma": "^1.0.4",
"bulma-toast": "2.4.3",
"bulma-toast": "2.4.4",
"eslint": "^10.0.1",
"eslint-config-prettier": "^10.1.8",
"eslint-plugin-oxlint": "~1.49.0",

View file

@ -140,9 +140,6 @@
</BulmaSecret>
</dd>
<dt>Cache-Dauer</dt>
<dd>{{ admin_config_model.webdav.cache_ttl }} s</dd>
<dt>Konfigurationsdatei</dt>
<dd>{{ admin_config_model.webdav.config_file }}</dd>
</dl>
@ -152,10 +149,11 @@
<h3>Sonstige</h3>
<dl>
<dt>Redis</dt>
<dd>Cache-Dauer: {{ admin_config_model.redis.cache_ttl }} s</dd>
<dd>Host: {{ admin_config_model.redis.host }}</dd>
<dd>Port: {{ admin_config_model.redis.port }}</dd>
<dd>Datenbank: {{ admin_config_model.redis.db }}</dd>
<dd>Protokoll: {{ admin_config_model.redis.protocol }}</dd>
<dd>Protokoll: {{ admin_config_model.redis.protocol_version }}</dd>
<dt>UI-Admin</dt>
<dd class="is-family-monospace">
@ -219,14 +217,14 @@ const admin_config_model = ref<AdminConfigModel>({
},
fonts: [{ file: "consetetur", size: 0 }],
redis: {
cache_ttl: 0,
host: "0.0.0.0",
port: 6379,
db: 0,
protocol: 3,
protocol_version: 3,
},
webdav: {
url: "sadipscing elitr",
cache_ttl: 0,
config_file: "sed diam nonumy",
},
});

View file

@ -26,14 +26,14 @@ export interface AdminConfigModel {
};
fonts: { file: string; size: number }[];
redis: {
cache_ttl: number;
host: string;
port: number;
db: number;
protocol: number;
protocol_version: number;
};
webdav: {
url: string;
cache_ttl: number;
config_file: string;
};
}

View file

@ -17,6 +17,7 @@ advent22Store().init();
app.mount("#app");
toast_set_defaults({
message: "",
duration: 10e3,
pauseOnHover: true,
dismissible: true,

View file

@ -2075,7 +2075,7 @@ __metadata:
animate.css: "npm:^4.1.1"
axios: "npm:^1.13.5"
bulma: "npm:^1.0.4"
bulma-toast: "npm:2.4.3"
bulma-toast: "npm:2.4.4"
eslint: "npm:^10.0.1"
eslint-config-prettier: "npm:^10.1.8"
eslint-plugin-oxlint: "npm:~1.49.0"
@ -2311,10 +2311,10 @@ __metadata:
languageName: node
linkType: hard
"bulma-toast@npm:2.4.3":
version: 2.4.3
resolution: "bulma-toast@npm:2.4.3"
checksum: 10c0/40dd9668643338496eb28caca9b772a6002d9c6fbdc5d76237cbdaaa8f56c8ced39965705f92f5d5a29f3e6df57f70a8fa311cf05c14075aacf93f96f7338470
"bulma-toast@npm:2.4.4":
version: 2.4.4
resolution: "bulma-toast@npm:2.4.4"
checksum: 10c0/ccb36b5c632585e9e5bca4b7da7fa5f5e0e87da6244cca580bbb95fc8f3d0dc78d8b279fe0dfe024818baa1c47c2139e50d052447b130fb525ae5ffdb297acfd
languageName: node
linkType: hard