Significantly overhaul configuration management
Everything now uses bog-standard Python dataclasses, with Pydantic providing validation and type conversion through separate classes using its type adapter feature. It's also possible to define your classes using Pydantic's own model type directly, making the type adapter unnecessary, but I didn't want to do things that way because no actual validation is needed when constructing a Song instance for example. Having Pydantic do its thing only on-demand was preferable. I tried a number of validation libraries before settling on Pydantic for this. It's not the fastest option out there (msgspec is, I think), but it makes adding support for third-party types like yarl.URL really easy, it generates a nice clean JSON Schema which is easy enough to adjust to my requirements through its GenerateJsonSchema hooks, and raw speed isn't all that important anyway since this is a single-user desktop program that reads its configuration file once on startup. Also, MessagePack is now mandatory if you're caching to an external service. It just didn't make a whole lot of sense to explicitly install mpd-now-playable's Redis or Memcached support and then use pickling with them. With all this fussing around done, I'm probably finally ready to actually use that configuration file to configure new features! Yay!
This commit is contained in:
parent
3b7ddfa718
commit
27d8c37139
18 changed files with 355 additions and 169 deletions
|
|
@ -1,37 +1,41 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from contextlib import suppress
|
||||
from typing import Any, Optional, TypeVar
|
||||
from typing import Any, Generic, Optional, TypeVar
|
||||
|
||||
from aiocache import Cache
|
||||
from aiocache.serializers import BaseSerializer, PickleSerializer
|
||||
from aiocache.serializers import BaseSerializer
|
||||
from pydantic.type_adapter import TypeAdapter
|
||||
from yarl import URL
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
HAS_ORMSGPACK = False
|
||||
with suppress(ImportError):
|
||||
import ormsgpack
|
||||
|
||||
HAS_ORMSGPACK = True
|
||||
|
||||
|
||||
class OrmsgpackSerializer(BaseSerializer, Generic[T]):
    """aiocache serializer that round-trips values through MessagePack.

    A Pydantic ``TypeAdapter`` handles the conversion between rich Python
    objects and msgpack-native primitives, so values survive a trip to an
    external cache (Redis/Memcached) without resorting to pickling.
    """

    # ormsgpack works on raw bytes; tell aiocache not to apply a str encoding.
    DEFAULT_ENCODING = None

    def __init__(self, schema: TypeAdapter[T]):
        super().__init__()
        # Adapter used to dump values before packing and validate them
        # after unpacking.
        self.schema = schema

    def dumps(self, value: T) -> bytes:
        """Serialize *value* to MessagePack bytes.

        Pydantic first reduces the value to plain Python primitives, since
        ormsgpack only understands msgpack-native types.
        """
        plain = self.schema.dump_python(value)
        return ormsgpack.packb(plain)

    def loads(self, value: Optional[bytes]) -> T | None:
        """Deserialize MessagePack bytes back into a validated ``T``.

        Returns ``None`` unchanged — aiocache passes ``None`` on a cache miss.
        """
        if value is None:
            return None
        return self.schema.validate_python(ormsgpack.unpackb(value))
|
||||
|
||||
|
||||
def make_cache(url: URL, namespace: str = "") -> Cache[T]:
|
||||
def make_cache(schema: TypeAdapter[T], url: URL, namespace: str = "") -> Cache[T]:
|
||||
backend = Cache.get_scheme_class(url.scheme)
|
||||
if backend == Cache.MEMORY:
|
||||
return Cache(backend)
|
||||
|
||||
kwargs: dict[str, Any] = dict(url.query)
|
||||
|
||||
if url.path:
|
||||
|
|
@ -48,6 +52,6 @@ def make_cache(url: URL, namespace: str = "") -> Cache[T]:
|
|||
|
||||
namespace = ":".join(s for s in [kwargs.pop("namespace", ""), namespace] if s)
|
||||
|
||||
serializer = OrmsgpackSerializer if HAS_ORMSGPACK else PickleSerializer
|
||||
serializer = OrmsgpackSerializer(schema)
|
||||
|
||||
return Cache(backend, serializer=serializer(), namespace=namespace, **kwargs)
|
||||
return Cache(backend, serializer=serializer, namespace=namespace, **kwargs)
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue