Compare commits

...

2 Commits

Author SHA1 Message Date
artem 9f5071a7b8 start add refactoring with DB
Gitea Actions Demo / build_and_push (push) Failing after 1m47s Details
2025-12-14 10:12:46 +03:00
artem c3958ecdc3 start add refactoring with DB 2025-12-14 10:12:25 +03:00
287 changed files with 1558 additions and 149 deletions

View File

@ -21,6 +21,12 @@ dependencies = [
"torchvision>=0.24.1",
"types-requests>=2.32.4.20250913",
"types-markdown>=3.10.0.20251106",
"sqlalchemy>=2.0.44",
"inject>=5.3.0",
"aiofiles>=25.1.0",
"botocore>=1.42.9",
"types-aiofiles>=25.1.0.20251011",
"betterconf>=4.5.0",
]
[project.optional-dependencies]
@ -49,7 +55,3 @@ name = "pytorch-cpu"
url = "https://download.pytorch.org/whl/cpu"
explicit = true
[tool.uv.workspace]
members = [
"kivy",
]

41
server/config/__init__.py Normal file
View File

@ -0,0 +1,41 @@
"""Config for application"""
from functools import lru_cache
from betterconf import betterconf, field
from betterconf.caster import to_bool, to_int, to_list
@betterconf
class AppConfig:
    # pylint: disable=R0903
    """
    Application configuration, populated from environment variables.

    Each attribute reads the environment variable named in its `field(...)`
    call; `caster` converts the raw string when the variable is set.
    Some fields pass the default positionally (second argument) — assumed
    equivalent to `default=` in betterconf; TODO confirm.
    """

    # Core application settings.
    app_debug: bool = field("APP_DEBUG", default=False, caster=to_bool)
    app_origin: list = field("APP_ORIGIN", default=[], caster=to_list)
    app_host: str = field("APP_HOST", default="0.0.0.0")
    app_port: int = field("APP_PORT", default=8000, caster=to_int)
    app_public_url: str = field("APP_PUBLIC_URL", default="http://127.0.0.1:8000")
    # Observability.
    sentry_dns: str = field("SENTRY_DNS", default="")
    log_level: str = field("LOG_LEVEL", "INFO")
    # Database (async SQLAlchemy URI; asyncpg driver expected for Postgres).
    db_uri: str = field("DB_URI", "postgresql+asyncpg://svcuser:svcpass@localhost:5432/svc")
    db_pass_salt: str = field("DB_PASS_SALT", "")
    db_search_path: str = field("DB_SEARCH_PATH", "public")
    # File storage: local mount directory and S3 credentials.
    fs_local_mount_dir: str = field("FS_LOCAL_MOUNT_DIR", default="./tmp/files")
    fs_s3_bucket: str = field("FS_S3_BUCKET", "")
    fs_s3_access_key_id: str = field("FS_ACCESS_KEY_ID", "")
    fs_s3_access_key: str = field("FS_SECRET_ACCESS_KEY", "")
    fs_s3_endpoint: str = field("FS_S3_ENDPOINT", "")
@lru_cache
def get_app_config() -> AppConfig:
    """Return the process-wide AppConfig singleton (memoised via lru_cache)."""
    return AppConfig()

33
server/infra/cache.py Normal file
View File

@ -0,0 +1,33 @@
from abc import ABCMeta, abstractmethod
from typing import Optional
class CacheRepository(metaclass=ABCMeta):
    """Abstract async string key/value cache."""

    @abstractmethod
    async def get(self, key: str) -> Optional[str]:
        """Return the cached value for *key*, or None when absent."""

    @abstractmethod
    async def set(self, key: str, data: str, _exp_min: Optional[int] = None):
        """Store *data* under *key*; `_exp_min` is an optional TTL in minutes."""

    @abstractmethod
    async def delete(self, key: str):
        """Remove *key* from the cache; a missing key is not an error."""


# TODO: back this with a shared cache (e.g. Redis); move the Redis client into infra.
class LocalCacheRepository(CacheRepository):
    """In-process, in-memory cache; the `_exp_min` TTL is accepted but ignored."""

    _data: dict[str, str]

    def __init__(self) -> None:
        self._data = {}

    async def get(self, key: str) -> Optional[str]:
        return self._data.get(key)

    async def set(self, key: str, data: str, _exp_min: Optional[int] = None):
        self._data[key] = data

    async def delete(self, key: str):
        # Bug fix: `del self._data[key]` raised KeyError for unknown keys;
        # pop(..., None) makes delete idempotent, mirroring get()'s leniency.
        self._data.pop(key, None)

View File

@ -0,0 +1,5 @@
from server.infra.db.abc import AbstractDB, AbstractSession, ExecuteFun
from server.infra.db.mock import MockDB, MockSession
from server.infra.db.pg import AsyncDB
__all__ = ["AsyncDB", "AbstractDB", "ExecuteFun", "AbstractSession", "MockDB", "MockSession"]

41
server/infra/db/abc.py Normal file
View File

@ -0,0 +1,41 @@
"""Abstract realiztion for DB"""
from typing import Any, AsyncContextManager, Awaitable, Callable, TypeAlias
from server.config import AppConfig
ExecuteFun: TypeAlias = Callable[[Any], Awaitable[None]]
class ConnectError(Exception):
    """Raised when a database engine/session factory cannot be created."""
class AbstractSession:
    """Abstract async DB session: an async context manager with begin/execute/commit."""

    async def __aenter__(self) -> "AbstractSession":
        raise NotImplementedError

    async def __aexit__(self, exc_type, exc, tb):
        raise NotImplementedError

    def begin(self) -> AsyncContextManager:
        """Return an async context manager wrapping a transaction."""
        raise NotImplementedError

    async def execute(self, data: Any):
        """Execute a statement/query within this session."""
        raise NotImplementedError

    async def commit(self):
        """Commit the current transaction."""
        raise NotImplementedError
class AbstractDB:
    """Abstract database facade exposing master/slave session factories."""

    def __init__(self, cnf: AppConfig):
        raise NotImplementedError

    def session_master(self) -> AbstractSession:
        """Session bound to the writable (master) node."""
        raise NotImplementedError

    def session_slave(self) -> AbstractSession:
        """Session bound to a read-only (replica) node."""
        raise NotImplementedError

View File

@ -0,0 +1,11 @@
from typing import Type, TypeVar
from sqlalchemy.orm import registry
# Shared SQLAlchemy imperative-mapping registry for the ORM models.
mapper_registry = registry()

DC = TypeVar("DC")


def dict_to_dataclass(data: dict, class_type: Type[DC]) -> DC:
    """Instantiate *class_type* from *data* by keyword expansion (keys must match fields)."""
    return class_type(**data)

45
server/infra/db/mock.py Normal file
View File

@ -0,0 +1,45 @@
from typing import Any, AsyncContextManager
from server.config import AppConfig
from server.infra.db.abc import AbstractDB, AbstractSession
class MockBeginSession(AbstractSession):
    """No-op transaction context returned by MockSession.begin()."""

    async def __aenter__(self):
        # Returns None: callers use `async with session.begin():` without `as`.
        pass

    async def __aexit__(self, exc_type, exc, tb):
        pass
class MockSession(AbstractSession):
    """Session stub: every operation succeeds and does nothing."""

    async def __aenter__(self) -> "AbstractSession":
        return self

    async def __aexit__(self, exc_type, exc, tb):
        pass

    def begin(self) -> AsyncContextManager:
        return MockBeginSession()

    async def execute(self, data: Any):
        pass

    async def commit(self):
        pass
class MockDB(AbstractDB):
    """Mock database: ignores configuration and hands out no-op sessions."""

    def __init__(self, cnf: AppConfig):
        # Intentionally skips super().__init__ (which raises NotImplementedError).
        pass

    async def __aenter__(self):
        pass

    async def __aexit__(self, exc_type, exc, tb):
        # Bug fix: was misspelled `__aenxit__`, so `async with MockDB(...)`
        # would fail on exit (no __aexit__ defined on this class).
        pass

    def session(self) -> MockSession:
        return MockSession()

38
server/infra/db/pg.py Normal file
View File

@ -0,0 +1,38 @@
"""Postgres DB realization"""
from sqlalchemy.ext import asyncio
from server.config import AppConfig
from server.infra.db.abc import AbstractDB, ConnectError
class AsyncDB(AbstractDB):
    """Async SQLAlchemy engine + session factory for PostgreSQL."""

    engine: asyncio.AsyncEngine
    async_session: asyncio.async_sessionmaker[asyncio.AsyncSession]

    def __init__(self, cnf: AppConfig):
        # asyncpg receives the schema search_path via server_settings;
        # other drivers get no extra connect arguments.
        con_arg = {}
        if "postgresql+asyncpg" in str(cnf.db_uri):
            con_arg = {"server_settings": {"search_path": cnf.db_search_path}}
        self.engine = asyncio.create_async_engine(
            str(cnf.db_uri), echo=bool(cnf.app_debug), connect_args=con_arg, pool_recycle=1800
        )
        if self.engine is None:
            raise ConnectError
        session = asyncio.async_sessionmaker(self.engine, expire_on_commit=False)
        if session is None:
            raise ConnectError
        self.async_session = session

    async def connect(self):
        """connect with DB (no-op: the engine connects lazily on first use)."""

    @property
    def session(self):
        # Bug fix: previously built a brand-new async_sessionmaker on every
        # property access; reuse the factory created in __init__.
        return self.async_session

    def new_session(self):
        """Open a fresh AsyncSession from the shared factory."""
        return self.async_session()

    def session_master(self):
        # Required by the AbstractDB interface and used by repositories
        # (e.g. DBAttachmentRepository); previously unimplemented here, so
        # calls raised NotImplementedError. Single-node setup: master and
        # slave share one engine.
        return self.async_session()

    def session_slave(self):
        """Read session (same engine as master in this single-node setup)."""
        return self.async_session()

View File

@ -0,0 +1,64 @@
"""The logger configuration"""
import logging
from logging import config as log_config
from server import config
# Application config is read once at import time to parameterise log levels.
cnf = config.get_app_config()

LOGGING_CONFIG = {
    "version": 1,
    "disable_existing_loggers": True,
    "formatters": {
        "standard": {
            # NOTE(review): "use_colors" is not a logging.Formatter option;
            # dictConfig silently ignores unknown keys for plain formatters.
            "use_colors": True,
            "format": "%(filename)s:%(lineno)d -> %(asctime)s [%(levelname)s]: %(message)s",
        },
        "uvicorn_default": {
            "()": "uvicorn.logging.DefaultFormatter",
            "format": "%(levelprefix)s %(message)s",
            "use_colors": True,
        },
        "uvicorn_access": {
            "()": "uvicorn.logging.AccessFormatter",
            "format": '%(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s',  # noqa: E501
        },
    },
    "handlers": {
        "default": {
            "level": cnf.log_level,
            "formatter": "standard",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stdout",
        },
        # NOTE(review): declared but not attached to any logger below —
        # confirm whether the root logger should also route to sentry.
        "sentry": {
            "level": "ERROR",
            "class": "sentry_sdk.integrations.logging.EventHandler",
        },
        "uvicorn_default": {
            "level": "INFO",
            "formatter": "uvicorn_default",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stdout",
        },
        "uvicorn_access": {
            "level": "INFO",
            "formatter": "uvicorn_access",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stdout",
        },
    },
    "loggers": {
        # Root logger plus explicit uvicorn loggers, all non-propagating.
        "": {"handlers": ["default"], "level": cnf.log_level, "propagate": False},
        "uvicorn.access": {"handlers": ["uvicorn_access"], "level": "INFO", "propagate": False},
        "uvicorn.error": {"handlers": ["uvicorn_default"], "level": "INFO", "propagate": False},
        "uvicorn.asgi": {"handlers": ["uvicorn_default"], "level": "INFO", "propagate": False},
    },
}
def get_logger() -> logging.Logger:
    """Apply LOGGING_CONFIG and return the root logger.

    Note: dictConfig() is re-applied on every call; combined with
    disable_existing_loggers=True this disables loggers created in between.
    """
    log_config.dictConfig(LOGGING_CONFIG)
    return logging.getLogger()

View File

@ -0,0 +1,21 @@
from server.infra.tools.db.filter import (
Filter,
FilterLeftField,
FilterQuery,
FilterSign,
data_by_filter,
indexes_by_id,
sqlalchemy_conditions,
sqlalchemy_restrictions,
)
__all__ = [
"FilterSign",
"FilterLeftField",
"Filter",
"FilterQuery",
"data_by_filter",
"sqlalchemy_conditions",
"indexes_by_id",
"sqlalchemy_restrictions",
]

View File

@ -0,0 +1,285 @@
from copy import copy
from dataclasses import Field, asdict, dataclass
from enum import Enum
from typing import Any, ClassVar, Optional, Protocol, assert_never
from sqlalchemy import Select, and_, or_
class FilterSign(Enum):
    """Operator carried by a Filter.

    IS / IS_NOT compare against NULL (None); OR / AND combine a nested
    list of filters held in `right`; the rest are binary comparisons.
    """

    NOT_EQ = "not_eq"
    EQ = "eq"
    LT = "lt"
    LE = "le"
    GT = "gt"
    GE = "ge"
    IS = "is"
    IS_NOT = "is_not"
    OR = "or"
    AND = "and"
    IN = "in"
class FilterLeftField(Protocol):
    """Structural type for a filter's left-hand side: anything with a `name` (e.g. an ORM column)."""

    name: str
@dataclass(frozen=True)
class Filter:
    """A single predicate `left <sign> right` (or a nested OR/IN list in `right`)."""

    right: Any
    sign: FilterSign
    left: Optional[FilterLeftField] = None

    @staticmethod
    def not_eq(f1: Any, f2: Any):
        return Filter(left=f1, right=f2, sign=FilterSign.NOT_EQ)

    @staticmethod
    def eq(f1: Any, f2: Any):
        return Filter(left=f1, right=f2, sign=FilterSign.EQ)

    @staticmethod
    def lt(f1: Any, f2: Any):
        return Filter(left=f1, right=f2, sign=FilterSign.LT)

    @staticmethod
    def le(f1: Any, f2: Any):
        return Filter(left=f1, right=f2, sign=FilterSign.LE)

    @staticmethod
    def gt(f1: Any, f2: Any):
        return Filter(left=f1, right=f2, sign=FilterSign.GT)

    @staticmethod
    def ge(f1: Any, f2: Any):
        return Filter(left=f1, right=f2, sign=FilterSign.GE)

    @staticmethod
    def is_none(f1: Any):
        # Bug fix: IS and IS_NOT were swapped here. Both consumers
        # (data_by_filter and sqlalchemy_conditions) interpret
        # FilterSign.IS as "IS NULL" and IS_NOT as "IS NOT NULL".
        return Filter(left=f1, right=None, sign=FilterSign.IS)

    @staticmethod
    def is_not_none(f1: Any):
        return Filter(left=f1, right=None, sign=FilterSign.IS_NOT)

    @staticmethod
    def or_(f1: list["Filter"]):
        return Filter(left=None, right=f1, sign=FilterSign.OR)

    @staticmethod
    def in_(f1: Any, f2: list[Any]):
        return Filter(left=f1, right=f2, sign=FilterSign.IN)
@dataclass
class RestrictionField:
    """One sort key: the column/field plus a direction string ("desc" or ascending otherwise)."""

    field: Any
    direction: str
@dataclass
class QueryRestriction:
    """Optional extra constraints that can be merged into a FilterQuery."""

    filters: Optional[list[Filter]] = None
    limit: Optional[int] = None
    offset: Optional[int] = None
    sort: Optional[list[RestrictionField]] = None
@dataclass(frozen=False)
class FilterQuery:
    """A list of filters plus optional limit/offset/sort restrictions."""

    filters: list[Filter]
    limit: Optional[int] = None
    offset: Optional[int] = None
    sort: Optional[list[RestrictionField]] = None

    @staticmethod
    def mass_and(fields: list[object], values: list[Any]) -> "FilterQuery":
        """AND of field == value pairs, zipped positionally."""
        return FilterQuery(filters=[Filter.eq(field, val) for field, val in zip(fields, values)])

    @staticmethod
    def mass_or(fields: list[object], values: list[Any]) -> "FilterQuery":
        """OR of field == value pairs, zipped positionally."""
        return FilterQuery(filters=[Filter.or_([Filter.eq(field, val) for field, val in zip(fields, values)])])

    @staticmethod
    def eq(field: object, value: Any) -> "FilterQuery":
        return FilterQuery(filters=[Filter.eq(field, value)])

    @staticmethod
    def in_(field: object, value: list[Any]) -> "FilterQuery":
        return FilterQuery(filters=[Filter.in_(field, value)])

    def add_and(self, field: object, value: Any):
        """Append an additional field == value condition (AND semantics)."""
        self.filters.append(Filter.eq(field, value))

    def add_query_restrictions(self, q_restriction: Optional[QueryRestriction] = None):
        """Merge *q_restriction* into this query; None is a no-op."""
        if not q_restriction:
            return None
        if q_restriction.limit:
            self.limit = q_restriction.limit
        if q_restriction.offset:
            self.offset = q_restriction.offset
        if q_restriction.sort:
            self.sort = q_restriction.sort
        if q_restriction.filters:
            self.filters += q_restriction.filters
        return None

    # Backward-compatible alias preserving the original misspelled name.
    add_query_restistions = add_query_restrictions
class DataclassInstance(Protocol):
    """Structural type matching any dataclass instance (has __dataclass_fields__)."""

    __dataclass_fields__: ClassVar[dict[str, Field[Any]]]
async def indexes_by_id(input_data: list, values: list[str], id_name="id") -> Optional[list[int]]:
    """Return the positions of items whose *id_name* attribute is in *values*.

    Returns None (not an empty list) when nothing matches.
    """
    matches = [pos for pos, item in enumerate(input_data) if getattr(item, id_name) in values]
    return matches or None
def data_by_filter[T: DataclassInstance](input_data: list[T], q: FilterQuery) -> list[T]:
    """In-memory evaluation of a FilterQuery over a list of dataclass instances.

    Returns copies of the matching items. OR/AND sub-filters are evaluated by
    recursion over the full input and intersected with the direct matches at
    the end (set-based, so items must be hashable, e.g. frozen dataclasses).
    """
    # can't do query AND(OR() + AND())
    data: list[T] = []
    data_or: list[T] = []
    data_and: list[T] = []
    is_found = False
    for d in input_data:
        dict_class = asdict(d)
        for f in q.filters:
            # Nested OR: each branch evaluated independently over the whole input.
            if f.sign == FilterSign.OR:
                for f_or in f.right:
                    data_or += data_by_filter(input_data, q=FilterQuery(filters=[f_or]))
                continue
            if f.sign == FilterSign.AND:
                data_and += data_by_filter(input_data, q=FilterQuery(filters=f.right))
                continue
            if f.left is None:
                continue
            # NOTE(review): these two guards jointly break on ANY None value —
            # for sign == IS the second check still triggers (IS != IS_NOT and
            # value is None), so IS filters can never match. Likely intended:
            # `if f.sign not in (FilterSign.IS, FilterSign.IS_NOT) and ... is None`.
            if f.sign != FilterSign.IS and dict_class.get(f.left.name) is None:
                break
            if f.sign != FilterSign.IS_NOT and dict_class.get(f.left.name) is None:
                break
            # Each arm: continue to next filter when the predicate holds,
            # break (reject this item) otherwise.
            match f.sign:
                case FilterSign.NOT_EQ:
                    is_found = dict_class.get(f.left.name) != f.right
                    if is_found:
                        continue
                    break
                case FilterSign.EQ:
                    is_found = dict_class.get(f.left.name) == f.right
                    if is_found:
                        continue
                    break
                case FilterSign.LT:
                    is_found = dict_class.get(f.left.name) < f.right
                    if is_found:
                        continue
                    break
                case FilterSign.LE:
                    is_found = dict_class.get(f.left.name) <= f.right
                    if is_found:
                        continue
                    break
                case FilterSign.GT:
                    is_found = dict_class.get(f.left.name) > f.right
                    if is_found:
                        continue
                    break
                case FilterSign.GE:
                    is_found = dict_class.get(f.left.name) >= f.right
                    if is_found:
                        continue
                    break
                case FilterSign.IS:
                    is_found = dict_class.get(f.left.name) is None
                    if is_found:
                        continue
                    break
                case FilterSign.IS_NOT:
                    is_found = dict_class.get(f.left.name) is not None
                    if is_found:
                        continue
                    break
                case FilterSign.IN:
                    is_found = dict_class.get(f.left.name) in f.right
                    if is_found:
                        continue
                    break
                case _ as arg:
                    assert_never(arg)
        if is_found:
            data.append(copy(d))
    if q.limit:
        limit = q.limit
        # NOTE(review): clamping to len(data) - 1 drops the final element when
        # limit >= len(data) — looks like an off-by-one; confirm intent.
        if limit > len(data) - 1:
            limit = len(data) - 1
        return data[:limit]
    if data_and:
        if not data:
            data = data_and
        else:
            data = list(set(data) & set(data_and))
    if data_or:
        if not data:
            data = data_or
        else:
            data = list(set(data) & set(data_or))
    return data
def sqlalchemy_conditions(q: FilterQuery):
conditions = []
for f in q.filters:
if f.sign == FilterSign.OR:
conditions.append(or_(*sqlalchemy_conditions(q=FilterQuery(filters=f.right))))
continue
if f.sign == FilterSign.AND:
conditions.append(and_(*sqlalchemy_conditions(q=FilterQuery(filters=f.right))))
continue
if f.left is None:
continue
match f.sign:
case FilterSign.NOT_EQ:
conditions.append(f.left != f.right)
case FilterSign.EQ:
conditions.append(f.left == f.right)
case FilterSign.LT:
conditions.append(f.left < f.right)
case FilterSign.LE:
if f.left is None:
continue
conditions.append(f.left <= f.right)
case FilterSign.GT:
conditions.append(f.left > f.right)
case FilterSign.GE:
conditions.append(f.left >= f.right)
case FilterSign.IS:
conditions.append(f.left.is_(None)) # type: ignore
case FilterSign.IS_NOT:
conditions.append(f.left.is_not(None)) # type: ignore
case FilterSign.IN:
conditions.append(f.left.in_(f.right)) # type: ignore
case _ as arg:
assert_never(arg)
return conditions
def sqlalchemy_restrictions(f: FilterQuery, q: Select, dict_to_sort: Optional[dict] = None) -> Select:
    """Apply limit/offset/sort from *f* onto the Select *q*.

    *dict_to_sort* optionally maps str(field) to the actual sortable column.
    """
    if f.limit:
        q = q.limit(f.limit)
    if f.offset:
        q = q.offset(f.offset)
    if f.sort:
        for s in f.sort:
            field = s.field
            if dict_to_sort and str(s.field) in dict_to_sort:
                if s.direction == "desc":
                    q = q.order_by(dict_to_sort[str(s.field)].desc())  # type: ignore
                    continue
                # NOTE(review): this early return applies only the first
                # ascending mapped sort key and silently drops the remaining
                # entries of f.sort — confirm whether it should `continue`.
                return q.order_by(dict_to_sort[str(s.field)])  # type: ignore
            if s.direction == "desc":
                field = s.field.desc()  # type: ignore
            q = q.order_by(field)  # type: ignore
    return q

View File

@ -0,0 +1,5 @@
from server.infra.web.description import DescriptionController
from server.infra.web.seo import SeoController
from server.infra.web.recognizer import BreedsController
__all__ = ("DescriptionController", "SeoController", "BreedsController")

View File

@ -0,0 +1,70 @@
import asyncio
from pathlib import Path
import os
import inject
import markdown
from litestar import (
Controller,
get,
post,
MediaType,
Litestar,
)
from litestar.enums import RequestEncodingType
from litestar.datastructures import UploadFile
from litestar.params import Body
from litestar.exceptions import HTTPException
from litestar.contrib.jinja import JinjaTemplateEngine
from litestar.template.config import TemplateConfig
from litestar.response import Template
from litestar.static_files import create_static_files_router
from server.infra import logger
from server.config import get_app_config
from server.infra.cache import LocalCacheRepository
from server.infra.db import AsyncDB
from server.modules.descriptions import CharactersService, Breed, CharactersRepository
from server.modules.recognizer import RecognizerService, RecognizerRepository
class DescriptionController(Controller):
    """HTML pages: landing, cats, contacts, donate, and breed characteristics."""

    path = "/"

    @get("/")
    async def dogs(self) -> Template:
        return Template(template_name="dogs.html")

    @get("/cats")
    async def cats(self) -> Template:
        return Template(template_name="cats.html")

    @get("/contacts")
    async def contacts(self) -> Template:
        return Template(template_name="contacts.html")

    @get("/donate")
    async def donate(self) -> Template:
        return Template(template_name="donate.html")

    @get("/dogs-characteristics")
    async def dogs_characteristics(self) -> Template:
        # NOTE(review): `characters_service` is not defined in this module
        # (it was module-level state in server.py) — this will raise
        # NameError at request time; presumably meant to come via `inject`.
        breeds = await characters_service.get_characters()
        return Template(
            template_name="dogs-characteristics.html", context={"breeds": breeds}
        )

    @get("/dogs-characteristics/{name:str}")
    async def beer_description(self, name: str) -> Template:
        breed = await characters_service.get_character(name)
        if breed is None:
            raise HTTPException(status_code=404, detail="Порода не найдена")
        # NOTE(review): `recognizer_service` is likewise undefined in this module.
        images = await recognizer_service.images_dogs()
        return Template(
            template_name="beers-description.html",
            context={
                "text": markdown.markdown(breed.description),
                "title": breed.name,
                "images": [f"/static/assets/dog/{name}/{i}" for i in images[name]],
            },
        )

View File

@ -0,0 +1,45 @@
import asyncio
from pathlib import Path
import os
import inject
import markdown
from litestar import (
Controller,
get,
post,
MediaType,
Litestar,
)
from litestar.enums import RequestEncodingType
from litestar.datastructures import UploadFile
from litestar.params import Body
from litestar.exceptions import HTTPException
from litestar.contrib.jinja import JinjaTemplateEngine
from litestar.template.config import TemplateConfig
from litestar.response import Template
from litestar.static_files import create_static_files_router
from server.infra import logger
from server.config import get_app_config
from server.infra.cache import LocalCacheRepository
from server.infra.db import AsyncDB
from server.modules.descriptions import CharactersService, Breed, CharactersRepository
from server.modules.recognizer import RecognizerService, RecognizerRepository
class BreedsController(Controller):
    """Breed-recognition endpoints: accept an uploaded image, return predictions."""

    path = "/beerds"

    @post("/dogs")
    async def beerds_dogs(
        self, data: UploadFile = Body(media_type=RequestEncodingType.MULTI_PART)
    ) -> dict:
        # NOTE(review): `recognizer_service` is not defined in this module
        # (it was module-level state in server.py) — NameError at request
        # time; presumably meant to be provided via `inject`.
        body = await data.read()
        return await recognizer_service.predict_dog_image(body)

    @post("/cats")
    async def beerds_cats(
        self, data: UploadFile = Body(media_type=RequestEncodingType.MULTI_PART)
    ) -> dict:
        body = await data.read()
        return await recognizer_service.predict_cat_image(body)

80
server/infra/web/seo.py Normal file
View File

@ -0,0 +1,80 @@
import asyncio
from pathlib import Path
import os
import inject
import markdown
from litestar import (
Controller,
get,
post,
MediaType,
Litestar,
)
from litestar.enums import RequestEncodingType
from litestar.datastructures import UploadFile
from litestar.params import Body
from litestar.exceptions import HTTPException
from litestar.contrib.jinja import JinjaTemplateEngine
from litestar.template.config import TemplateConfig
from litestar.response import Template
from litestar.static_files import create_static_files_router
from server.infra import logger
from server.config import get_app_config
from server.infra.cache import LocalCacheRepository
from server.infra.db import AsyncDB
from server.modules.descriptions import CharactersService, Breed, CharactersRepository
from server.modules.recognizer import RecognizerService, RecognizerRepository
class SeoController(Controller):
    """SEO endpoints: sitemap.xml (static pages + per-breed URLs) and robots.txt.

    No `path` attribute is set, so routes mount at the Litestar default ("/").
    """

    @get("/sitemap.xml", media_type=MediaType.XML)
    async def sitemaps(self) -> bytes:
        # NOTE(review): `characters_service` is not defined in this module —
        # NameError at request time; presumably to be provided via `inject`.
        breeds: list[Breed] = await characters_service.get_characters()
        # Hard-coded lastmod for every URL — consider deriving from content.
        lastmod = "2025-10-04T19:01:03+00:00"
        beers_url = ""
        for b in breeds:
            beers_url += f"""
    <url>
        <loc>https://xn-----6kcp3cadbabfh8a0a.xn--p1ai/dogs-characteristics/{b.alias}</loc>
        <lastmod>{lastmod}</lastmod>
    </url>
    """
        return f"""<?xml version="1.0" encoding="UTF-8"?>
<urlset
    xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://www.sitemaps.org/schemas/sitemap/0.9
            http://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd">
    <!-- created with Free Online Sitemap Generator www.xml-sitemaps.com -->
    <url>
        <loc>https://xn-----6kcp3cadbabfh8a0a.xn--p1ai/</loc>
        <lastmod>{lastmod}</lastmod>
    </url>
    <url>
        <loc>https://xn-----6kcp3cadbabfh8a0a.xn--p1ai/cats</loc>
        <lastmod>{lastmod}</lastmod>
    </url>
    <url>
        <loc>https://xn-----6kcp3cadbabfh8a0a.xn--p1ai/donate</loc>
        <lastmod>{lastmod}</lastmod>
    </url>
    <url>
        <loc>https://xn-----6kcp3cadbabfh8a0a.xn--p1ai/dogs-characteristics</loc>
        <lastmod>{lastmod}</lastmod>
    </url>
    {beers_url}
</urlset>
""".encode()

    @get("/robots.txt", media_type=MediaType.TEXT)
    async def robots(self) -> str:
        return """
User-agent: *
Allow: /
Sitemap: https://xn-----6kcp3cadbabfh8a0a.xn--p1ai/sitemap.xml
"""

View File

@ -1,6 +1,8 @@
import asyncio
from pathlib import Path
import os
import inject
import markdown
from litestar import (
Controller,
@ -18,137 +20,45 @@ from litestar.template.config import TemplateConfig
from litestar.response import Template
from litestar.static_files import create_static_files_router
from server.services.descriptions import CharactersService, Breed, CharactersRepository
from server.services.recognizer import RecognizerService, RecognizerRepository
from server.infra import logger
from server.config import get_app_config
from server.infra.web import BreedsController, DescriptionController, SeoController
from server.infra.db import AsyncDB
from server.modules.descriptions import CharactersService, Breed, CharactersRepository
from server.modules.recognizer import RecognizerService, RecognizerRepository
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
recognizer_service = RecognizerService(RecognizerRepository())
characters_service = CharactersService(CharactersRepository())
loop = asyncio.new_event_loop()
def inject_config(binder: inject.Binder):
"""initialization inject_config for server FastApi"""
loop.run_until_complete(db.connect())
cnf = get_app_config()
db = AsyncDB(cnf)
binder.bind(RecognizerService, RecognizerService(RecognizerRepository()))
binder.bind(CharactersService, CharactersService(CharactersRepository()))
class BreedsController(Controller):
    """Breed-recognition endpoints (legacy in-file copy).

    NOTE(review): duplicates BreedsController in server.infra.web.recognizer
    and collides with the name imported from server.infra.web above — this
    looks like pre-refactor residue; confirm which definition should remain.
    """

    path = "/beerds"

    @post("/dogs")
    async def beerds_dogs(
        self, data: UploadFile = Body(media_type=RequestEncodingType.MULTI_PART)
    ) -> dict:
        body = await data.read()
        return await recognizer_service.predict_dog_image(body)

    @post("/cats")
    async def beerds_cats(
        self, data: UploadFile = Body(media_type=RequestEncodingType.MULTI_PART)
    ) -> dict:
        body = await data.read()
        return await recognizer_service.predict_cat_image(body)
class DescriptionController(Controller):
    """Page + SEO controller (legacy in-file copy).

    NOTE(review): duplicates DescriptionController/SeoController in
    server.infra.web and collides with the name imported from
    server.infra.web above — pre-refactor residue; confirm which survives.
    """

    path = "/"

    @get("/")
    async def dogs(self) -> Template:
        return Template(template_name="dogs.html")

    @get("/cats")
    async def cats(self) -> Template:
        return Template(template_name="cats.html")

    @get("/contacts")
    async def contacts(self) -> Template:
        return Template(template_name="contacts.html")

    @get("/donate")
    async def donate(self) -> Template:
        return Template(template_name="donate.html")

    @get("/dogs-characteristics")
    async def dogs_characteristics(self) -> Template:
        breeds = await characters_service.get_characters()
        return Template(
            template_name="dogs-characteristics.html", context={"breeds": breeds}
        )

    @get("/dogs-characteristics/{name:str}")
    async def beer_description(self, name: str) -> Template:
        breed = await characters_service.get_character(name)
        if breed is None:
            raise HTTPException(status_code=404, detail="Порода не найдена")
        images = await recognizer_service.images_dogs()
        return Template(
            template_name="beers-description.html",
            context={
                "text": markdown.markdown(breed.description),
                "title": breed.name,
                "images": [f"/static/assets/dog/{name}/{i}" for i in images[name]],
            },
        )

    @get("/sitemap.xml", media_type=MediaType.XML)
    async def sitemaps(self) -> bytes:
        breeds: list[Breed] = await characters_service.get_characters()
        # Hard-coded lastmod applied to every URL.
        lastmod = "2025-10-04T19:01:03+00:00"
        beers_url = ""
        for b in breeds:
            beers_url += f"""
    <url>
        <loc>https://xn-----6kcp3cadbabfh8a0a.xn--p1ai/dogs-characteristics/{b.alias}</loc>
        <lastmod>{lastmod}</lastmod>
    </url>
    """
        return f"""<?xml version="1.0" encoding="UTF-8"?>
<urlset
    xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://www.sitemaps.org/schemas/sitemap/0.9
            http://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd">
    <!-- created with Free Online Sitemap Generator www.xml-sitemaps.com -->
    <url>
        <loc>https://xn-----6kcp3cadbabfh8a0a.xn--p1ai/</loc>
        <lastmod>{lastmod}</lastmod>
    </url>
    <url>
        <loc>https://xn-----6kcp3cadbabfh8a0a.xn--p1ai/cats</loc>
        <lastmod>{lastmod}</lastmod>
    </url>
    <url>
        <loc>https://xn-----6kcp3cadbabfh8a0a.xn--p1ai/donate</loc>
        <lastmod>{lastmod}</lastmod>
    </url>
    <url>
        <loc>https://xn-----6kcp3cadbabfh8a0a.xn--p1ai/dogs-characteristics</loc>
        <lastmod>{lastmod}</lastmod>
    </url>
    {beers_url}
</urlset>
""".encode()

    @get("/robots.txt", media_type=MediaType.TEXT)
    async def robots(self) -> str:
        return """
User-agent: *
Allow: /
Sitemap: https://xn-----6kcp3cadbabfh8a0a.xn--p1ai/sitemap.xml
"""
app = Litestar(
    debug=True,
    route_handlers=[
        BreedsController,
        DescriptionController,
        create_static_files_router(path="/static", directories=["server/static"]),
    ],
    template_config=TemplateConfig(
        directory=Path("server/templates"),
        engine=JinjaTemplateEngine,
    ),
)

# NOTE(review): `app` is rebound below with an extra SeoController route and
# the inject configuration only runs under `__main__` — the module-level app
# above (used by ASGI servers importing this module) gets neither. This looks
# like diff residue from the refactor; confirm a single app definition.
if __name__ == "__main__":
    inject.configure(inject_config)
    app = Litestar(
        debug=True,
        route_handlers=[
            BreedsController,
            DescriptionController,
            SeoController,
            create_static_files_router(path="/static", directories=["server/static"]),
        ],
        template_config=TemplateConfig(
            directory=Path("server/templates"),
            engine=JinjaTemplateEngine,
        ),
    )

View File

View File

@ -0,0 +1,31 @@
"""
Working with media files - uploading, storing, receiving
"""
from server.modules.attachments.domains.attachments import Attachment
from server.modules.attachments.repositories.attachments import (
AttachmentRepository,
DBAttachmentRepository,
MockAttachmentRepository,
)
from server.modules.attachments.services.attachment import (
AtachmentService,
LocalStorageDriver,
MediaType,
MockStorageDriver,
S3StorageDriver,
StorageDriversType,
)
__all__ = [
"AtachmentService",
"Attachment",
"AttachmentRepository",
"MockAttachmentRepository",
"DBAttachmentRepository",
"LocalStorageDriver",
"MockStorageDriver",
"S3StorageDriver",
"MediaType",
"StorageDriversType",
]

View File

@ -0,0 +1,17 @@
from dataclasses import dataclass
from datetime import UTC, datetime
@dataclass(frozen=True)
class Attachment:
    """Stored media file metadata (immutable record)."""

    id: str
    created_at: datetime
    updated_at: datetime
    # File size in bytes.
    size: int
    # Which StorageDriver holds the payload (see StorageDriversType values).
    storage_driver_name: str
    # Driver-specific location of the payload (filesystem path, S3 key, ...).
    path: str
    media_type: str
    created_by: str
    # MIME type, e.g. "audio/mp3".
    content_type: str
    # Soft-delete flag.
    is_deleted: bool = False

View File

@ -0,0 +1,155 @@
from abc import ABCMeta, abstractmethod
from datetime import UTC, datetime
from typing import Tuple
from sqlalchemy import CursorResult, delete, insert, select, update
from server.config import get_app_config
from server.infra.db import AbstractDB, AbstractSession, AsyncDB, MockDB
from server.modules.attachments.domains.attachments import Attachment
class AttachmentRepository(metaclass=ABCMeta):
    """Abstract persistence layer for Attachment records."""

    @abstractmethod
    async def get_by_id(self, session: AbstractSession, attach_id: list[str]) -> list[Attachment]:
        """Get Attachment by ID"""
        pass

    @abstractmethod
    async def create(self, data: Attachment):
        """Create entry in DB"""
        pass

    @abstractmethod
    async def delete(self, attach_id: str):
        """Delete the attachment record with the given id."""
        pass

    @abstractmethod
    async def update(self, attach_id: str, **kwargs):
        """Update the given fields of an attachment record."""
        pass

    @abstractmethod
    def db(self) -> AbstractDB:
        """Expose the backing database facade."""
        pass
class MockAttachmentRepository(AttachmentRepository):
    """In-memory repository seeded with a few audio attachments (for tests)."""

    _data: dict[str, Attachment]

    def __init__(self):
        # Bug fix: Attachment requires created_at/updated_at (no defaults),
        # so the original seed literals raised TypeError on construction.
        # The seed entries differ only in id/path, so build them in a loop.
        now = datetime.now(UTC)
        self._data = {
            a_id: Attachment(
                id=a_id,
                created_at=now,
                updated_at=now,
                size=0,
                storage_driver_name="mock",
                path=a_id,
                media_type="audio",
                created_by="created_by",
                content_type="audio/mp3",
            )
            for a_id in ("jian4", "zai4", "cheng2", "shi4", "nv3")
        }
        self._db = MockDB(get_app_config())

    async def get_by_id(self, session: AbstractSession, attach_id: list[str]) -> list[Attachment]:
        """Return attachments for the known ids; unknown ids are skipped silently."""
        found: list[Attachment] = []
        for a_id in attach_id:
            item = self._data.get(a_id)
            if item is None:
                continue
            found.append(item)
        return found

    async def create(self, data: Attachment):
        self._data[data.id] = data

    async def delete(self, attach_id: str):
        del self._data[attach_id]

    async def update(self, attach_id: str, **kwargs):
        # Updates are a no-op in the mock.
        pass

    def db(self) -> AbstractDB:
        return self._db
class DBAttachmentRepository(AttachmentRepository):
    """Attachment repository backed by async SQLAlchemy sessions.

    NOTE(review): select/insert against the Attachment dataclass implies it is
    imperatively mapped to a table elsewhere (mapper_registry) — confirm.
    """

    _db: AsyncDB

    def __init__(self, db: AsyncDB):
        self._db = db

    async def get_by_id(self, session: AbstractSession, attach_id: list[str]) -> list[Attachment]:
        """Load attachments by id using the caller-supplied session."""
        q = select(Attachment).where(
            Attachment.id.in_(attach_id)  # type: ignore
        )
        attachment: list[Attachment] = []
        result: CursorResult[Tuple[Attachment]] = await session.execute(q)  # type: ignore
        for d in result.all():
            attachment.append(d[0])
        return attachment

    async def create(self, data: Attachment):
        """Insert a new attachment row; returns its id."""
        # NOTE(review): the Attachment dataclass visible in this package has no
        # `to_serializable` method — verify it is added by the mapping layer,
        # otherwise this raises AttributeError.
        new_dict = data.to_serializable(delete_private=True, time_to_str=False)
        insert_data = insert(Attachment).values(**new_dict)
        async with self._db.session_master() as session:
            async with session.begin():
                await session.execute(insert_data)
                await session.commit()
        return data.id

    async def delete(self, attach_id: str):
        """Hard-delete the attachment row (is_deleted soft flag is not used here)."""
        q = delete(Attachment).where(
            Attachment.id == attach_id  # type: ignore
        )
        async with self._db.session_master() as session:
            async with session.begin():
                await session.execute(q)
                await session.commit()

    async def update(self, attach_id: str, **kwargs):
        """Update the given fields, stamping updated_at with the current UTC time."""
        kwargs["updated_at"] = datetime.now(UTC)
        q = update(Attachment).values(kwargs).where(Attachment.id == attach_id)  # type: ignore
        async with self._db.session_master() as session:
            async with session.begin():
                await session.execute(q)
                await session.commit()

    def db(self) -> AbstractDB:
        return self._db

View File

@ -0,0 +1,351 @@
import hashlib
import hmac
import os.path
from abc import ABCMeta, abstractmethod
from datetime import UTC, datetime
from enum import Enum
from http import HTTPStatus
from io import BytesIO
from pathlib import Path
from typing import Any, AsyncIterable, AsyncIterator, Optional
from urllib.parse import urlparse
import uuid
import aioboto3 # type: ignore
import aiofiles
import magic # type: ignore
from botocore.client import BaseClient # type: ignore
from botocore.exceptions import ClientError # type: ignore
from PIL import Image
from server.config import AppConfig, get_app_config
from server.infra.db import AbstractSession
from server.infra.logger import get_logger
from server.modules.attachments.domains.attachments import Attachment
from server.modules.attachments.repositories.attachments import AttachmentRepository
class StorageDriversType(str, Enum):
    """Names of the available storage backends (stored on Attachment records)."""

    MOCK = "mock"
    LOCAL = "local"
    S3 = "s3"
class MediaType(str, Enum):
    """Coarse media classification for attachments."""

    AUDIO = "audio"
    VIDEO = "video"
    IMAGE = "image"
    SVG_IMAGE = "svg_image"
    OTHER = "other"
class StorageDriver(metaclass=ABCMeta):
    """Abstract blob store: put bytes, get them back by the returned path."""

    @abstractmethod
    def get_name(self) -> str:
        """Return the driver's StorageDriversType value."""
        pass

    @abstractmethod
    async def put(self, data: bytes) -> str:
        """Store *data* and return a driver-specific path/key for retrieval."""
        pass

    @abstractmethod
    async def take(self, path: str) -> Optional[bytes]:
        """Return the stored bytes, or None when *path* is unknown."""
        pass

    @abstractmethod
    async def stream(self, path: str) -> AsyncIterable[Any]:
        """Yield the stored bytes in chunks."""
        pass

    @abstractmethod
    async def delete(self, path: str):
        """Remove the stored blob."""
        pass
class LocalStorageDriver(StorageDriver):
    """Stores files on the local filesystem under ``FS_LOCAL_MOUNT_DIR``.

    Files are grouped into per-day subdirectories and named by the
    SHA-256 digest of their content, which makes writes idempotent.
    """

    _mount_dir: Path

    def __init__(self) -> None:
        self._mount_dir = Path(get_app_config().fs_local_mount_dir)  # type: ignore

    def get_name(self) -> str:
        """Return the identifier of this backend ("local")."""
        return StorageDriversType.LOCAL.value

    async def put(self, data: bytes) -> str:
        """Persist *data* and return the path it was stored under.

        Returns:
            The filesystem path of the stored file as a string.
        """
        dir_path = self._mount_dir / datetime.now(UTC).strftime("%d")
        # Bug fix: os.mkdir() failed when the mount dir itself was missing
        # and raced with concurrent creation; mkdir(parents=True,
        # exist_ok=True) handles both cases.
        dir_path.mkdir(parents=True, exist_ok=True)
        path = dir_path / hashlib.file_digest(BytesIO(data), "sha256").hexdigest()
        # Content-addressed name: identical bytes map to an identical
        # path, so an existing complete file can be reused as-is.
        if path.is_file() and path.stat().st_size == len(data):
            return str(path)
        async with aiofiles.open(path, "wb") as f:
            await f.write(data)
        return str(path)

    async def take(self, path: str) -> Optional[bytes]:
        """Return the file's bytes, or ``None`` when it does not exist."""
        if not os.path.isfile(path):
            return None
        async with aiofiles.open(path, "rb") as f:
            return await f.read()

    async def stream(self, path: str):
        """Yield the file's content in 1 KiB chunks."""
        async with aiofiles.open(path, mode="rb") as f:
            while chunk := await f.read(1024):
                yield chunk

    async def delete(self, path: str):
        """Remove the file if it exists; missing files are ignored."""
        if not os.path.isfile(path):
            return
        os.remove(path)
class MockStorageDriver(StorageDriver):
    """In-memory storage backend intended for tests."""

    _store: dict[str, bytes]

    def __init__(self) -> None:
        self._store = {}

    def get_name(self) -> str:
        """Return the identifier of this backend ("mock")."""
        return StorageDriversType.MOCK.value

    async def stream(self, path: str) -> AsyncIterable:
        """Streaming is not supported by the mock backend."""
        raise NotImplementedError

    async def put(self, data: bytes) -> str:
        """Store *data* under a freshly generated key and return it."""
        key = str(uuid.uuid4())
        self._store[key] = data
        return key

    async def take(self, path: str) -> Optional[bytes]:
        """Return the bytes stored at *path*, or ``None`` when unknown."""
        return self._store.get(path)

    async def delete(self, path: str):
        """Drop *path* from the store; raises ``KeyError`` when unknown."""
        self._store.pop(path)
class S3StorageDriver(StorageDriver):
    """Stores files in an S3-compatible bucket via aioboto3."""

    # Key prefix — presumably kept for compatibility with data migrated
    # from a k8s PVC; confirm before changing.
    _prefix: str = "pvc-435e5137-052f-43b1-ace2-e350c9d50c76"

    def __init__(self, cnf: AppConfig) -> None:
        self._chunk_size: int = 69 * 1024
        self._cnf = cnf
        self._logger = get_logger()
        self._session = aioboto3.Session(
            aws_access_key_id=str(cnf.fs_s3_access_key_id),
            aws_secret_access_key=str(cnf.fs_s3_access_key),
        )

    def get_name(self) -> str:
        """Return the identifier of this backend ("s3")."""
        return StorageDriversType.S3.value

    async def _client(self) -> BaseClient:
        # A fresh client per operation; callers close it via `async with`.
        return self._session.client("s3", endpoint_url=self._cnf.fs_s3_endpoint)

    def _normalize_path(self, path: str) -> str:
        # Re-attach the bucket prefix and strip a possible local-mount
        # prefix left over from paths written by the local driver.
        return f"{S3StorageDriver._prefix}{path}".replace(self._cnf.fs_local_mount_dir, "")

    async def put(self, data: bytes) -> str:
        """Upload *data* and return its storage path (prefix stripped).

        The object key is content-addressed (SHA-256) inside a per-day
        folder, so identical content maps to an identical key.
        """
        sign = hashlib.file_digest(BytesIO(data), "sha256").hexdigest()
        day = datetime.now(UTC).strftime("%d")
        path = str(Path(S3StorageDriver._prefix) / day / sign)
        async with await self._client() as s3:
            await s3.upload_fileobj(
                Fileobj=BytesIO(data),
                Bucket=self._cnf.fs_s3_bucket,
                Key=path,
                ExtraArgs={
                    # NOTE(review): AWS documents ChecksumSHA256 as the
                    # BASE64-encoded digest; a hex digest may be rejected
                    # by real S3 — confirm against the target endpoint.
                    "ChecksumAlgorithm": "SHA256",
                    "ChecksumSHA256": sign,
                },
            )
        return path.replace(S3StorageDriver._prefix, "")

    async def stream(self, path: str):
        """Yield the object's bytes in chunks.

        Raises:
            FileNotFoundError: when the key is missing or the fetch fails.
        """
        path = self._normalize_path(path)
        self._logger.debug(
            f"stream s3 path {path}, bucket {self._cnf.fs_s3_bucket}, endpoint {self._cnf.fs_s3_endpoint}"
        )
        try:
            async with await self._client() as s3:
                obj = await s3.get_object(Bucket=self._cnf.fs_s3_bucket, Key=path)
                body = obj["Body"]
                while chunk := await body.read(self._chunk_size):
                    yield chunk
        except ClientError as e:
            # A plain missing key is expected and not logged as an error.
            if e.response.get("Error", {}).get("Code") != "NoSuchKey":
                self._logger.error(f"stream client error: {str(e)}, path: {path}")
            # Chain the cause so the original S3 error stays debuggable.
            raise FileNotFoundError from e
        except Exception as e:
            self._logger.error(f"stream error: {type(e).__name__} {str(e)}, path: {path}")
            raise FileNotFoundError from e

    async def take(self, path: str) -> Optional[bytes]:
        """Download the whole object; ``None`` when it is empty.

        Propagates ``FileNotFoundError`` from :meth:`stream` when missing.
        """
        buffer = BytesIO()
        async for chunk in self.stream(path):
            if chunk:
                buffer.write(chunk)
        content = buffer.getvalue()
        return content if content else None

    async def delete(self, path: str) -> None:
        """Delete the object (S3 delete is a no-op for missing keys)."""
        async with await self._client() as s3:
            await s3.delete_object(Bucket=self._cnf.fs_s3_bucket, Key=self._normalize_path(path))
# Images larger than this many bytes are downscaled and re-encoded.
RESIZE_MAX_SIZE = 100_000
# Target bounding box (width, height) passed to PIL.Image.thumbnail.
RESIZE_PARAMS = (500, 500)
class AtachmentService:
_driver: StorageDriver
_repository: AttachmentRepository
_cnf: AppConfig
def __init__(self, driver: StorageDriver, attach_repository: AttachmentRepository):
self._driver = driver
self._repository = attach_repository
self._cnf = get_app_config()
def media_type(self, content_type: str) -> MediaType:
if content_type == "":
return MediaType.OTHER
firt_part = content_type.split("/")[0]
match firt_part:
case str(MediaType.SVG_IMAGE):
return MediaType.SVG_IMAGE
case str(MediaType.IMAGE):
return MediaType.IMAGE
case str(MediaType.AUDIO):
return MediaType.AUDIO
case str(MediaType.VIDEO):
return MediaType.VIDEO
case _:
return MediaType.OTHER
def content_type(self, file: bytes) -> str:
return magic.from_buffer(file[0:2048], mime=True)
def extension(self, content_type: str | None) -> str:
if not content_type:
return "jpg"
if len(content_type.split("/")) != 2:
return "jpg"
return content_type.split("/")[1].split("+")[0]
def id_from_url(self, url: str) -> str:
if ".original" not in url:
raise ValueError(f"wrong url: {url}")
parts = url.split(".original")[0]
return parts.replace("/", "")
def url(self, attachment_id: str, content_type: str | None = None) -> str:
return f"{self._cnf.app_public_url}/api/v0/attachment/{attachment_id}.original.{
self.extension(content_type)}"
def audio(self, attachment: list[Attachment] | None) -> list[Attachment]:
if not attachment:
return []
return [a for a in attachment if a.media_type == MediaType.AUDIO.value and a.is_deleted == False]
def svg_images(self, attachment: list[Attachment] | None) -> list[Attachment]:
if not attachment:
return []
return [a for a in attachment if a.media_type == MediaType.SVG_IMAGE.value and a.is_deleted == False]
def images(self, attachment: list[Attachment] | None) -> list[Attachment]:
if not attachment:
return []
return [a for a in attachment if a.media_type == MediaType.IMAGE.value and a.is_deleted == False]
async def create(self, file: bytes, user_id: str) -> Attachment:
path = await self._driver.put(file)
content_type = self.content_type(file)
attach = Attachment(
size=len(file),
storage_driver_name=str(self._driver.get_name()),
path=path,
media_type=self.media_type(content_type),
content_type=content_type,
created_by=user_id,
id=str(uuid.uuid4()),
created_at=datetime.now(UTC),
updated_at=datetime.now(UTC)
)
await self._repository.create(attach)
return attach
async def get_info(self, session: AbstractSession | None, attach_id: list[str]) -> list[Attachment]:
if not attach_id:
return []
if session is not None:
return await self._repository.get_by_id(session, attach_id)
async with self._repository.db().session_slave() as session:
return await self._repository.get_by_id(session, attach_id)
def get_name(self, attachment: Attachment) -> str:
return f"{attachment.id}.{self.extension(attachment.content_type)}"
async def get_data(self, session: AbstractSession, attach_id: str) -> Optional[bytes]:
file = await self._repository.get_by_id(session, [attach_id])
if not file:
return None
return await self._driver.take(file[0].path)
async def get_stream(self, session: AbstractSession | None, attach_id: str) -> AsyncIterator[bytes]:
async def _stream_iterator(is_empty: bool):
if is_empty:
return
yield first_chunk
async for chunk in stream: # type: ignore
yield chunk
if session:
file = await self._repository.get_by_id(session, [attach_id])
else:
async with self._repository.db().session_slave() as session:
file = await self._repository.get_by_id(session, [attach_id])
if not file:
raise FileNotFoundError
stream = self._driver.stream(file[0].path)
try:
first_chunk = await stream.__anext__() # type: ignore
except StopAsyncIteration:
return _stream_iterator(is_empty=True)
return _stream_iterator(is_empty=False)
async def image_resize(self, session: AbstractSession, attach_id: list[str]):
info = await self.get_info(session, attach_id)
get_logger().info(
f"image_resize {len(info)}",
)
if not info:
return
for item in info:
if item.media_type != MediaType.IMAGE.value:
continue
if item.is_deleted:
continue
data = await self.get_data(session, item.id)
if data is None:
continue
if len(data) <= RESIZE_MAX_SIZE:
get_logger().info(
f"skip because size: {len(data)}",
)
continue
img = Image.open(BytesIO(data))
img.thumbnail(RESIZE_PARAMS)
buffer = BytesIO()
img.save(buffer, format="JPEG", quality=70)
buffer.seek(0)
d = buffer.read()
if d == 0:
return
buffer.close()
get_logger().info(
f"delete:{item.path}",
)
path = await self._driver.put(d)
await self._repository.update(item.id, path=path, content_type="image/jpeg", size=len(d))
await self._driver.delete(item.path)

View File

@ -0,0 +1,13 @@
from server.modules.descriptions.repository import (
CharactersRepository,
ACharactersRepository,
)
from server.modules.descriptions.service import CharactersService
from server.modules.descriptions.domain import Breed
__all__ = (
"CharactersRepository",
"ACharactersRepository",
"CharactersService",
"Breed",
)

View File

@ -1,4 +1,4 @@
from server.services.descriptions.repository.repository import (
from server.modules.descriptions.repository.repository import (
CharactersRepository,
ACharactersRepository,
)

Some files were not shown because too many files have changed in this diff Show More