Breeds in the DB (породы в БД)

parent 6eb8cdff6e
commit ce5c715611

Makefile | 4
@@ -1,5 +1,5 @@
 api:
-	uv run granian --interface asgi server.main:app --host 0.0.0.0
+	alembic upgrade head && uv run granian --interface asgi server.main:app --host 0.0.0.0

 dog-train:
 	uv run ml/dogs.py
@@ -18,7 +18,7 @@ pipinstall:
 	uv pip sync requirements.txt

 migrate-up:
-	AI_DIALOG_WB_TOKEN="" AI_BAIDU_SECRET_KEY="" AI_BAIDU_API_KEY="" DB_PASS_SALT="d" SENTRY_DNS="" APP_TOKEN_SECRET="d" alembic upgrade head
+	alembic upgrade head

 migration-generate:
 	git rev-parse --short HEAD | xargs -I {} alembic revision --autogenerate -m "{}"

@@ -1,11 +1,10 @@
 import json

-from PIL import ImageFile
 import torch.nn as nn
-from torchvision.datasets import ImageFolder  # type: ignore
+from PIL import ImageFile
 from torch.utils.data import DataLoader
+from torchvision.datasets import ImageFolder  # type: ignore

-from train import get_labels, load_model, get_loaders, train, show, DEVICE  # type: ignore
+from train import DEVICE, get_labels, get_loaders, load_model, show, train  # type: ignore

 ImageFile.LOAD_TRUNCATED_IMAGES = True

@@ -1,11 +1,10 @@
 import json

-from PIL import ImageFile
 import torch.nn as nn
-from torchvision.datasets import ImageFolder  # type: ignore
+from PIL import ImageFile
 from torch.utils.data import DataLoader
+from torchvision.datasets import ImageFolder  # type: ignore

-from train import get_labels, load_model, get_loaders, train, show, DEVICE  # type: ignore
+from train import DEVICE, get_labels, get_loaders, load_model, show, train  # type: ignore

 ImageFile.LOAD_TRUNCATED_IMAGES = True

@@ -1,12 +1,12 @@
-import torch
-from torchvision import transforms  # type: ignore
-import torch.nn.functional as F
-from PIL import Image
 import json

+import torch
+import torch.nn.functional as F
+from PIL import Image
+from torchvision import transforms  # type: ignore

 # Build labels_dict to map classes to indices
-with open("labels.json", "r") as f:
+with open("labels.json") as f:
     data_labels = f.read()
 labels_dict = json.loads(data_labels)

ml/train.py | 16

@@ -1,13 +1,13 @@
 import os

 import matplotlib.pyplot as plt  # type: ignore
 import torch
 import torch.nn as nn
-from torchvision.datasets import ImageFolder  # type: ignore
-from torch.utils.data import Dataset, DataLoader, random_split
-from torchvision import transforms  # type: ignore
 import torchvision
+from torch.utils.data import DataLoader, Dataset, random_split
+from torchvision import transforms  # type: ignore
+from torchvision.datasets import ImageFolder  # type: ignore
 from torchvision.models import ResNet50_Weights  # type: ignore
-from typing import Tuple

 DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
@@ -29,7 +29,7 @@ def get_labels(input_dir, img_size):
     return labels_dict, dataset


-def get_loaders(dataset: Dataset) -> Tuple[DataLoader, DataLoader]:
+def get_loaders(dataset: Dataset) -> tuple[DataLoader, DataLoader]:
     # Split the data into training and validation sets
     train_size = int(0.8 * float(len(dataset)))  # type: ignore[arg-type]
     val_size = len(dataset) - train_size  # type: ignore[arg-type]
@@ -61,7 +61,7 @@ def train(
     model: nn.Module,
     train_loader: DataLoader,
     val_loader: DataLoader,
-) -> Tuple[list[float], list[float], list[float], list[float]]:
+) -> tuple[list[float], list[float], list[float], list[float]]:
     criterion = torch.nn.CrossEntropyLoss()
     optimizer = torch.optim.Adam(model.fc.parameters(), lr=1e-4)  # type: ignore[union-attr]
     # Metric history
@@ -96,9 +96,7 @@ def train(
         train_acc = 100.0 * correct / total
         train_loss_history.append(train_loss)
         train_acc_history.append(train_acc)
-        print(
-            f"Epoch {epoch + 1}/{num_epochs}, Train Loss: {train_loss:.4f}, Train Accuracy: {train_acc:.2f}%"
-        )
+        print(f"Epoch {epoch + 1}/{num_epochs}, Train Loss: {train_loss:.4f}, Train Accuracy: {train_acc:.2f}%")

         # Evaluation on the validation data
         model.eval()

pyproject.toml | 114

@@ -48,3 +48,117 @@ default = [
     "matplotlib>=3.10.1",
 ]
+
+# MYPY
+[tool.mypy]
+exclude = [
+    ".venv",
+    "venv",
+    "tmp",
+    "scripts",
+    "tests"
+]
+plugins = ["sqlalchemy.ext.mypy.plugin"]
+mypy_path = "./stubs"
+ignore_missing_imports = true
+
+# RUFF
+[tool.ruff]
+target-version = "py312"
+show-fixes = true
+src = ["app"]
+# Same as Black.
+line-length = 120
+indent-width = 4
+
+# Exclude a variety of commonly ignored directories.
+exclude = [
+    ".bzr",
+    ".direnv",
+    ".eggs",
+    ".git",
+    ".git-rewrite",
+    ".hg",
+    ".ipynb_checkpoints",
+    ".mypy_cache",
+    ".nox",
+    ".pants.d",
+    ".pyenv",
+    ".pytest_cache",
+    ".pytype",
+    ".ruff_cache",
+    ".svn",
+    ".tox",
+    ".venv",
+    ".vscode",
+    "__pypackages__",
+    "_build",
+    "buck-out",
+    "build",
+    "dist",
+    "node_modules",
+    "site-packages",
+    "venv",
+    "stubs",
+    "scripts",
+]
+
+[tool.ruff.lint.isort]
+known-first-party = ["app"]
+
+[tool.ruff.format]
+# Like Black, use double quotes for strings.
+quote-style = "double"
+# Like Black, indent with spaces, rather than tabs.
+indent-style = "space"
+# Like Black, respect magic trailing commas.
+skip-magic-trailing-comma = false
+# Like Black, automatically detect the appropriate line ending.
+line-ending = "auto"
+
+[tool.ruff.lint.per-file-ignores]
+"stubs/*" = ["F403"]
+"server/migration/*" = ["E501", "F403"]
+"server/config/__init__.py" = ["E501"]
+"scripts/*" = ["T201", "E501"]
+"server/admin/*" = ["E501", "E711"]
+"vk_api/*" = ["T201", "C416", "A001", "E501"]
+"ml/*" = ["T201", "C416", "A001", "E501", "N812"]
+"tests/**/*.py" = [
+    "E501", "ASYNC230",
+    # at least these three should be fine in tests:
+    "S101",    # asserts allowed in tests...
+    "S106",    # Possible hardcoded password assigned to argument: "password"
+    "S110",    # consider logging the exception
+    "ARG",     # Unused function args -> fixtures nevertheless are functionally relevant
+    "FBT",     # Don't care about booleans as positional arguments in tests, e.g. via @pytest.mark.parametrize()
+    # The below are debatable
+    "PLR2004", # Magic value used in comparison, ...
+    "S311",    # Standard pseudo-random generators are not suitable for cryptographic purposes
+    "INP001",  # File `...` is part of an implicit namespace package. Add an `__init__.py`.
+    "SLF001",  # Private member accessed: `_...`
+]
+"tests/__init__.py" = ["I001"]

+[tool.ruff.lint]
+# https://docs.astral.sh/ruff/rules/
+select = ["DTZ", "F", "C4", "B", "A", "E", "T", "I", "N", "UP", "ASYNC", "Q"]
+ignore = ["E712", "B904", "B019", "C417"]
+
+# Allow fix for all enabled rules (when `--fix` is provided).
+fixable = ["ALL"]
+unfixable = []
+# Allow unused variables when underscore-prefixed.
+dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
+
+[tool.ruff.lint.mccabe]
+# https://docs.astral.sh/ruff/settings/#mccabe
+# Flag errors (`C901`) whenever the complexity level exceeds 12.
+max-complexity = 12
+
+[tool.ruff.lint.flake8-pytest-style]
+# https://docs.astral.sh/ruff/settings/#flake8-pytest-style
+fixture-parentheses = false
+mark-parentheses = false

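Much of the rest of this commit follows from the lint `select` list above: the `UP` (pyupgrade) rules motivate the `Optional[X]` -> `X | None` and `Tuple` -> `tuple` rewrites, `I` (isort) the import reordering, and `Q` the single-to-double quote changes in the files below. A small sketch of code these rules would flag (hypothetical snippet, not from the repo; exact rule codes vary by ruff version):

```python
from typing import Optional


def find(key: str) -> Optional[str]:  # ruff's UP (pyupgrade) rules rewrite this to `str | None` under --fix
    return None
```
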
@@ -22,9 +22,7 @@ class AppConfig:
     sentry_dns: str = field("SENTRY_DNS", default="")
     log_level: str = field("LOG_LEVEL", "INFO")

-    db_uri: str = field(
-        "DB_URI", "postgresql+asyncpg://svcuser:svcpass@localhost:5432/svc"
-    )
+    db_uri: str = field("DB_URI", "postgresql+asyncpg://svcuser:svcpass@localhost:5432/svc")
     db_pass_salt: str = field("DB_PASS_SALT", "")
     db_search_path: str = field("DB_SEARCH_PATH", "beerds")

@@ -1,14 +1,13 @@
 from abc import ABCMeta, abstractmethod
-from typing import Optional


 class CacheRepository(metaclass=ABCMeta):
     @abstractmethod
-    async def get(self, key: str) -> Optional[str]:
+    async def get(self, key: str) -> str | None:
         pass

     @abstractmethod
-    async def set(self, key: str, data: str, _exp_min: Optional[int] = None):
+    async def set(self, key: str, data: str, _exp_min: int | None = None):
         pass

     @abstractmethod
@@ -23,10 +22,10 @@ class LocalCacheRepository(CacheRepository):
     def __init__(self) -> None:
         self._data = {}

-    async def get(self, key: str) -> Optional[str]:
+    async def get(self, key: str) -> str | None:
         return self._data.get(key)

-    async def set(self, key: str, data: str, _exp_min: Optional[int] = None):
+    async def set(self, key: str, data: str, _exp_min: int | None = None):
         self._data[key] = data

     async def delete(self, key: str):

@ -1,10 +1,12 @@
|
||||||
"""Abstract realiztion for DB"""
|
"""Abstract realiztion for DB"""
|
||||||
|
|
||||||
from typing import Any, AsyncContextManager, Awaitable, Callable, TypeAlias
|
from collections.abc import Awaitable, Callable
|
||||||
|
from contextlib import AbstractAsyncContextManager as AsyncContextManager
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
from server.config import AppConfig
|
from server.config import AppConfig
|
||||||
|
|
||||||
ExecuteFun: TypeAlias = Callable[[Any], Awaitable[None]]
|
type ExecuteFun = Callable[[Any], Awaitable[None]]
|
||||||
|
|
||||||
|
|
||||||
class ConnectError(Exception):
|
class ConnectError(Exception):
|
||||||
|
|
|
||||||
|
|
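The `type` statement replacing `TypeAlias` here is the PEP 695 alias syntax (Python 3.12+), consistent with `target-version = "py312"` in the new ruff config. A minimal sketch of the two equivalent spellings, assuming only the standard library:

```python
from collections.abc import Awaitable, Callable
from typing import Any, TypeAlias

# Pre-3.12 spelling: a module-level annotated assignment.
ExecuteFunOld: TypeAlias = Callable[[Any], Awaitable[None]]

# PEP 695 spelling: evaluated lazily, so forward references need no quoting.
type ExecuteFun = Callable[[Any], Awaitable[None]]


async def _noop(_: Any) -> None:
    return None


fn: ExecuteFun = _noop  # both aliases describe the same callable shape
```
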
@ -1,11 +1,9 @@
|
||||||
from typing import Type, TypeVar
|
|
||||||
|
|
||||||
from sqlalchemy.orm import registry
|
from sqlalchemy.orm import registry
|
||||||
|
|
||||||
mapper_registry = registry()
|
mapper_registry = registry()
|
||||||
|
|
||||||
DC = TypeVar("DC")
|
|
||||||
|
|
||||||
|
|
||||||
def dict_to_dataclass(data: dict, class_type: Type[DC]) -> DC:
|
def dict_to_dataclass[T](data: dict, class_type: type[T]) -> T:
|
||||||
return class_type(**data)
|
return class_type(**data)
|
||||||
|
|
|
||||||
|
|
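`dict_to_dataclass[T]` adopts PEP 695 inline type parameters (Python 3.12+): the type variable is declared in the function's own signature instead of a module-level `TypeVar`. A usage sketch with a hypothetical `Point` dataclass (not from the repo):

```python
from dataclasses import dataclass


def dict_to_dataclass[T](data: dict, class_type: type[T]) -> T:
    # T is scoped to this function; no separate TypeVar declaration needed.
    return class_type(**data)


@dataclass
class Point:  # hypothetical example type
    x: int
    y: int


p = dict_to_dataclass({"x": 1, "y": 2}, Point)  # statically inferred as Point
assert p == Point(x=1, y=2)
```
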
@@ -1,4 +1,5 @@
-from typing import Any, AsyncContextManager
+from contextlib import AbstractAsyncContextManager as AsyncContextManager
+from typing import Any

 from server.config import AppConfig
 from server.infra.db.abc import AbstractDB, AbstractSession

@@ -1,7 +1,7 @@
 from copy import copy
 from dataclasses import Field, asdict, dataclass
 from enum import Enum
-from typing import Any, ClassVar, Optional, Protocol, assert_never
+from typing import Any, ClassVar, Protocol, assert_never

 from sqlalchemy import Select, and_, or_
@@ -28,7 +28,7 @@ class FilterLeftField(Protocol):
 class Filter:
     right: Any
     sign: FilterSign
-    left: Optional[FilterLeftField] = None
+    left: FilterLeftField | None = None

     @staticmethod
     def not_eq(f1: Any, f2: Any):
@@ -79,33 +79,27 @@ class RestrictionField:

 @dataclass
 class QueryRestriction:
-    filters: Optional[list[Filter]] = None
-    limit: Optional[int] = None
-    offset: Optional[int] = None
-    sort: Optional[list[RestrictionField]] = None
+    filters: list[Filter] | None = None
+    limit: int | None = None
+    offset: int | None = None
+    sort: list[RestrictionField] | None = None


 @dataclass(frozen=False)
 class FilterQuery:
     filters: list[Filter]
-    limit: Optional[int] = None
-    offset: Optional[int] = None
-    sort: Optional[list[RestrictionField]] = None
+    limit: int | None = None
+    offset: int | None = None
+    sort: list[RestrictionField] | None = None

     @staticmethod
     def mass_and(fields: list[object], values: list[Any]) -> "FilterQuery":
-        return FilterQuery(
-            filters=[Filter.eq(field, val) for field, val in zip(fields, values)]
-        )
+        return FilterQuery(filters=[Filter.eq(field, val) for field, val in zip(fields, values, strict=True)])

     @staticmethod
     def mass_or(fields: list[object], values: list[Any]) -> "FilterQuery":
         return FilterQuery(
-            filters=[
-                Filter.or_(
-                    [Filter.eq(field, val) for field, val in zip(fields, values)]
-                )
-            ]
+            filters=[Filter.or_([Filter.eq(field, val) for field, val in zip(fields, values, strict=True)])]
         )

     @staticmethod
@@ -119,7 +113,7 @@ class FilterQuery:
     def add_and(self, field: object, value: Any):
         self.filters.append(Filter.eq(field, value))

-    def add_query_restistions(self, q_restriction: Optional[QueryRestriction] = None):
+    def add_query_restistions(self, q_restriction: QueryRestriction | None = None):
         if not q_restriction:
             return None
         if q_restriction.limit:
@@ -137,9 +131,7 @@ class DataclassInstance(Protocol):
     __dataclass_fields__: ClassVar[dict[str, Field[Any]]]


-async def indexes_by_id(
-    input_data: list, values: list[str], id_name="id"
-) -> Optional[list[int]]:
+async def indexes_by_id(input_data: list, values: list[str], id_name="id") -> list[int] | None:
     r_data: list[int] = []
     for i, _ in enumerate(input_data):
         if getattr(input_data[i], id_name) in values:
@@ -149,9 +141,7 @@ async def indexes_by_id(
     return r_data


-def data_by_filter[T: DataclassInstance](
-    input_data: list[T], q: FilterQuery
-) -> list[T]:
+def data_by_filter[T: DataclassInstance](input_data: list[T], q: FilterQuery) -> list[T]:
     # can't do query AND(OR() + AND())
     data: list[T] = []
     data_or: list[T] = []
@@ -245,14 +235,10 @@ def sqlalchemy_conditions(q: FilterQuery):
     conditions = []
     for f in q.filters:
         if f.sign == FilterSign.OR:
-            conditions.append(
-                or_(*sqlalchemy_conditions(q=FilterQuery(filters=f.right)))
-            )
+            conditions.append(or_(*sqlalchemy_conditions(q=FilterQuery(filters=f.right))))
             continue
         if f.sign == FilterSign.AND:
-            conditions.append(
-                and_(*sqlalchemy_conditions(q=FilterQuery(filters=f.right)))
-            )
+            conditions.append(and_(*sqlalchemy_conditions(q=FilterQuery(filters=f.right))))
             continue
         if f.left is None:
             continue
@@ -282,9 +268,7 @@ def sqlalchemy_conditions(q: FilterQuery):
     return conditions


-def sqlalchemy_restrictions(
-    f: FilterQuery, q: Select, dict_to_sort: Optional[dict] = None
-) -> Select:
+def sqlalchemy_restrictions(f: FilterQuery, q: Select, dict_to_sort: dict | None = None) -> Select:
     if f.limit:
         q = q.limit(f.limit)
     if f.offset:

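The `strict=True` added to these `zip()` calls (available since Python 3.10) turns a silently truncated pairing of `fields` and `values` into an immediate error, which is usually what a filter builder wants. A minimal sketch of the difference:

```python
fields = ["id", "name"]
values = [1]  # one value short

# Default zip() silently drops the unmatched "name".
assert list(zip(fields, values)) == [("id", 1)]

# strict=True raises instead of truncating.
try:
    list(zip(fields, values, strict=True))
except ValueError as err:
    print(err)  # zip() argument 2 is shorter than argument 1
```
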
@ -1,5 +1,5 @@
|
||||||
from server.infra.web.description import DescriptionController
|
from server.infra.web.description import DescriptionController
|
||||||
from server.infra.web.seo import SeoController
|
|
||||||
from server.infra.web.recognizer import BreedsController
|
from server.infra.web.recognizer import BreedsController
|
||||||
|
from server.infra.web.seo import SeoController
|
||||||
|
|
||||||
__all__ = ("DescriptionController", "SeoController", "BreedsController")
|
__all__ = ("DescriptionController", "SeoController", "BreedsController")
|
||||||
|
|
|
||||||
|
|
@@ -34,9 +34,7 @@ class DescriptionController(Controller):
     async def dogs_characteristics(self) -> Template:
         characters_service: CharactersService = inject.instance(CharactersService)
         breeds = await characters_service.get_characters()
-        return Template(
-            template_name="dogs-characteristics.html", context={"breeds": breeds}
-        )
+        return Template(template_name="dogs-characteristics.html", context={"breeds": breeds})

     @get("/dogs-characteristics/{name:str}")
     async def beer_description(self, name: str) -> Template:

@@ -1,10 +1,12 @@
+from typing import Annotated
+
 import inject
 from litestar import (
     Controller,
     post,
 )
-from litestar.enums import RequestEncodingType
 from litestar.datastructures import UploadFile
+from litestar.enums import RequestEncodingType
 from litestar.params import Body

 from server.modules.recognizer import RecognizerService
@@ -14,17 +16,13 @@ class BreedsController(Controller):
     path = "/beerds"

     @post("/dogs")
-    async def beerds_dogs(
-        self, data: UploadFile = Body(media_type=RequestEncodingType.MULTI_PART)
-    ) -> dict:
+    async def beerds_dogs(self, data: Annotated[UploadFile, Body(media_type=RequestEncodingType.MULTI_PART)]) -> dict:
         recognizer_service: RecognizerService = inject.instance(RecognizerService)
         body = await data.read()
         return await recognizer_service.predict_dog_image(body)

     @post("/cats")
-    async def beerds_cats(
-        self, data: UploadFile = Body(media_type=RequestEncodingType.MULTI_PART)
-    ) -> dict:
+    async def beerds_cats(self, data: Annotated[UploadFile, Body(media_type=RequestEncodingType.MULTI_PART)]) -> dict:
         recognizer_service: RecognizerService = inject.instance(RecognizerService)
         body = await data.read()
         return await recognizer_service.predict_cat_image(body)

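Moving the multipart `Body` marker into `Annotated` keeps the metadata out of the parameter default; the endpoints still accept a single uploaded file. A hedged client sketch for exercising the route (the host, port, and multipart field name are assumptions, not taken from the repo):

```python
import httpx

# POST an image to the dogs recognizer; /beerds/dogs follows from the controller above.
with open("dog.jpg", "rb") as f:
    resp = httpx.post(
        "http://localhost:8000/beerds/dogs",
        files={"data": ("dog.jpg", f, "image/jpeg")},  # field name assumed to match the parameter
    )
print(resp.json())  # prediction dict produced by RecognizerService
```
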
|
@ -1,11 +1,11 @@
|
||||||
import inject
|
import inject
|
||||||
from litestar import (
|
from litestar import (
|
||||||
Controller,
|
Controller,
|
||||||
get,
|
|
||||||
MediaType,
|
MediaType,
|
||||||
|
get,
|
||||||
)
|
)
|
||||||
|
|
||||||
from server.modules.descriptions import CharactersService, Breed
|
from server.modules.descriptions import Breed, CharactersService
|
||||||
|
|
||||||
|
|
||||||
class SeoController(Controller):
|
class SeoController(Controller):
|
||||||
|
|
|
||||||
|
|
@@ -1,20 +1,20 @@
 import asyncio
-from pathlib import Path
 import os
+from pathlib import Path

 import inject
 from litestar import (
     Litestar,
 )
 from litestar.contrib.jinja import JinjaTemplateEngine
-from litestar.template.config import TemplateConfig
 from litestar.static_files import create_static_files_router
+from litestar.template.config import TemplateConfig

 from server.config import get_app_config
-from server.infra.web import BreedsController, DescriptionController, SeoController
 from server.infra.db import AsyncDB
-from server.modules.descriptions import CharactersService, CharactersRepository
-from server.modules.recognizer import RecognizerService, RecognizerRepository
+from server.infra.web import BreedsController, DescriptionController, SeoController
+from server.modules.descriptions import CharactersService, PGCharactersRepository
+from server.modules.recognizer import RecognizerRepository, RecognizerService

 os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
@@ -29,7 +29,7 @@ def inject_config(binder: inject.Binder):
     db = AsyncDB(cnf)
     loop.run_until_complete(db.connect())
     binder.bind(RecognizerService, RecognizerService(RecognizerRepository()))
-    binder.bind(CharactersService, CharactersService(CharactersRepository()))
+    binder.bind(CharactersService, CharactersService(PGCharactersRepository(db)))

 inject.configure(inject_config)

@ -1,14 +1,13 @@
|
||||||
from logging.config import fileConfig
|
from logging.config import fileConfig
|
||||||
|
|
||||||
from alembic import context
|
from alembic import context
|
||||||
from sqlalchemy import engine_from_config, pool
|
|
||||||
from sqlalchemy.ext.declarative import declarative_base
|
|
||||||
|
|
||||||
from server.config import get_app_config
|
from server.config import get_app_config
|
||||||
from server.infra.db.db_mapper import mapper_registry
|
from server.infra.db.db_mapper import mapper_registry
|
||||||
from server.modules.attachments.repository.models import *
|
from server.modules.attachments.repository.models import *
|
||||||
from server.modules.descriptions.repository.models import *
|
from server.modules.descriptions.repository.models import *
|
||||||
from server.modules.rate.repository.models import *
|
from server.modules.rate.repository.models import *
|
||||||
|
from sqlalchemy import engine_from_config, pool
|
||||||
|
from sqlalchemy.ext.declarative import declarative_base
|
||||||
|
|
||||||
# this is the Alembic Config object, which provides
|
# this is the Alembic Config object, which provides
|
||||||
# access to the values within the .ini file in use.
|
# access to the values within the .ini file in use.
|
||||||
|
|
@ -16,7 +15,7 @@ config = context.config
|
||||||
Base = declarative_base()
|
Base = declarative_base()
|
||||||
target_metadata = Base.metadata
|
target_metadata = Base.metadata
|
||||||
|
|
||||||
for (table_name, table) in mapper_registry.metadata.tables.items():
|
for table_name, table in mapper_registry.metadata.tables.items():
|
||||||
target_metadata._add_table(table_name, table.schema, table)
|
target_metadata._add_table(table_name, table.schema, table)
|
||||||
|
|
||||||
# Interpret the config file for Python logging.
|
# Interpret the config file for Python logging.
|
||||||
|
|
@ -38,8 +37,8 @@ def run_migrations_offline() -> None:
|
||||||
|
|
||||||
"""
|
"""
|
||||||
url = "{}?options=-c search_path={}".format(
|
url = "{}?options=-c search_path={}".format(
|
||||||
str(get_app_config().db_uri).replace(
|
str(get_app_config().db_uri).replace("+asyncpg", ""),
|
||||||
"+asyncpg", ""), get_app_config().db_search_path
|
get_app_config().db_search_path,
|
||||||
)
|
)
|
||||||
context.configure(
|
context.configure(
|
||||||
url=url,
|
url=url,
|
||||||
|
|
@ -62,8 +61,8 @@ def run_migrations_online() -> None:
|
||||||
alemb_cnf = config.get_section(config.config_ini_section, {})
|
alemb_cnf = config.get_section(config.config_ini_section, {})
|
||||||
if not alemb_cnf["sqlalchemy.url"] or alemb_cnf["sqlalchemy.url"] == "driver://user:pass@localhost/dbname":
|
if not alemb_cnf["sqlalchemy.url"] or alemb_cnf["sqlalchemy.url"] == "driver://user:pass@localhost/dbname":
|
||||||
alemb_cnf["sqlalchemy.url"] = "{}?options=-c search_path={}".format(
|
alemb_cnf["sqlalchemy.url"] = "{}?options=-c search_path={}".format(
|
||||||
str(get_app_config().db_uri).replace(
|
str(get_app_config().db_uri).replace("+asyncpg", ""),
|
||||||
"+asyncpg", ""), get_app_config().db_search_path
|
get_app_config().db_search_path,
|
||||||
)
|
)
|
||||||
connectable = engine_from_config(
|
connectable = engine_from_config(
|
||||||
alemb_cnf,
|
alemb_cnf,
|
||||||
|
|
@ -73,7 +72,8 @@ def run_migrations_online() -> None:
|
||||||
|
|
||||||
with connectable.connect() as connection:
|
with connectable.connect() as connection:
|
||||||
context.configure(
|
context.configure(
|
||||||
connection=connection, target_metadata=target_metadata,
|
connection=connection,
|
||||||
|
target_metadata=target_metadata,
|
||||||
)
|
)
|
||||||
|
|
||||||
with context.begin_transaction():
|
with context.begin_transaction():
|
||||||
|
|
|
||||||
|
|
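For reference, the URL assembled here routes the schema through libpq's `options` parameter. With the defaults from the `AppConfig` diff above, the result looks like this (a sketch using those default values):

```python
db_uri = "postgresql+asyncpg://svcuser:svcpass@localhost:5432/svc"  # AppConfig default
db_search_path = "beerds"  # AppConfig default

url = "{}?options=-c search_path={}".format(db_uri.replace("+asyncpg", ""), db_search_path)
print(url)
# postgresql://svcuser:svcpass@localhost:5432/svc?options=-c search_path=beerds
```
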
@@ -5,56 +5,67 @@ Revises:
 Create Date: 2026-01-12 18:28:37.783462

 """
-from typing import Sequence, Union
+import pathlib
+from collections.abc import Sequence

-from alembic import op
 import sqlalchemy as sa
+from alembic import op

 # revision identifiers, used by Alembic.
-revision: str = '474b572b7fe2'
-down_revision: Union[str, None] = None
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
+revision: str = "474b572b7fe2"
+down_revision: str | None = None
+branch_labels: str | Sequence[str] | None = None
+depends_on: str | Sequence[str] | None = None


 def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('attachments',
-    sa.Column('id', sa.String(), nullable=False),
-    sa.Column('size', sa.BigInteger(), nullable=False),
-    sa.Column('storage_driver_name', sa.String(), nullable=False),
-    sa.Column('path', sa.String(), nullable=False),
-    sa.Column('media_type', sa.String(), nullable=False),
-    sa.Column('content_type', sa.String(), nullable=False),
-    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
-    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
-    sa.Column('is_deleted', sa.Boolean(), nullable=False),
-    sa.PrimaryKeyConstraint('id')
+    op.create_table(
+        "attachments",
+        sa.Column("id", sa.String(), nullable=False),
+        sa.Column("size", sa.BigInteger(), nullable=False),
+        sa.Column("storage_driver_name", sa.String(), nullable=False),
+        sa.Column("path", sa.String(), nullable=False),
+        sa.Column("media_type", sa.String(), nullable=False),
+        sa.Column("content_type", sa.String(), nullable=False),
+        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
+        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
+        sa.Column("is_deleted", sa.Boolean(), nullable=False),
+        sa.PrimaryKeyConstraint("id"),
     )
-    op.create_table('beerds',
-    sa.Column('id', sa.String(), nullable=False),
-    sa.Column('name', sa.Text(), nullable=False),
-    sa.Column('descriptions', sa.Text(), nullable=False),
-    sa.Column('signs', sa.JSON(), nullable=False),
-    sa.PrimaryKeyConstraint('id')
+    op.create_table(
+        "beerds",
+        sa.Column("id", sa.String(), nullable=False),
+        sa.Column("name", sa.Text(), nullable=False),
+        sa.Column("alias", sa.Text(), nullable=False),
+        sa.Column("descriptions", sa.Text(), nullable=False),
+        sa.Column("signs", sa.JSON(), nullable=False),
+        sa.PrimaryKeyConstraint("id"),
     )
-    op.create_table('votes',
-    sa.Column('id', sa.String(), nullable=False),
-    sa.Column('attachemnt_id', sa.String(), nullable=False),
-    sa.Column('beerd_id', sa.String(), nullable=False),
-    sa.Column('rate', sa.BigInteger(), nullable=False),
-    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
-    sa.ForeignKeyConstraint(['attachemnt_id'], ['attachments.id'], name='votes_attachemnt_id_fk'),
-    sa.ForeignKeyConstraint(['beerd_id'], ['beerds.id'], name='votes_beerd_id_fk'),
-    sa.PrimaryKeyConstraint('id')
+    op.create_table(
+        "votes",
+        sa.Column("id", sa.String(), nullable=False),
+        sa.Column("attachemnt_id", sa.String(), nullable=False),
+        sa.Column("beerd_id", sa.String(), nullable=False),
+        sa.Column("rate", sa.BigInteger(), nullable=False),
+        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
+        sa.ForeignKeyConstraint(["attachemnt_id"], ["attachments.id"], name="votes_attachemnt_id_fk"),
+        sa.ForeignKeyConstraint(["beerd_id"], ["beerds.id"], name="votes_beerd_id_fk"),
+        sa.PrimaryKeyConstraint("id"),
     )
     # ### end Alembic commands ###
+    with open(
+        pathlib.Path(__file__).resolve().parent / "dumps/beerds_insert.sql",
+        encoding="utf-8",
+    ) as upgrade_file:
+        sql = upgrade_file.read()
+        op.execute(sql)


 def downgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_table('votes')
-    op.drop_table('beerds')
-    op.drop_table('attachments')
+    op.drop_table("votes")
+    op.drop_table("beerds")
+    op.drop_table("attachments")
     # ### end Alembic commands ###

@@ -1,6 +1,5 @@
 from abc import ABCMeta, abstractmethod
 from datetime import UTC, datetime
-from typing import Tuple

 from sqlalchemy import CursorResult, delete, insert, select, update
@@ -11,9 +10,7 @@ from server.modules.attachments.domains.attachments import Attachment

 class AttachmentRepository(metaclass=ABCMeta):
     @abstractmethod
-    async def get_by_id(
-        self, session: AbstractSession, attach_id: list[str]
-    ) -> list[Attachment]:
+    async def get_by_id(self, session: AbstractSession, attach_id: list[str]) -> list[Attachment]:
         """Get Attachment by ID"""
         pass

@@ -89,9 +86,7 @@ class MockAttachmentRepository(AttachmentRepository):
         }
         self._db = MockDB(get_app_config())

-    async def get_by_id(
-        self, session: AbstractSession, attach_id: list[str]
-    ) -> list[Attachment]:
+    async def get_by_id(self, session: AbstractSession, attach_id: list[str]) -> list[Attachment]:
         f: list[Attachment] = []
         for f_id in attach_id:
             f_item = self._data.get(f_id)
@@ -119,14 +114,12 @@ class DBAttachmentRepository(AttachmentRepository):
     def __init__(self, db: AsyncDB):
         self._db = db

-    async def get_by_id(
-        self, session: AbstractSession, attach_id: list[str]
-    ) -> list[Attachment]:
+    async def get_by_id(self, session: AbstractSession, attach_id: list[str]) -> list[Attachment]:
         q = select(Attachment).where(
             Attachment.id.in_(attach_id)  # type: ignore
         )
         attachment: list[Attachment] = []
-        result: CursorResult[Tuple[Attachment]] = await session.execute(q)  # type: ignore
+        result: CursorResult[tuple[Attachment]] = await session.execute(q)  # type: ignore
         for d in result.all():
             attachment.append(d[0])
         return attachment

@@ -1,14 +1,13 @@
 from dataclasses import dataclass, field
 from datetime import UTC, datetime

-from dataclasses_ujson.dataclasses_ujson import UJsonMixin
+from dataclasses_ujson.dataclasses_ujson import UJsonMixin  # type: ignore
 from sqlalchemy import BigInteger, Boolean, Column, DateTime, String

 from server.config import get_app_config
 from server.infra.db.db_mapper import mapper_registry


 @mapper_registry.mapped
 @dataclass
 class Attachment(UJsonMixin):
@@ -23,10 +22,12 @@ class Attachment(UJsonMixin):
     content_type: str = field(metadata={"sa": Column(String(), nullable=False)})

     created_at: datetime = field(
-        default=datetime.now(UTC), metadata={"sa": Column(DateTime(timezone=True), nullable=False)}
+        default=datetime.now(UTC),
+        metadata={"sa": Column(DateTime(timezone=True), nullable=False)},
     )
     updated_at: datetime = field(
-        default=datetime.now(UTC), metadata={"sa": Column(DateTime(timezone=True), nullable=False)}
+        default=datetime.now(UTC),
+        metadata={"sa": Column(DateTime(timezone=True), nullable=False)},
     )
     is_deleted: bool = field(default=False, metadata={"sa": Column(Boolean(), nullable=False, default=False)})

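One caveat these timestamp fields keep (an observation about the existing code, not something the commit changes): `default=datetime.now(UTC)` runs once at class-definition time, so instances created without an explicit value share the import-time timestamp. A `default_factory` sketch that evaluates per instance:

```python
from dataclasses import dataclass, field
from datetime import UTC, datetime


@dataclass
class Stamped:  # simplified stand-in for the mapped model above
    # default_factory is called at construction time, once per instance.
    created_at: datetime = field(default_factory=lambda: datetime.now(UTC))


a, b = Stamped(), Stamped()
assert a.created_at <= b.created_at  # fresh timestamp for each instance
```
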
@@ -1,12 +1,13 @@
 import hashlib
 import os.path
+import uuid
 from abc import ABCMeta, abstractmethod
+from collections.abc import AsyncIterable, AsyncIterator
 from datetime import UTC, datetime
 from enum import Enum
 from io import BytesIO
 from pathlib import Path
-from typing import Any, AsyncIterable, AsyncIterator, Optional
-import uuid
+from typing import Any

 import aioboto3  # type: ignore
 import aiofiles
@@ -46,7 +47,7 @@ class StorageDriver(metaclass=ABCMeta):
         pass

     @abstractmethod
-    async def take(self, path: str) -> Optional[bytes]:
+    async def take(self, path: str) -> bytes | None:
         pass

     @abstractmethod
@@ -78,7 +79,7 @@ class LocalStorageDriver(StorageDriver):
             await f.write(data)
         return str(path)

-    async def take(self, path: str) -> Optional[bytes]:
+    async def take(self, path: str) -> bytes | None:
         if not os.path.isfile(path):
             return None
         async with aiofiles.open(path, "rb") as f:
@@ -112,7 +113,7 @@ class MockStorageDriver(StorageDriver):
         self._store[path] = data
         return path

-    async def take(self, path: str) -> Optional[bytes]:
+    async def take(self, path: str) -> bytes | None:
         return self._store.get(path)

     async def delete(self, path: str):
@@ -138,9 +139,7 @@ class S3StorageDriver(StorageDriver):
         return self._session.client("s3", endpoint_url=self._cnf.fs_s3_endpoint)

     def _normalize_path(self, path: str) -> str:
-        return f"{S3StorageDriver._prefix}{path}".replace(
-            self._cnf.fs_local_mount_dir, ""
-        )
+        return f"{S3StorageDriver._prefix}{path}".replace(self._cnf.fs_local_mount_dir, "")

     async def put(self, data: bytes) -> str:
         sign = hashlib.file_digest(BytesIO(data), "sha256").hexdigest()
@@ -176,12 +175,10 @@ class S3StorageDriver(StorageDriver):
             self._logger.error(f"stream client error: {str(e)}, path: {path}")
             raise FileNotFoundError
         except Exception as e:
-            self._logger.error(
-                f"stream error: {type(e).__name__} {str(e)}, path: {path}"
-            )
+            self._logger.error(f"stream error: {type(e).__name__} {str(e)}, path: {path}")
             raise FileNotFoundError

-    async def take(self, path: str) -> Optional[bytes]:
+    async def take(self, path: str) -> bytes | None:
         buffer = BytesIO()
         async for chunk in self.stream(path):
             if chunk:
@@ -191,9 +188,7 @@ class S3StorageDriver(StorageDriver):

     async def delete(self, path: str) -> None:
         async with await self._client() as s3:
-            await s3.delete_object(
-                Bucket=self._cnf.fs_s3_bucket, Key=self._normalize_path(path)
-            )
+            await s3.delete_object(Bucket=self._cnf.fs_s3_bucket, Key=self._normalize_path(path))


 RESIZE_MAX_SIZE = 100_000
@@ -243,9 +238,7 @@ class AtachmentService:
         return parts.replace("/", "")

     def url(self, attachment_id: str, content_type: str | None = None) -> str:
-        return f"{self._cnf.app_public_url}/api/v0/attachment/{attachment_id}.original.{
-            self.extension(content_type)
-        }"
+        return f"{self._cnf.app_public_url}/api/v0/attachment/{attachment_id}.original.{self.extension(content_type)}"

     async def create(self, file: bytes, user_id: str) -> Attachment:
         path = await self._driver.put(file)
@@ -264,9 +257,7 @@ class AtachmentService:
         await self._repository.create(attach)
         return attach

-    async def get_info(
-        self, session: AbstractSession | None, attach_id: list[str]
-    ) -> list[Attachment]:
+    async def get_info(self, session: AbstractSession | None, attach_id: list[str]) -> list[Attachment]:
         if not attach_id:
             return []
         if session is not None:
@@ -277,17 +268,13 @@ class AtachmentService:
     def get_name(self, attachment: Attachment) -> str:
         return f"{attachment.id}.{self.extension(attachment.content_type)}"

-    async def get_data(
-        self, session: AbstractSession, attach_id: str
-    ) -> Optional[bytes]:
+    async def get_data(self, session: AbstractSession, attach_id: str) -> bytes | None:
         file = await self._repository.get_by_id(session, [attach_id])
         if not file:
             return None
         return await self._driver.take(file[0].path)

-    async def get_stream(
-        self, session: AbstractSession | None, attach_id: str
-    ) -> AsyncIterator[bytes]:
+    async def get_stream(self, session: AbstractSession | None, attach_id: str) -> AsyncIterator[bytes]:
         async def _stream_iterator(is_empty: bool):
             if is_empty:
                 return
@@ -343,7 +330,5 @@ class AtachmentService:
                 f"delete:{item.path}",
             )
             path = await self._driver.put(d)
-            await self._repository.update(
-                item.id, path=path, content_type="image/jpeg", size=len(d)
-            )
+            await self._repository.update(item.id, path=path, content_type="image/jpeg", size=len(d))
             await self._driver.delete(item.path)

@@ -1,13 +1,11 @@
-from server.modules.descriptions.repository import (
-    CharactersRepository,
-    ACharactersRepository,
-)
-from server.modules.descriptions.service import CharactersService
 from server.modules.descriptions.domain import Breed
+from server.modules.descriptions.repository import ACharactersRepository, CharactersRepository, PGCharactersRepository
+from server.modules.descriptions.service import CharactersService

 __all__ = (
     "CharactersRepository",
     "ACharactersRepository",
     "CharactersService",
+    "PGCharactersRepository",
     "Breed",
 )

@@ -3,6 +3,7 @@ from dataclasses import dataclass

 @dataclass(frozen=True)
 class Breed:
+    id: str
     name: str
     alias: str
     description: str

@@ -1,6 +1,7 @@
 from server.modules.descriptions.repository.repository import (
-    CharactersRepository,
     ACharactersRepository,
+    CharactersRepository,
+    PGCharactersRepository,
 )

-__all__ = ("CharactersRepository", "ACharactersRepository")
+__all__ = ("CharactersRepository", "ACharactersRepository", "PGCharactersRepository")

@@ -1,6 +1,3 @@
-
-
-```json
 {
     "people_friendly": true,
     "child_friendly": true,
@@ -14,4 +11,3 @@
     "tolerates_loneliness": false,
     "hypoallergenic": false
 }
-```

@@ -1,6 +1,3 @@
-
-
-```json
 {
     "active": true,
     "colors": ["blue", "gray", "black"],
@@ -10,4 +7,3 @@
     "good_health": false,
     "tolerates_loneliness": false
 }
-```

@@ -1,6 +1,3 @@
-
-
-```json
 {
     "people_friendly": true,
     "child_friendly": true,
@@ -13,4 +10,3 @@
     "tolerates_loneliness": false,
     "hypoallergenic": false
 }
-```

@@ -1,6 +1,3 @@
-
-
-```json
 {
     "people_friendly": true,
     "child_friendly": true,
@@ -17,4 +14,3 @@
     "tolerates_loneliness": false,
     "hypoallergenic": false
 }
-```

@@ -1,3 +0,0 @@
-
-
-It seems like the text you've shared is repeated multiple times, possibly due to an error. Could you clarify your question or specify what you need help with? For example, are you looking for tips on dog care, training, nutrition, or something else? Let me know, and I'll provide a clear, concise response! 🐾

@@ -1,6 +1,3 @@
-
-
-```json
 {
     "people_friendly": true,
     "child_friendly": true,
@@ -13,4 +10,3 @@
     "tolerates_loneliness": false,
     "hypoallergenic": false
 }
-```

@@ -1,16 +1,7 @@
-
-The Entlebucher Mountain Dog exhibits the following traits based on the provided description:
-
-- **Child-friendly**: Yes. The text explicitly states, "Прекрасно ладят с детьми" (Great with children).
-- **High energy**: Yes. The breed requires daily walks, games, and activities, and lack of stimulation may lead to destructive behavior.
-- **Friendly**: Yes. The dog is described as "обычно дружелюбны" (usually friendly) with people and other animals, and it is noted that they are good with children.
-
-Other attributes:
-- **People-friendly**: Ambiguous. While the breed is friendly with people, the term "people-friendly" is not explicitly mentioned.
-- **Dog-friendly**: Unclear. The text notes "обычно дружелюбны" (usually friendly) with other animals but mentions a hunting instinct toward small animals, which may not directly apply to dogs.
-- **Low maintenance**: No. The breed requires regular grooming (brushing) and active socialization/training.
-- **Hypoallergenic**: No. There is no mention of hypoallergenic traits.
-
-**Final Answer**:
-Child-friendly, High energy, Friendly.
+{
+    "people_friendly": true,
+    "child_friendly": true,
+    "active": true,
+    "need_attentions": true,
+    "good_health": false
+}

@@ -1,14 +1,11 @@
 from dataclasses import dataclass, field
-from datetime import UTC, datetime

-from dataclasses_ujson.dataclasses_ujson import UJsonMixin
-from sqlalchemy import BigInteger, Boolean, Column, DateTime, JSON, String, Text
+from dataclasses_ujson.dataclasses_ujson import UJsonMixin  # type: ignore
+from sqlalchemy import JSON, Column, String, Text

-from server.config import get_app_config
 from server.infra.db.db_mapper import mapper_registry


 @mapper_registry.mapped
 @dataclass
 class Beerds(UJsonMixin):
@@ -17,10 +14,6 @@ class Beerds(UJsonMixin):

     id: str = field(metadata={"sa": Column(String(), primary_key=True, nullable=False)})
     name: str = field(metadata={"sa": Column(Text(), nullable=False)})
-    descriptions: str = field(
-        default=datetime.now(UTC), metadata={"sa": Column(Text(), nullable=False)}
-    )
-    signs: dict = field(
-        default=datetime.now(UTC), metadata={"sa": Column(JSON(), nullable=False)}
-    )
+    alias: str = field(metadata={"sa": Column(Text(), nullable=False)})
+    descriptions: str = field(metadata={"sa": Column(Text(), nullable=False)})
+    signs: dict | None = field(default=None, metadata={"sa": Column(JSON(), nullable=False)})

@ -1,10 +1,12 @@
|
||||||
from abc import ABCMeta, abstractmethod
|
from abc import ABCMeta, abstractmethod
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
from aiocache import cached, Cache # type: ignore
|
from aiocache import Cache, cached # type: ignore
|
||||||
|
from sqlalchemy import select
|
||||||
|
|
||||||
from server.infra.db import AsyncDB
|
from server.infra.db import AsyncDB
|
||||||
from server.modules.descriptions.domain import Breed
|
from server.modules.descriptions.domain import Breed
|
||||||
|
from server.modules.descriptions.repository import models
|
||||||
|
|
||||||
|
|
||||||
class ACharactersRepository(metaclass=ABCMeta):
|
class ACharactersRepository(metaclass=ABCMeta):
|
||||||
|
|
@ -12,6 +14,7 @@ class ACharactersRepository(metaclass=ABCMeta):
|
||||||
async def get_characters(self) -> list[Breed]:
|
async def get_characters(self) -> list[Breed]:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
async def get_character(self, alias: str) -> Breed | None:
|
async def get_character(self, alias: str) -> Breed | None:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
@ -28,11 +31,10 @@ class CharactersRepository(ACharactersRepository):
|
||||||
# Идем по каждому текстовому файлу с описанием породы
|
# Идем по каждому текстовому файлу с описанием породы
|
||||||
for breed_file in breed_dir.glob("*.txt"):
|
for breed_file in breed_dir.glob("*.txt"):
|
||||||
breed_name = breed_file.stem # имя файла без расширения - название породы
|
breed_name = breed_file.stem # имя файла без расширения - название породы
|
||||||
description = breed_file.read_text(
|
description = breed_file.read_text(encoding="utf-8") # читаем описание из файла
|
||||||
encoding="utf-8"
|
|
||||||
) # читаем описание из файла
|
|
||||||
breeds.append(
|
breeds.append(
|
||||||
Breed(
|
Breed(
|
||||||
|
id=breed_name,
|
||||||
name=breed_name.replace("_", " "),
|
name=breed_name.replace("_", " "),
|
||||||
alias=breed_file.stem,
|
alias=breed_file.stem,
|
||||||
description=description.strip(),
|
description=description.strip(),
|
||||||
|
|
@@ -55,30 +57,71 @@ class PGCharactersRepository(ACharactersRepository):
     def __init__(self, db: AsyncDB):
         self._db = db

-    @cached(ttl=60, cache=Cache.MEMORY)
+    # ───────────────────────────────────────────────────────────────────── #
+    # 8️⃣ Cached method that returns **all** breeds
+    # ───────────────────────────────────────────────────────────────────── #
+    @cached(ttl=60, cache=Cache.MEMORY)  # 1-minute cache
     async def get_characters(self) -> list[Breed]:
-        breed_dir = Path("server/modules/descriptions/repository/breed_descriptions")
-        breeds: list[Breed] = []
-
-        # Iterate over each breed-description text file
-        for breed_file in breed_dir.glob("*.txt"):
-            breed_name = breed_file.stem  # file name without the extension is the breed name
-            description = breed_file.read_text(
-                encoding="utf-8"
-            )  # read the description from the file
-            breeds.append(
-                Breed(
-                    name=breed_name.replace("_", " "),
-                    alias=breed_file.stem,
-                    description=description.strip(),
-                )
-            )
-        breeds.sort(key=lambda b: b.name)
+        """
+        Reads rows from the `beerds.beerds` table and converts each one
+        into a `Breed` instance. The `signs` column is ignored; `Breed` has no such field.
+        """
+        async with self._db.async_session() as session:
+            # Build the SELECT query (fetch all rows)
+            stmt = select(
+                models.Beerds.id,
+                models.Beerds.name,
+                models.Beerds.alias,
+                models.Beerds.descriptions,
+            )
+            result = await session.execute(stmt)
+            rows = result.fetchall()

+        # Convert the rows into Breed instances
+        breeds: list[Breed] = [
+            Breed(
+                id=str(row.id),
+                name=row.name.strip(),
+                alias=row.alias.strip(),
+                description=row.descriptions.strip(),
+            )
+            for row in rows
+        ]
+
+        # Sort by name, as the file-based implementation did
+        breeds.sort(key=lambda b: b.name.lower())
         return breeds

+    # ───────────────────────────────────────────────────────────────────── #
+    # 9️⃣ Fetch a single breed by its alias
+    # ───────────────────────────────────────────────────────────────────── #
     async def get_character(self, alias: str) -> Breed | None:
-        breeds = await self.get_characters()
-        data = [b for b in breeds if b.alias == alias]
-        if len(data) == 0:
+        """
+        A direct query that avoids fetching every breed. Returns `None`
+        when nothing matches.
+        """
+        async with self._db.async_session() as session:
+            stmt = (
+                select(
+                    models.Beerds.id,
+                    models.Beerds.name,
+                    models.Beerds.alias,
+                    models.Beerds.descriptions,
+                )
+                .where(models.Beerds.alias == alias)
+                .limit(1)
+            )
+            result = await session.execute(stmt)
+            row = result.fetchone()
+
+        if row is None:  # pragma: no cover
             return None
-        return data[0]
+
+        return Breed(
+            id=str(row.id),
+            name=row.name.strip(),
+            alias=row.alias.strip(),
+            description=row.descriptions.strip(),
+        )
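A minimal usage sketch for the DB-backed repository, assuming an `AsyncDB` instance is constructed elsewhere; the alias value is hypothetical:

    repo = PGCharactersRepository(db)  # db: AsyncDB, built elsewhere
    breeds = await repo.get_characters()        # served from a 60 s in-memory cache
    one = await repo.get_character("labrador")  # direct query, bypasses that cache
    if one is not None:
        print(one.name, one.description[:80])

Note the asymmetry: `get_characters` is cached for a minute, while `get_character` always hits the database, trading a little latency for freshness.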
@@ -1,12 +1,16 @@
 from dataclasses import dataclass, field
 from datetime import UTC, datetime

-from dataclasses_ujson.dataclasses_ujson import UJsonMixin
-from sqlalchemy import BigInteger, Boolean, Column, DateTime, ForeignKeyConstraint, String
+from dataclasses_ujson.dataclasses_ujson import UJsonMixin  # type: ignore

 from server.config import get_app_config
 from server.infra.db.db_mapper import mapper_registry
+from sqlalchemy import (
+    BigInteger,
+    Column,
+    DateTime,
+    ForeignKeyConstraint,
+    String,
+)


 @mapper_registry.mapped
@@ -24,7 +28,8 @@ class Vote(UJsonMixin):
     beerd_id: str = field(metadata={"sa": Column(String(), nullable=False)})
     rate: int = field(metadata={"sa": Column(BigInteger(), nullable=False)})
     created_at: datetime = field(
-        default=datetime.now(UTC), metadata={"sa": Column(DateTime(timezone=True), nullable=False)}
+        default=datetime.now(UTC),
+        metadata={"sa": Column(DateTime(timezone=True), nullable=False)},
     )

     def __str__(self):
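One caveat the reformatted `created_at` field preserves: `default=datetime.now(UTC)` is evaluated once, at class-definition time, so every `Vote` built without an explicit timestamp shares the same value. A per-instance default would use `default_factory`, as in this illustrative stand-in (not the mapped model):

    from dataclasses import dataclass, field
    from datetime import UTC, datetime

    @dataclass
    class VoteSketch:
        created_at: datetime = field(default_factory=lambda: datetime.now(UTC))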
@@ -1,6 +1,6 @@
 from server.modules.recognizer.repository import (
-    RecognizerRepository,
     ARecognizerRepository,
+    RecognizerRepository,
 )
 from server.modules.recognizer.service import RecognizerService
@@ -1,6 +1,6 @@
 from server.modules.recognizer.repository.repository import (
-    RecognizerRepository,
     ARecognizerRepository,
+    RecognizerRepository,
 )

 __all__ = ("RecognizerRepository", "ARecognizerRepository")
@@ -1,8 +1,8 @@
 from abc import ABCMeta, abstractmethod
 from functools import lru_cache

-from aiocache import cached, Cache  # type: ignore
 import ujson
+from aiocache import Cache, cached  # type: ignore


 class ARecognizerRepository(metaclass=ABCMeta):
@@ -29,26 +29,22 @@ class RecognizerRepository(ARecognizerRepository):

     @cached(ttl=60, cache=Cache.MEMORY)
     async def images_dogs(self) -> dict:
-        with open("server/modules/recognizer/repository/meta/images.json", "r") as f:
+        with open("server/modules/recognizer/repository/meta/images.json") as f:  # noqa: ASYNC230
             return ujson.loads(f.read())["dog"]

     @cached(ttl=60, cache=Cache.MEMORY)
     async def images_cats(self) -> dict:
-        with open("server/modules/recognizer/repository/meta/images.json", "r") as f:
+        with open("server/modules/recognizer/repository/meta/images.json") as f:  # noqa: ASYNC230
             return ujson.loads(f.read())["cat"]

     @lru_cache
     def labels_cats(self) -> dict:
-        with open(
-            "server/modules/recognizer/repository/meta/labels_cats.json", "r"
-        ) as f:
+        with open("server/modules/recognizer/repository/meta/labels_cats.json") as f:  # noqa: ASYNC230
             data_labels = f.read()
             return ujson.loads(data_labels)

     @lru_cache
     def labels_dogs(self) -> dict:
-        with open(
-            "server/modules/recognizer/repository/meta/labels_dogs.json", "r"
-        ) as f:
+        with open("server/modules/recognizer/repository/meta/labels_dogs.json") as f:  # noqa: ASYNC230
             data_labels = f.read()
             return ujson.loads(data_labels)
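The two cache decorators above behave quite differently; a side-by-side sketch, assuming only that `aiocache` is installed:

    import asyncio
    from functools import lru_cache
    from aiocache import Cache, cached  # type: ignore

    class Demo:
        @cached(ttl=60, cache=Cache.MEMORY)  # expires after 60 s; async-aware
        async def images(self) -> dict:
            return {"dog": []}

        @lru_cache  # never expires; also pins `self` for the process lifetime
        def labels(self) -> dict:
            return {}

    asyncio.run(Demo().images())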
@@ -1,6 +1,6 @@
-from typing import NewType, Any
-import os
 import io
+import os
+from typing import Any, NewType

 from PIL import Image
@@ -10,7 +10,6 @@ from torchvision import transforms  # type: ignore

 from server.modules.recognizer.repository import ARecognizerRepository


 TorchModel = NewType("TorchModel", torch.nn.Module)
@@ -49,15 +48,10 @@ class RecognizerService:
             images.append(
                 {
                     "name": name,
-                    "url": [
-                        f"/static/assets/dog/{predicted_label}/{i}"
-                        for i in images_dogs[predicted_label]
-                    ],
+                    "url": [f"/static/assets/dog/{predicted_label}/{i}" for i in images_dogs[predicted_label]],
                 }
             )
-            description.setdefault(name, []).append(
-                f"/dogs-characteristics/{name.replace(' ', '_')}"
-            )
+            description.setdefault(name, []).append(f"/dogs-characteristics/{name.replace(' ', '_')}")
             results[probabilities] = name
         return {
             "results": results,
@@ -77,10 +71,7 @@ class RecognizerService:
             images.append(
                 {
                     "name": name,
-                    "url": [
-                        f"/static/assets/cat/{predicted_label}/{i}"
-                        for i in images_cats[predicted_label]
-                    ],
+                    "url": [f"/static/assets/cat/{predicted_label}/{i}" for i in images_cats[predicted_label]],
                 }
             )
             results[probabilities] = name
@@ -99,9 +90,7 @@ class RecognizerService:
             ]
         )
         input_tensor = preprocess(Image.open(io.BytesIO(image)))
-        input_batch = input_tensor.unsqueeze(0).to(
-            device
-        )  # add a batch dimension
+        input_batch = input_tensor.unsqueeze(0).to(device)  # add a batch dimension

         with torch.no_grad():
             output = model(input_batch)
@@ -112,7 +101,5 @@ class RecognizerService:

         predicted_data = []
         for i in range(k):
-            predicted_data.append(
-                (predicted_idx[i].item(), float(topk_probs[i].item()))
-            )
+            predicted_data.append((predicted_idx[i].item(), float(topk_probs[i].item())))
         return predicted_data
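For context on the `predicted_idx` / `topk_probs` pair, a sketch of how such values are typically produced with `torch.topk`; the softmax step and the shapes are assumptions about the code elided from this hunk:

    import torch
    import torch.nn.functional as F

    output = torch.randn(1, 120)  # stand-in for model(input_batch)
    probs = F.softmax(output[0], dim=0)
    topk_probs, predicted_idx = torch.topk(probs, k=3)
    predicted_data = [
        (predicted_idx[i].item(), float(topk_probs[i].item())) for i in range(3)
    ]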
@@ -1,5 +1,6 @@
 import os
 import time

 import requests  # type: ignore

 # Get a token in order to:
@@ -12,9 +13,7 @@ group_id = 220240483
 dir = "../assets/dog"
 list_labels = [fname for fname in os.listdir(dir)]

-r = requests.get(
-    f"{VK_URL}photos.getAll{postfix}&access_token={TOKEN}&owner_id=-{group_id}&count=200"
-)
+r = requests.get(f"{VK_URL}photos.getAll{postfix}&access_token={TOKEN}&owner_id=-{group_id}&count=200")
 if "error" in r.json():
     print("error", r.json())
     exit()
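`photos.getAll` returns at most 200 items per call, so larger albums need offset paging; a hedged sketch that assumes VK's usual `response.items` / `response.count` envelope:

    items: list[dict] = []
    offset = 0
    while True:
        resp = requests.get(
            f"{VK_URL}photos.getAll{postfix}&access_token={TOKEN}"
            f"&owner_id=-{group_id}&count=200&offset={offset}"
        ).json()
        if "error" in resp:
            print("error", resp)
            break
        batch = resp["response"]["items"]
        items.extend(batch)
        offset += len(batch)
        if not batch or offset >= resp["response"]["count"]:
            break
        time.sleep(0.4)  # stay under VK's request rate limit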