mirror of https://github.com/ijaric/voice_assistant.git synced 2025-05-24 06:23:28 +00:00

Merge branch 'features/#45_agent' into tasks/#45_agent

This commit is contained in:
Григорич 2023-10-14 23:56:38 +03:00 committed by GitHub
commit c70f4ff726
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
109 changed files with 3958 additions and 130 deletions

View File

@@ -99,6 +99,23 @@ jobs:
make lint
- name: Test Package
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }}
JWT_SECRET_KEY: ${{ secrets.JWT_SECRET_KEY }}
POSTGRES_DRIVER: ${{ vars.POSTGRES_DRIVER }}
POSTGRES_HOST: ${{ vars.POSTGRES_HOST }}
POSTGRES_PORT: ${{ vars.POSTGRES_PORT }}
POSTGRES_USER: ${{ vars.POSTGRES_USER }}
NGINX_PORT: ${{ vars.NGINX_PORT }}
API_HOST: ${{ vars.API_HOST }}
API_PORT: ${{ vars.API_PORT }}
APP_RELOAD: ${{ vars.APP_RELOAD }}
TTS_YANDEX_API_KEY: ${{ secrets.TTS_YANDEX_API_KEY }}
TTS_ELEVEN_LABS_API_KEY: ${{ secrets.TTS_ELEVEN_LABS_API_KEY }}
TTS_YANDEX_AUDIO_FORMAT: ${{ vars.TTS_YANDEX_AUDIO_FORMAT }}
TTS_YANDEX_SAMPLE_RATE_HERTZ: ${{ vars.TTS_YANDEX_SAMPLE_RATE_HERTZ }}
TTS_ELEVEN_LABS_DEFAULT_VOICE_ID: ${{ vars.TTS_ELEVEN_LABS_DEFAULT_VOICE_ID }}
working-directory: src/${{ matrix.package }}
run: |
make ci-test

View File

@@ -15,14 +15,27 @@ NGINX_PORT=80
API_HOST=0.0.0.0
API_PORT=8000
TEST_API_PROTOCOL=http
TEST_API_HOST=api
TEST_API_PORT=8000
JWT_SECRET_KEY=v9LctjUWwol4XbvczPiLFMDtZ8aal7mm
JWT_ALGORITHM=HS256
APP_RELOAD=True
VOICE_AVAILABLE_FORMATS=mp3,ogg,wav
VOICE_AVAILABLE_FORMATS=mp3,ogg,wav,oga
VOICE_MAX_INPUT_SIZE=5120 # 5MB
VOICE_MAX_INPUT_SECONDS=30
OPENAI_API_KEY=sk-1234567890
OPENAI_STT_MODEL=whisper-1
TTS_YANDEX_API_KEY=
TTS_YANDEX_AUDIO_FORMAT=oggopus
TTS_YANDEX_SAMPLE_RATE_HERTZ=48000
TTS_YANDEX_TIMEOUT_SECONDS=30
TTS_ELEVEN_LABS_API_KEY=
TTS_ELEVEN_LABS_DEFAULT_VOICE_ID=EXAVITQu4vr4xnSDxMaL
TTS_ELEVEN_LABS_TIMEOUT_SECONDS=30

View File

@@ -13,7 +13,7 @@ COPY poetry.toml /opt/app/poetry.toml
WORKDIR /opt/app
RUN pip install poetry \
RUN pip install poetry \
&& poetry install --no-dev
COPY bin /opt/app/bin

View File

@@ -0,0 +1,18 @@
FROM python:3.11
RUN apt-get update
WORKDIR /opt/app
COPY pyproject.toml ./
COPY poetry.lock ./
RUN apt-get update \
&& pip install poetry \
&& poetry config virtualenvs.create false \
&& poetry install --no-dev
COPY tests tests
COPY lib lib
CMD ["pytest"]

View File

@@ -1,3 +1,3 @@
include ../../common_makefile.mk
PROJECT_FOLDERS = bin lib tests
PROJECT_FOLDERS = bin lib tests

View File

@@ -0,0 +1,56 @@
version: "3"
services:
postgres:
image: postgres:15.2
restart: always
environment:
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
POSTGRES_DB: ${POSTGRES_NAME}
env_file:
- .env
expose:
- "${POSTGRES_PORT}"
volumes:
- postgres_data:/var/lib/postgresql/data/
networks:
- backend_network
api:
build:
context: .
container_name: api
image: fastapi_app
restart: always
entrypoint: ["/opt/app/entrypoint.sh"]
env_file:
- .env
expose:
- "${API_PORT}"
depends_on:
- postgres
networks:
- backend_network
- api_network
tests:
build:
context: .
dockerfile: "Dockerfile.tests"
env_file:
- .env
depends_on:
- postgres
- api
networks:
- api_network
volumes:
postgres_data:
networks:
api_network:
driver: bridge
backend_network:
driver: bridge

View File

@@ -1,4 +1,10 @@
from .agent import AgentHandler
from .health import basic_router
from .voice_responce_handler import VoiceResponseHandler
__all__ = ["AgentHandler", "basic_router"]
__all__ = [
"AgentHandler",
"VoiceResponseHandler",
"basic_router",
]

View File

@@ -0,0 +1,45 @@
import http
import io
import fastapi
import lib.stt.services as stt_services
# import lib.tts.services as tts_service
# import lib.models as models
class VoiceResponseHandler:
def __init__(
self,
stt: stt_services.SpeechService,
# tts: tts_service.TTSService,
):
self.stt = stt
# self.tts = tts
self.router = fastapi.APIRouter()
self.router.add_api_route(
"/",
self.voice_response,
methods=["POST"],
summary="Ответ голосового помощника",
description="Маршрут возвращает потоковый ответ аудио",
)
async def voice_response(
self,
voice: bytes = fastapi.File(...),
) -> fastapi.responses.StreamingResponse:
voice_text: str = await self.stt.recognize(voice)
if voice_text == "":
raise fastapi.HTTPException(status_code=http.HTTPStatus.BAD_REQUEST, detail="Speech recognition failed")
# TODO: Add text processing via the openai client
# TODO: Add speech synthesis via the tts client
# TODO: Replace the stub with a real response
# response = await self.tts.get_audio_as_bytes(
# models.TTSCreateRequestModel(
# text=voice_text,
# )
# )
# return fastapi.responses.StreamingResponse(io.BytesIO(response.audio_content), media_type="audio/ogg")
return fastapi.responses.StreamingResponse(io.BytesIO(voice), media_type="audio/ogg")
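
For context, a minimal sketch (not part of this commit) of exercising the new voice route with FastAPI's TestClient. The import path lib.api.v1.handlers and the _FakeSTT stub are assumptions, and python-multipart (added to poetry.lock below) must be installed for the file upload to parse.

import io
import fastapi
import fastapi.testclient
import lib.api.v1.handlers as api_v1_handlers  # assumed import path for the handlers package above

class _FakeSTT:
    """Hypothetical stand-in for stt.SpeechService, used only for this sketch."""
    async def recognize(self, voice: bytes) -> str:
        return "recognized text"

app = fastapi.FastAPI()
app.include_router(
    api_v1_handlers.VoiceResponseHandler(stt=_FakeSTT()).router,  # type: ignore[arg-type]
    prefix="/api/v1/voice",
    tags=["voice"],
)

client = fastapi.testclient.TestClient(app)
response = client.post(
    "/api/v1/voice/",
    files={"voice": ("question.ogg", io.BytesIO(b"fake ogg bytes"), "audio/ogg")},
)
assert response.status_code == 200                       # the handler currently echoes the uploaded audio back
assert response.headers["content-type"] == "audio/ogg"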

View File

@@ -1,3 +1,5 @@
from .base import HealthResponse
__all__ = ["HealthResponse"]
__all__ = [
"HealthResponse",
]

View File

@@ -12,7 +12,9 @@ import lib.app.errors as app_errors
import lib.app.settings as app_settings
import lib.app.split_settings as app_split_settings
import lib.clients as clients
import lib.models as models
import lib.stt as stt
import lib.tts as tts
logger = logging.getLogger(__name__)
@@ -60,15 +62,29 @@ class Application:
logger.info("Initializing clients")
http_yandex_tts_client = clients.AsyncHttpClient(
base_url="yandex", # todo add yandex api url from settings
proxy_settings=settings.proxy,
base_url=settings.tts_yandex.base_url,
headers=settings.tts_yandex.base_headers,
timeout=settings.tts_yandex.timeout_seconds,
)
http_eleven_labs_tts_client = clients.AsyncHttpClient(
base_url=settings.tts_eleven_labs.base_url,
headers=settings.tts_eleven_labs.base_headers,
timeout=settings.tts_eleven_labs.timeout_seconds,
)
disposable_resources.append(
DisposableResource(
name="http_client yandex",
dispose_callback=http_yandex_tts_client.close(),
)
)
disposable_resources.append(
DisposableResource(
name="http_client eleven labs",
dispose_callback=http_eleven_labs_tts_client.close(),
)
)
# Repositories
@@ -76,6 +92,16 @@ class Application:
stt_repository: stt.STTProtocol = stt.OpenaiSpeechRepository(settings=settings)
chat_history_repository = agent.ChatHistoryRepository(pg_async_session=postgres_client.get_async_session())
tts_yandex_repository = tts.TTSYandexRepository(
tts_settings=app_split_settings.TTSYandexSettings(),
client=http_yandex_tts_client,
)
tts_eleven_labs_repository = tts.TTSElevenLabsRepository(
tts_settings=app_split_settings.TTSElevenLabsSettings(),
client=http_eleven_labs_tts_client,
is_models_from_api=True,
)
# Caches
logger.info("Initializing caches")
@@ -85,12 +111,25 @@
logger.info("Initializing services")
stt_service: stt.SpeechService = stt.SpeechService(repository=stt_repository) # type: ignore
tts_service: tts.TTSService = tts.TTSService( # type: ignore
repositories={
models.VoiceModelProvidersEnum.YANDEX: tts_yandex_repository,
models.VoiceModelProvidersEnum.ELEVEN_LABS: tts_eleven_labs_repository,
},
)
# Handlers
logger.info("Initializing handlers")
liveness_probe_handler = api_v1_handlers.basic_router
agent_handler = api_v1_handlers.AgentHandler(chat_history_repository=chat_history_repository).router
# TODO: declare the tts and openai services and add them to voice_response_handler
voice_response_handler = api_v1_handlers.VoiceResponseHandler(
stt=stt_service,
# tts=tts_service, # TODO
).router
logger.info("Creating application")
fastapi_app = fastapi.FastAPI(
@@ -104,6 +143,7 @@ class Application:
# Routes
fastapi_app.include_router(liveness_probe_handler, prefix="/api/v1/health", tags=["health"])
fastapi_app.include_router(agent_handler, prefix="/api/v1/agent", tags=["testing"])
fastapi_app.include_router(voice_response_handler, prefix="/api/v1/voice", tags=["voice"])
application = Application(
settings=settings,

View File

@@ -1,24 +1,16 @@
import pydantic
import pydantic_settings
import lib.app.split_settings as app_split_settings
class Settings(pydantic_settings.BaseSettings):
api: app_split_settings.ApiSettings = pydantic.Field(default_factory=lambda: app_split_settings.ApiSettings())
app: app_split_settings.AppSettings = pydantic.Field(default_factory=lambda: app_split_settings.AppSettings())
postgres: app_split_settings.PostgresSettings = pydantic.Field(
default_factory=lambda: app_split_settings.PostgresSettings()
)
logger: app_split_settings.LoggingSettings = pydantic.Field(
default_factory=lambda: app_split_settings.LoggingSettings()
)
openai: app_split_settings.OpenaiSettings = pydantic.Field(
default_factory=lambda: app_split_settings.OpenaiSettings()
)
project: app_split_settings.ProjectSettings = pydantic.Field(
default_factory=lambda: app_split_settings.ProjectSettings()
)
proxy: app_split_settings.ProxySettings = pydantic.Field(default_factory=lambda: app_split_settings.ProxySettings())
voice: app_split_settings.VoiceSettings = pydantic.Field(default_factory=lambda: app_split_settings.VoiceSettings())
api: app_split_settings.ApiSettings = app_split_settings.ApiSettings()
app: app_split_settings.AppSettings = app_split_settings.AppSettings()
postgres: app_split_settings.PostgresSettings = app_split_settings.PostgresSettings()
logger: app_split_settings.LoggingSettings = app_split_settings.LoggingSettings()
openai: app_split_settings.OpenaiSettings = app_split_settings.OpenaiSettings()
project: app_split_settings.ProjectSettings = app_split_settings.ProjectSettings()
proxy: app_split_settings.ProxySettings = app_split_settings.ProxySettings()
voice: app_split_settings.VoiceSettings = app_split_settings.VoiceSettings()
tts_yandex: app_split_settings.TTSYandexSettings = app_split_settings.TTSYandexSettings()
tts_eleven_labs: app_split_settings.TTSElevenLabsSettings = app_split_settings.TTSElevenLabsSettings()
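
A brief illustrative sketch (assumptions: the module path lib.app.settings and the full .env shown earlier being present) of the flattened defaults; each nested settings model still reads its own prefixed variables (POSTGRES_*, TTS_YANDEX_*, TTS_ELEVEN_LABS_*, ...) from the shared .env.

import lib.app.settings as app_settings

settings = app_settings.Settings()
print(settings.tts_yandex.audio_format)                      # "oggopus" unless TTS_YANDEX_AUDIO_FORMAT overrides it
print(settings.tts_eleven_labs.base_url)                     # "https://api.elevenlabs.io/v1/"
print(settings.tts_eleven_labs.base_headers["xi-api-key"])   # resolved from TTS_ELEVEN_LABS_API_KEY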

View File

@@ -5,6 +5,7 @@ from .openai import *
from .postgres import *
from .project import *
from .proxy import *
from .tts import *
from .voice import *
__all__ = [
@@ -15,6 +16,8 @@ __all__ = [
"PostgresSettings",
"ProjectSettings",
"ProxySettings",
"TTSElevenLabsSettings",
"TTSYandexSettings",
"VoiceSettings",
"get_logging_config",
]

View File

@@ -0,0 +1,7 @@
from .eleven_labs import *
from .yandex import *
__all__ = [
"TTSElevenLabsSettings",
"TTSYandexSettings",
]

View File

@@ -0,0 +1,26 @@
import pydantic
import pydantic_settings
import lib.app.split_settings.utils as app_split_settings_utils
class TTSElevenLabsSettings(pydantic_settings.BaseSettings):
model_config = pydantic_settings.SettingsConfigDict(
env_file=app_split_settings_utils.ENV_PATH,
env_prefix="TTS_ELEVEN_LABS_",
env_file_encoding="utf-8",
extra="ignore",
)
api_key: pydantic.SecretStr = pydantic.Field(default=...)
default_voice_id: str = "EXAVITQu4vr4xnSDxMaL"
base_url: str = "https://api.elevenlabs.io/v1/"
timeout_seconds: int = 30
@property
def base_headers(self) -> dict[str, str]:
return {
"Accept": "audio/mpeg",
"Content-Type": "application/json",
"xi-api-key": self.api_key.get_secret_value(),
}

View File

@@ -0,0 +1,28 @@
import typing
import pydantic
import pydantic_settings
import lib.app.split_settings.utils as app_split_settings_utils
class TTSYandexSettings(pydantic_settings.BaseSettings):
model_config = pydantic_settings.SettingsConfigDict(
env_file=app_split_settings_utils.ENV_PATH,
env_prefix="TTS_YANDEX_",
env_file_encoding="utf-8",
extra="ignore",
)
audio_format: typing.Literal["oggopus", "mp3", "lpcm"] = "oggopus"
sample_rate_hertz: int = 48000
api_key: pydantic.SecretStr = pydantic.Field(default=...)
base_url: str = "https://tts.api.cloud.yandex.net/speech/v1/"
timeout_seconds: int = 30
@property
def base_headers(self) -> dict[str, str]:
return {
"Authorization": f"Api-Key {self.api_key.get_secret_value()}",
"Content-Type": "application/x-www-form-urlencoded",
}
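
A small sketch (placeholder credential, import path assumed) of how the TTS_YANDEX_ env_prefix maps onto these fields and feeds base_headers.

import os
import lib.app.split_settings as app_split_settings  # assumed import path for the settings package

os.environ["TTS_YANDEX_API_KEY"] = "example-api-key"   # placeholder, for illustration only
settings = app_split_settings.TTSYandexSettings()

assert settings.audio_format == "oggopus"              # default, overridable via TTS_YANDEX_AUDIO_FORMAT
assert settings.base_headers["Authorization"] == "Api-Key example-api-key"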

View File

@@ -8,7 +8,7 @@ import lib.app.split_settings as app_split_settings
class AsyncHttpClient(httpx.AsyncClient):
def __init__(
self,
proxy_settings: app_split_settings.ProxySettings,
proxy_settings: app_split_settings.ProxySettings | None = None,
base_url: str | None = None,
**client_params: typing.Any,
) -> None:
@@ -17,10 +17,10 @@ class AsyncHttpClient(httpx.AsyncClient):
self.proxies = self.__get_proxies_from_settings()
self.client_params = client_params
super().__init__(base_url=self.base_url, proxies=self.proxies, **client_params)
super().__init__(base_url=self.base_url, proxies=self.proxies, **client_params) # type: ignore[reportGeneralTypeIssues]
def __get_proxies_from_settings(self) -> dict[str, str] | None:
if not self.proxy_settings.enable:
if not self.proxy_settings or not self.proxy_settings.enable:
return None
proxies = {"all://": self.proxy_settings.dsn}
return proxies
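
For reference, a short sketch (not part of the diff) of what the relaxed constructor allows: the TTS clients in app.py can now be built without proxy settings, in which case no proxies are configured.

import lib.clients as clients

# proxy_settings is optional now; when omitted, __get_proxies_from_settings() returns None.
tts_client = clients.AsyncHttpClient(
    base_url="https://tts.api.cloud.yandex.net/speech/v1/",
    headers={"Authorization": "Api-Key example-api-key"},   # placeholder header
    timeout=30,
)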

View File

@@ -2,5 +2,27 @@ from .chat_history import Message, RequestChatHistory, RequestChatMessage, Reque
from .embedding import Embedding
from .movies import Movie
from .token import Token
from .tts import *
__all__ = ["Embedding", "Message", "Movie", "RequestChatHistory", "RequestChatMessage", "RequestLastSessionId", "Token"]
__all__ = [
"AVAILABLE_MODELS_TYPE",
"Base",
"BaseLanguageCodesEnum",
"BaseVoiceModel",
"ElevenLabsLanguageCodesEnum",
"ElevenLabsListVoiceModelsModel",
"ElevenLabsVoiceModel",
"IdCreatedUpdatedBaseMixin",
"LANGUAGE_CODES_ENUM_TYPE",
"LIST_VOICE_MODELS_TYPE",
"TTSCreateRequestModel",
"TTSCreateResponseModel",
"TTSSearchVoiceRequestModel",
"Token",
"VoiceModelProvidersEnum",
"YandexLanguageCodesEnum",
"YandexListVoiceModelsModel",
"YandexVoiceModel",
]

View File

@@ -0,0 +1,20 @@
from .models import *
from .voice import *
__all__ = [
"AVAILABLE_MODELS_TYPE",
"BaseLanguageCodesEnum",
"BaseVoiceModel",
"ElevenLabsLanguageCodesEnum",
"ElevenLabsListVoiceModelsModel",
"ElevenLabsVoiceModel",
"LANGUAGE_CODES_ENUM_TYPE",
"LIST_VOICE_MODELS_TYPE",
"TTSCreateRequestModel",
"TTSCreateResponseModel",
"TTSSearchVoiceRequestModel",
"VoiceModelProvidersEnum",
"YandexLanguageCodesEnum",
"YandexListVoiceModelsModel",
"YandexVoiceModel",
]

View File

@@ -0,0 +1,64 @@
import pydantic
import lib.models.tts.voice as models_tts_voice
import lib.models.tts.voice.languages as models_tts_languages
AVAILABLE_MODELS_TYPE = models_tts_voice.YandexVoiceModel | models_tts_voice.ElevenLabsVoiceModel
LIST_VOICE_MODELS_TYPE = models_tts_voice.YandexListVoiceModelsModel | models_tts_voice.ElevenLabsListVoiceModelsModel
DEFAULT_MODEL = models_tts_voice.ElevenLabsVoiceModel(
voice_id="eleven_multilingual_v2",
languages=[
models_tts_languages.ElevenLabsLanguageCodesEnum.ENGLISH,
models_tts_languages.ElevenLabsLanguageCodesEnum.JAPANESE,
models_tts_languages.ElevenLabsLanguageCodesEnum.CHINESE,
models_tts_languages.ElevenLabsLanguageCodesEnum.GERMAN,
models_tts_languages.ElevenLabsLanguageCodesEnum.HINDI,
models_tts_languages.ElevenLabsLanguageCodesEnum.FRENCH,
models_tts_languages.ElevenLabsLanguageCodesEnum.KOREAN,
models_tts_languages.ElevenLabsLanguageCodesEnum.PORTUGUESE,
models_tts_languages.ElevenLabsLanguageCodesEnum.ITALIAN,
models_tts_languages.ElevenLabsLanguageCodesEnum.SPANISH,
models_tts_languages.ElevenLabsLanguageCodesEnum.INDONESIAN,
models_tts_languages.ElevenLabsLanguageCodesEnum.DUTCH,
models_tts_languages.ElevenLabsLanguageCodesEnum.TURKISH,
models_tts_languages.ElevenLabsLanguageCodesEnum.FILIPINO,
models_tts_languages.ElevenLabsLanguageCodesEnum.POLISH,
models_tts_languages.ElevenLabsLanguageCodesEnum.SWEDISH,
models_tts_languages.ElevenLabsLanguageCodesEnum.BULGARIAN,
models_tts_languages.ElevenLabsLanguageCodesEnum.ROMANIAN,
models_tts_languages.ElevenLabsLanguageCodesEnum.ARABIC,
models_tts_languages.ElevenLabsLanguageCodesEnum.CZECH,
models_tts_languages.ElevenLabsLanguageCodesEnum.GREEK,
models_tts_languages.ElevenLabsLanguageCodesEnum.FINNISH,
models_tts_languages.ElevenLabsLanguageCodesEnum.CROATIAN,
models_tts_languages.ElevenLabsLanguageCodesEnum.MALAY,
models_tts_languages.ElevenLabsLanguageCodesEnum.SLOVAK,
models_tts_languages.ElevenLabsLanguageCodesEnum.DANISH,
models_tts_languages.ElevenLabsLanguageCodesEnum.TAMIL,
models_tts_languages.ElevenLabsLanguageCodesEnum.UKRAINIAN,
],
)
class TTSCreateRequestModel(pydantic.BaseModel):
model_config = pydantic.ConfigDict(use_enum_values=True)
voice_model: AVAILABLE_MODELS_TYPE = DEFAULT_MODEL
text: str
class TTSCreateResponseModel(pydantic.BaseModel):
audio_content: bytes
class TTSSearchVoiceRequestModel(pydantic.BaseModel):
voice_id: str | None = None
voice_name: str | None = None
languages: list[models_tts_languages.LANGUAGE_CODES_ENUM_TYPE] | None = None
company_name: str | None = None
@pydantic.model_validator(mode="after")
def check_at_least_one_field(self):
if not any((self.voice_name, self.languages, self.company_name)):
raise ValueError("At least one field required")
return self
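
A quick sketch (import path assumed) of the search-request validator: at least one of voice_name, languages, or company_name must be supplied, otherwise validation fails; note that voice_id alone does not satisfy the check.

import pydantic
import lib.models as models  # assumed import path

# Passes: languages alone satisfies the validator.
models.TTSSearchVoiceRequestModel(languages=[models.YandexLanguageCodesEnum.RUSSIAN])

# Fails: an empty request raises "At least one field required".
try:
    models.TTSSearchVoiceRequestModel()
except pydantic.ValidationError as err:
    print(err)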

View File

@@ -0,0 +1,17 @@
from .base import *
from .eleven_labs import *
from .languages import *
from .yandex import *
__all__ = [
"BaseLanguageCodesEnum",
"BaseVoiceModel",
"ElevenLabsLanguageCodesEnum",
"ElevenLabsListVoiceModelsModel",
"ElevenLabsVoiceModel",
"LANGUAGE_CODES_ENUM_TYPE",
"VoiceModelProvidersEnum",
"YandexLanguageCodesEnum",
"YandexListVoiceModelsModel",
"YandexVoiceModel",
]

View File

@@ -0,0 +1,29 @@
import enum
import typing
import pydantic
import lib.models.tts.voice.languages as models_tts_languages
class VoiceModelProvidersEnum(enum.Enum):
YANDEX = "yandex"
ELEVEN_LABS = "eleven_labs"
class BaseVoiceModel(pydantic.BaseModel):
voice_id: str
voice_name: str | None = None
languages: list[models_tts_languages.LANGUAGE_CODES_ENUM_TYPE]
provider: VoiceModelProvidersEnum
@pydantic.model_validator(mode="before")
@classmethod
def check_voice_name_exists(cls, data: typing.Any) -> typing.Any:
if not data:
return data
voice_id = data.get("voice_id")
voice_name = data.get("voice_name")
if not voice_name and voice_id:
data["voice_name"] = voice_id
return data
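
A short sketch (import path assumed) of the "before" validator: when voice_name is omitted, it is backfilled from voice_id.

import lib.models as models  # assumed import path

model = models.BaseVoiceModel(
    voice_id="alena",
    languages=[models.BaseLanguageCodesEnum.RUSSIAN],
    provider=models.VoiceModelProvidersEnum.YANDEX,
)
assert model.voice_name == "alena"   # filled in by check_voice_name_exists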

View File

@@ -0,0 +1,83 @@
import typing
import pydantic
import lib.models.tts.voice.base as models_tts_base
import lib.models.tts.voice.languages as models_tts_languages
class ElevenLabsVoiceModel(models_tts_base.BaseVoiceModel):
model_config = pydantic.ConfigDict(use_enum_values=True)
voice_id: str
voice_name: str | None = None
languages: list[models_tts_languages.LANGUAGE_CODES_ENUM_TYPE]
provider: models_tts_base.VoiceModelProvidersEnum = models_tts_base.VoiceModelProvidersEnum.ELEVEN_LABS
class ElevenLabsListVoiceModelsModel(pydantic.BaseModel):
models: list[ElevenLabsVoiceModel] = [
ElevenLabsVoiceModel(
voice_id="eleven_multilingual_v1",
languages=[
models_tts_languages.ElevenLabsLanguageCodesEnum.ENGLISH,
models_tts_languages.ElevenLabsLanguageCodesEnum.GERMAN,
models_tts_languages.ElevenLabsLanguageCodesEnum.POLISH,
models_tts_languages.ElevenLabsLanguageCodesEnum.SPANISH,
models_tts_languages.ElevenLabsLanguageCodesEnum.ITALIAN,
models_tts_languages.ElevenLabsLanguageCodesEnum.FRENCH,
models_tts_languages.ElevenLabsLanguageCodesEnum.PORTUGUESE,
models_tts_languages.ElevenLabsLanguageCodesEnum.HINDI,
models_tts_languages.ElevenLabsLanguageCodesEnum.ARABIC,
],
),
ElevenLabsVoiceModel(
voice_id="eleven_multilingual_v2",
languages=[
models_tts_languages.ElevenLabsLanguageCodesEnum.ENGLISH,
models_tts_languages.ElevenLabsLanguageCodesEnum.JAPANESE,
models_tts_languages.ElevenLabsLanguageCodesEnum.CHINESE,
models_tts_languages.ElevenLabsLanguageCodesEnum.GERMAN,
models_tts_languages.ElevenLabsLanguageCodesEnum.HINDI,
models_tts_languages.ElevenLabsLanguageCodesEnum.FRENCH,
models_tts_languages.ElevenLabsLanguageCodesEnum.KOREAN,
models_tts_languages.ElevenLabsLanguageCodesEnum.PORTUGUESE,
models_tts_languages.ElevenLabsLanguageCodesEnum.ITALIAN,
models_tts_languages.ElevenLabsLanguageCodesEnum.SPANISH,
models_tts_languages.ElevenLabsLanguageCodesEnum.INDONESIAN,
models_tts_languages.ElevenLabsLanguageCodesEnum.DUTCH,
models_tts_languages.ElevenLabsLanguageCodesEnum.TURKISH,
models_tts_languages.ElevenLabsLanguageCodesEnum.FILIPINO,
models_tts_languages.ElevenLabsLanguageCodesEnum.POLISH,
models_tts_languages.ElevenLabsLanguageCodesEnum.SWEDISH,
models_tts_languages.ElevenLabsLanguageCodesEnum.BULGARIAN,
models_tts_languages.ElevenLabsLanguageCodesEnum.ROMANIAN,
models_tts_languages.ElevenLabsLanguageCodesEnum.ARABIC,
models_tts_languages.ElevenLabsLanguageCodesEnum.CZECH,
models_tts_languages.ElevenLabsLanguageCodesEnum.GREEK,
models_tts_languages.ElevenLabsLanguageCodesEnum.FINNISH,
models_tts_languages.ElevenLabsLanguageCodesEnum.CROATIAN,
models_tts_languages.ElevenLabsLanguageCodesEnum.MALAY,
models_tts_languages.ElevenLabsLanguageCodesEnum.SLOVAK,
models_tts_languages.ElevenLabsLanguageCodesEnum.DANISH,
models_tts_languages.ElevenLabsLanguageCodesEnum.TAMIL,
models_tts_languages.ElevenLabsLanguageCodesEnum.UKRAINIAN,
],
),
ElevenLabsVoiceModel(
voice_id="eleven_multilingual_v2",
languages=[models_tts_languages.ElevenLabsLanguageCodesEnum.ENGLISH],
),
]
@classmethod
def from_api(cls, voice_models_from_api: list[dict[str, typing.Any]]) -> typing.Self:
voice_models = []
for voice_model in voice_models_from_api:
voice_model["voice_id"] = voice_model.pop("model_id")
voice_model["voice_name"] = voice_model.pop("name")
voice_model["languages"] = [
models_tts_languages.ElevenLabsLanguageCodesEnum(item.get("language_id"))
for item in voice_model.pop("languages")
]
voice_models.append(ElevenLabsVoiceModel.model_validate(voice_model))
return ElevenLabsListVoiceModelsModel(models=voice_models)
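
A minimal sketch (hypothetical API payload, shape inferred from the from_api method above) of how the ElevenLabs fields are renamed into the voice model.

import lib.models as models  # assumed import path

api_payload = [
    {
        "model_id": "eleven_turbo_v2",                       # hypothetical record from the models endpoint
        "name": "Eleven Turbo v2",
        "languages": [{"language_id": "en", "name": "English"}],
    }
]

catalogue = models.ElevenLabsListVoiceModelsModel.from_api(api_payload)
assert catalogue.models[0].voice_id == "eleven_turbo_v2"
assert catalogue.models[0].voice_name == "Eleven Turbo v2"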

View File

@@ -0,0 +1,83 @@
import enum
class BaseLanguageCodesEnum(enum.Enum):
RUSSIAN = "ru"
ENGLISH = "en"
KAZAKH = "kk"
GERMAN = "de"
HEBREW = "he"
UZBEK = "uz"
JAPANESE = "ja"
CHINESE = "zh"
HINDI = "hi"
FRENCH = "fr"
KOREAN = "ko"
PORTUGUESE = "pt"
ITALIAN = "it"
SPANISH = "es"
INDONESIAN = "id"
DUTCH = "nl"
TURKISH = "tr"
FILIPINO = "fil"
POLISH = "pl"
SWEDISH = "sv"
BULGARIAN = "bg"
ROMANIAN = "ro"
ARABIC = "ar"
CZECH = "cs"
GREEK = "el"
FINNISH = "fi"
CROATIAN = "hr"
MALAY = "ms"
SLOVAK = "sk"
DANISH = "da"
TAMIL = "ta"
UKRAINIAN = "uk"
class ElevenLabsLanguageCodesEnum(enum.Enum):
RUSSIAN = "ru"
ENGLISH = "en"
KAZAKH = "kk"
GERMAN = "de"
HEBREW = "he"
UZBEK = "uz"
JAPANESE = "ja"
CHINESE = "zh"
HINDI = "hi"
FRENCH = "fr"
KOREAN = "ko"
PORTUGUESE = "pt"
ITALIAN = "it"
SPANISH = "es"
INDONESIAN = "id"
DUTCH = "nl"
TURKISH = "tr"
FILIPINO = "fil"
POLISH = "pl"
SWEDISH = "sv"
BULGARIAN = "bg"
ROMANIAN = "ro"
ARABIC = "ar"
CZECH = "cs"
GREEK = "el"
FINNISH = "fi"
CROATIAN = "hr"
MALAY = "ms"
SLOVAK = "sk"
DANISH = "da"
TAMIL = "ta"
UKRAINIAN = "uk"
class YandexLanguageCodesEnum(enum.Enum):
RUSSIAN = "ru-RU"
ENGLISH = "en-US"
KAZAKH = "kk-KK"
GERMAN = "de-DE"
HEBREW = "he-IL"
UZBEK = "uz-UZ"
LANGUAGE_CODES_ENUM_TYPE = BaseLanguageCodesEnum | ElevenLabsLanguageCodesEnum | YandexLanguageCodesEnum

View File

@@ -0,0 +1,99 @@
import typing
import pydantic
import lib.models.tts.voice.base as models_tts_base
import lib.models.tts.voice.languages as models_tts_languages
class YandexVoiceModel(models_tts_base.BaseVoiceModel):
voice_id: str
voice_name: str | None = None
languages: list[models_tts_languages.LANGUAGE_CODES_ENUM_TYPE]
provider: models_tts_base.VoiceModelProvidersEnum = models_tts_base.VoiceModelProvidersEnum.YANDEX
role: str | None = None
@pydantic.model_validator(mode="before")
@classmethod
def check_voice_name_exists(cls, data: typing.Any) -> typing.Any:
if not data:
return data
voice_id = data.get("voice_id")
voice_name = data.get("voice_name")
role = data.get("role")
if not voice_name and voice_id:
data["voice_name"] = f"{voice_id} {role}" if role else voice_id
return data
class YandexListVoiceModelsModel(pydantic.BaseModel):
models: list[YandexVoiceModel] = [
YandexVoiceModel(
voice_id="ermil", role="neutral", languages=[models_tts_languages.YandexLanguageCodesEnum.RUSSIAN]
),
YandexVoiceModel(
voice_id="ermil", role="good", languages=[models_tts_languages.YandexLanguageCodesEnum.RUSSIAN]
),
YandexVoiceModel(
voice_id="alena", role="neutral", languages=[models_tts_languages.YandexLanguageCodesEnum.RUSSIAN]
),
YandexVoiceModel(
voice_id="alena", role="good", languages=[models_tts_languages.YandexLanguageCodesEnum.RUSSIAN]
),
YandexVoiceModel(
voice_id="jane", role="neutral", languages=[models_tts_languages.YandexLanguageCodesEnum.RUSSIAN]
),
YandexVoiceModel(
voice_id="jane", role="good", languages=[models_tts_languages.YandexLanguageCodesEnum.RUSSIAN]
),
YandexVoiceModel(
voice_id="jane", role="evil", languages=[models_tts_languages.YandexLanguageCodesEnum.RUSSIAN]
),
YandexVoiceModel(
voice_id="omazh", role="neutral", languages=[models_tts_languages.YandexLanguageCodesEnum.RUSSIAN]
),
YandexVoiceModel(
voice_id="omazh", role="evil", languages=[models_tts_languages.YandexLanguageCodesEnum.RUSSIAN]
),
YandexVoiceModel(
voice_id="zahar", role="neutral", languages=[models_tts_languages.YandexLanguageCodesEnum.RUSSIAN]
),
YandexVoiceModel(
voice_id="zahar", role="good", languages=[models_tts_languages.YandexLanguageCodesEnum.RUSSIAN]
),
YandexVoiceModel(
voice_id="filipp", role=None, languages=[models_tts_languages.YandexLanguageCodesEnum.RUSSIAN]
),
YandexVoiceModel(
voice_id="madirus", role=None, languages=[models_tts_languages.YandexLanguageCodesEnum.RUSSIAN]
),
YandexVoiceModel(voice_id="dasha", role=None, languages=[models_tts_languages.YandexLanguageCodesEnum.RUSSIAN]),
YandexVoiceModel(voice_id="julia", role=None, languages=[models_tts_languages.YandexLanguageCodesEnum.RUSSIAN]),
YandexVoiceModel(voice_id="lera", role=None, languages=[models_tts_languages.YandexLanguageCodesEnum.RUSSIAN]),
YandexVoiceModel(
voice_id="marina", role=None, languages=[models_tts_languages.YandexLanguageCodesEnum.RUSSIAN]
),
YandexVoiceModel(
voice_id="alexander", role=None, languages=[models_tts_languages.YandexLanguageCodesEnum.RUSSIAN]
),
YandexVoiceModel(
voice_id="kirill", role=None, languages=[models_tts_languages.YandexLanguageCodesEnum.RUSSIAN]
),
YandexVoiceModel(voice_id="anton", role=None, languages=[models_tts_languages.YandexLanguageCodesEnum.RUSSIAN]),
YandexVoiceModel(voice_id="john", role=None, languages=[models_tts_languages.YandexLanguageCodesEnum.ENGLISH]),
YandexVoiceModel(voice_id="amira", role=None, languages=[models_tts_languages.YandexLanguageCodesEnum.KAZAKH]),
YandexVoiceModel(voice_id="madi", role=None, languages=[models_tts_languages.YandexLanguageCodesEnum.KAZAKH]),
YandexVoiceModel(voice_id="lea", role=None, languages=[models_tts_languages.YandexLanguageCodesEnum.GERMAN]),
YandexVoiceModel(
voice_id="naomi", role="modern", languages=[models_tts_languages.YandexLanguageCodesEnum.HEBREW]
),
YandexVoiceModel(
voice_id="naomi", role="classic", languages=[models_tts_languages.YandexLanguageCodesEnum.HEBREW]
),
YandexVoiceModel(voice_id="nigora", role=None, languages=[models_tts_languages.YandexLanguageCodesEnum.UZBEK]),
]
@classmethod
def from_api(cls, voice_models_from_api: list[dict[str, typing.Any]]) -> typing.Self:
voice_models = [YandexVoiceModel.model_validate(voice_model) for voice_model in voice_models_from_api]
return YandexListVoiceModelsModel(models=voice_models)

View File

@@ -1,8 +1,11 @@
import http
import mimetypes
import tempfile
import fastapi
import magic
import openai
import pydantic
import lib.app.settings as app_settings
import lib.stt as stt
@@ -24,15 +27,24 @@ class OpenaiSpeechRepository:
async def speech_to_text(self, audio: bytes) -> str:
file_extension = self.__get_file_extension_from_bytes(audio)
if not file_extension:
raise ValueError("File extension is not supported")
voice: stt.models.SttVoice = stt.models.SttVoice(
audio_size=len(audio) // 1024, # audio size in MB,
audio_format=file_extension,
audio_data=audio,
voice_settings=self.settings.voice,
)
if not file_extension or file_extension not in self.settings.voice.available_formats:
raise fastapi.HTTPException(
status_code=http.HTTPStatus.UNSUPPORTED_MEDIA_TYPE,
detail=f"File extension is not supported. "
f"Available extensions: {self.settings.voice.available_formats}",
)
try:
voice: stt.models.SttVoice = stt.models.SttVoice(
audio_size=len(audio) // 1024, # audio size in KB
audio_format=file_extension,
audio_data=audio,
voice_settings=self.settings.voice,
)
except (pydantic.ValidationError, ValueError) as e:
raise fastapi.HTTPException(
status_code=http.HTTPStatus.BAD_REQUEST,
detail=f"Voice validation error: {e}",
)
try:
with tempfile.NamedTemporaryFile(suffix=f".{file_extension}") as temp_file:
@@ -40,8 +52,14 @@ class OpenaiSpeechRepository:
temp_file.seek(0)
transcript = openai.Audio.transcribe(self.settings.openai.stt_model, temp_file) # type: ignore
except openai.error.InvalidRequestError as e: # type: ignore[reportGeneralTypeIssues]
raise ValueError(f"OpenAI API error: {e}")
raise fastapi.HTTPException(
status_code=http.HTTPStatus.BAD_REQUEST,
detail=f"OpenAI request error: {e}",
)
except openai.error.OpenAIError as e: # type: ignore[reportGeneralTypeIssues]
raise ValueError(f"OpenAI API error: {e}")
raise fastapi.HTTPException(
status_code=http.HTTPStatus.BAD_REQUEST,
detail=f"OpenAI API error: {e}",
)
return transcript.text # type: ignore[reportUnknownVariableType]

View File

@@ -0,0 +1,9 @@
from .repositories import *
from .services import *
__all__ = [
"TTSBaseRepository",
"TTSElevenLabsRepository",
"TTSService",
"TTSYandexRepository",
]

View File

@@ -0,0 +1,5 @@
from .protocols import *
__all__ = [
"TTSRepositoryProtocol",
]

View File

@@ -0,0 +1,16 @@
import typing
import lib.models as models
class TTSRepositoryProtocol(typing.Protocol):
async def get_audio_as_bytes(self, request: models.TTSCreateRequestModel) -> models.TTSCreateResponseModel:
...
async def get_voice_model_by_name(self, voice_model_name: str) -> models.BaseVoiceModel | None:
...
async def get_voice_models_by_fields(
self, fields: models.TTSSearchVoiceRequestModel
) -> models.LIST_VOICE_MODELS_TYPE:
...

View File

@@ -0,0 +1,9 @@
from .base import *
from .eleven_labs import *
from .yandex import *
__all__ = [
"TTSBaseRepository",
"TTSElevenLabsRepository",
"TTSYandexRepository",
]

View File

@@ -0,0 +1,56 @@
import abc
import lib.clients as clients
import lib.models as models
class TTSBaseRepository(abc.ABC):
def __init__(self, client: clients.AsyncHttpClient, is_models_from_api: bool = False):
self.http_client = client
self.is_models_from_api = is_models_from_api
@property
@abc.abstractmethod
async def voice_models(self) -> models.LIST_VOICE_MODELS_TYPE:
raise NotImplementedError
@abc.abstractmethod
async def get_audio_as_bytes(self, request: models.TTSCreateRequestModel) -> models.TTSCreateResponseModel:
raise NotImplementedError
async def get_voice_model_by_name(self, voice_model_name: str) -> models.BaseVoiceModel | None:
"""
Search voice model by name
:param voice_model_name: String name
:return: Voice model that matches the name
"""
voice_models = await self.voice_models
for voice_model in voice_models.models:
if voice_model.voice_name == voice_model_name:
return voice_model
async def get_list_voice_models_by_fields(
self, fields: models.TTSSearchVoiceRequestModel
) -> list[models.AVAILABLE_MODELS_TYPE]:
"""
Search voice model by fields
:param fields: Any fields from TTSSearchVoiceRequestModel
:return: All voice models that match the fields
"""
fields_dump = fields.model_dump(exclude_none=True)
voice_models_response = []
voice_models = await self.voice_models
for voice_model in voice_models.models:
for field, field_value in fields_dump.items():
if field == "languages": # language is a list
language_names: set[str] = {item.name for item in field_value}
voice_model_language_names: set[str] = {item.name for item in voice_model.languages}
if language_names.issubset(voice_model_language_names):
continue
break
voice_model_dump = voice_model.model_dump()
if voice_model_dump[field] != field_value.name:
break
else:
voice_models_response.append(voice_model)
return voice_models_response # type: ignore[reportUnknownVariableType]
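
A self-contained sketch (not part of the commit) of the field matching above, using a hypothetical subclass that serves the built-in Yandex catalogue so no HTTP call is made; import paths are assumed from this commit.

import asyncio
import lib.clients as clients
import lib.models as models
import lib.tts.repositories.base as tts_repositories_base

class _StaticVoiceRepository(tts_repositories_base.TTSBaseRepository):
    """Hypothetical repository: serves the default Yandex voice catalogue, never calls an API."""

    @property
    async def voice_models(self) -> models.YandexListVoiceModelsModel:
        return models.YandexListVoiceModelsModel()

    async def get_audio_as_bytes(self, request: models.TTSCreateRequestModel) -> models.TTSCreateResponseModel:
        raise NotImplementedError  # not exercised in this sketch

async def main() -> None:
    repository = _StaticVoiceRepository(
        client=clients.AsyncHttpClient(base_url="https://example.invalid"),  # placeholder; the client is never used here
    )
    kazakh_voices = await repository.get_list_voice_models_by_fields(
        models.TTSSearchVoiceRequestModel(languages=[models.YandexLanguageCodesEnum.KAZAKH])
    )
    print([voice.voice_name for voice in kazakh_voices])   # expected: ['amira', 'madi']

asyncio.run(main())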

View File

@@ -0,0 +1,42 @@
import typing
import lib.app.split_settings as app_split_settings
import lib.clients as clients
import lib.models as models
import lib.tts.repositories.base as tts_repositories_base
class TTSElevenLabsRepository(tts_repositories_base.TTSBaseRepository):
def __init__(
self,
tts_settings: app_split_settings.TTSElevenLabsSettings,
client: clients.AsyncHttpClient,
is_models_from_api: bool = False,
):
self.tts_settings = tts_settings
super().__init__(client, is_models_from_api)
@property
async def voice_models(self) -> models.ElevenLabsListVoiceModelsModel:
if self.is_models_from_api:
return models.ElevenLabsListVoiceModelsModel.from_api(await self.get_all_models_dict_from_api())
return models.ElevenLabsListVoiceModelsModel()
async def get_all_models_dict_from_api(self) -> list[dict[str, typing.Any]]:
response = await self.http_client.get("/models")
return response.json()
async def get_audio_as_bytes(self, request: models.TTSCreateRequestModel) -> models.TTSCreateResponseModel:
if not isinstance(request.voice_model, models.ElevenLabsVoiceModel):
raise ValueError("ElevenLabs TTS support only ElevenLabsVoiceModel")
response = await self.http_client.post(
f"/text-to-speech/{self.tts_settings.default_voice_id}",
json={"text": request.text, "model_id": request.voice_model.voice_id},
)
return models.TTSCreateResponseModel(audio_content=response.content)
async def get_voice_models_by_fields(
self, fields: models.TTSSearchVoiceRequestModel
) -> models.ElevenLabsListVoiceModelsModel:
list_voice_models = await self.get_list_voice_models_by_fields(fields)
return models.ElevenLabsListVoiceModelsModel(models=list_voice_models) # type: ignore

View File

@@ -0,0 +1,48 @@
import logging
import lib.app.split_settings as app_split_settings
import lib.clients as clients
import lib.models as models
import lib.tts.repositories.base as tts_repositories_base
logger = logging.getLogger(__name__)
class TTSYandexRepository(tts_repositories_base.TTSBaseRepository):
def __init__(
self,
tts_settings: app_split_settings.TTSYandexSettings,
client: clients.AsyncHttpClient,
is_models_from_api: bool = False,
):
self.tts_settings = tts_settings
if is_models_from_api:
logger.warning("Yandex TTS doesn't support getting models from API")
super().__init__(client, is_models_from_api=False)
@property
async def voice_models(self) -> models.YandexListVoiceModelsModel:
return models.YandexListVoiceModelsModel()
async def get_audio_as_bytes(self, request: models.TTSCreateRequestModel) -> models.TTSCreateResponseModel:
if not isinstance(request.voice_model, models.YandexVoiceModel):
raise ValueError("Yandex TTS support only YandexVoiceModel")
data = {
"text": request.text,
"lang": request.voice_model.languages[0].value,
"voice": request.voice_model.voice_id,
"emotion": request.voice_model.role,
"format": self.tts_settings.audio_format,
"sampleRateHertz": self.tts_settings.sample_rate_hertz,
}
response = await self.http_client.post(
"/tts:synthesize",
data=data,
)
return models.TTSCreateResponseModel(audio_content=response.content)
async def get_voice_models_by_fields(
self, fields: models.TTSSearchVoiceRequestModel
) -> models.YandexListVoiceModelsModel:
list_voice_models = await self.get_list_voice_models_by_fields(fields)
return models.YandexListVoiceModelsModel(models=list_voice_models) # type: ignore

View File

@@ -0,0 +1,33 @@
import lib.models as _models
import lib.tts.models as tts_models
class TTSService:
def __init__(
self,
repositories: dict[_models.VoiceModelProvidersEnum, tts_models.TTSRepositoryProtocol],
):
self.repositories = repositories
async def get_audio_as_bytes(self, request: _models.TTSCreateRequestModel) -> _models.TTSCreateResponseModel:
model = request.voice_model
repository = self.repositories[model.provider]
audio_response = await repository.get_audio_as_bytes(request)
return audio_response
async def get_voice_model_by_name(self, voice_model_name: str) -> _models.BaseVoiceModel | None:
for repository in self.repositories.values():
voice_model = await repository.get_voice_model_by_name(voice_model_name)
if voice_model:
return voice_model
raise ValueError("Voice model not found")
async def get_list_voice_models_by_fields(
self, fields: _models.TTSSearchVoiceRequestModel
) -> list[_models.AVAILABLE_MODELS_TYPE]:
response_models: list[_models.AVAILABLE_MODELS_TYPE] = []
for repository in self.repositories.values():
voice_models = await repository.get_voice_models_by_fields(fields)
if voice_models.models:
response_models.extend(voice_models.models)
return response_models
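
Finally, a minimal wiring sketch mirroring app.py above (import paths assumed; only the Yandex repository is included so the example stays self-contained, and the .env shown earlier is assumed to be present for TTSYandexSettings).

import asyncio
import lib.app.split_settings as app_split_settings
import lib.clients as clients
import lib.models as models
import lib.tts as tts

async def main() -> None:
    yandex_repository = tts.TTSYandexRepository(
        tts_settings=app_split_settings.TTSYandexSettings(),
        client=clients.AsyncHttpClient(base_url="https://tts.api.cloud.yandex.net/speech/v1/"),
    )
    service = tts.TTSService(repositories={models.VoiceModelProvidersEnum.YANDEX: yandex_repository})

    # Look up a voice from the built-in catalogue; no HTTP request is needed for this call.
    voice = await service.get_voice_model_by_name("alena good")
    print(voice)

asyncio.run(main())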

View File

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
[[package]]
name = "aiohttp"
@@ -645,7 +645,6 @@ files = [
{file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"},
{file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"},
{file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"},
{file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"},
{file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"},
{file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"},
{file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"},
@@ -654,7 +653,6 @@ files = [
{file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"},
{file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"},
{file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"},
{file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"},
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"},
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"},
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"},
@@ -684,7 +682,6 @@ files = [
{file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"},
{file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"},
{file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"},
{file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"},
@@ -693,7 +690,6 @@ files = [
{file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"},
{file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"},
{file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"},
{file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"},
{file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"},
{file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"},
{file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"},
@@ -1203,25 +1199,71 @@ wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1
[[package]]
name = "orjson"
version = "3.9.8"
version = "3.9.7"
description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.7"
files = [
{file = "orjson-3.9.8-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:823525bfb27b804b492acc59a45dc0973ea629d97557eac81dde7b34b5267611"},
{file = "orjson-3.9.8-cp310-none-win32.whl", hash = "sha256:2bcc9dc53f9e1d679515349bf299ed5e75310146c755d2ba227a7e37851ab3fb"},
{file = "orjson-3.9.8-cp310-none-win_amd64.whl", hash = "sha256:423774c85e73054acfef10fc3328f35c8d3e0193a7247d47308ebfccde70695d"},
{file = "orjson-3.9.8-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8a1c92f467f5fd0f8fb79273006b563364b1e45667b3760423498348dc2e22fa"},
{file = "orjson-3.9.8-cp311-none-win32.whl", hash = "sha256:a119c73520192c2882d0549151b9cdd65e0bb5396bedf8951ba5f70d6a873879"},
{file = "orjson-3.9.8-cp311-none-win_amd64.whl", hash = "sha256:764306f6370e6c76cbbf3139dd9b05be9c4481ee0b15966bd1907827a5777216"},
{file = "orjson-3.9.8-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:af8e6185516ce0c93d6ce1f4105918504da629c631fd969686f32a1be3ed3c9b"},
{file = "orjson-3.9.8-cp312-none-win_amd64.whl", hash = "sha256:5c818f19315251d68954c529f5d8322053f1c35b500b47d008e968bf2d32ed97"},
{file = "orjson-3.9.8-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e6a267c0fc64fc4d0b8fb146e1a060a40f570441a9390ec4bc6de0b5fda148cd"},
{file = "orjson-3.9.8-cp38-none-win32.whl", hash = "sha256:9df23493a72f073b2ab1005e628a963248dc577a2816e9c82caf09ff74908414"},
{file = "orjson-3.9.8-cp38-none-win_amd64.whl", hash = "sha256:34eec476141a043d478651d1efbf218162cdd57add24dfa659ac89e1a001477a"},
{file = "orjson-3.9.8-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c9ae634b8a55539c3d5a53813552325733ab3da3601feef8e99f91cef634f3c4"},
{file = "orjson-3.9.8-cp39-none-win32.whl", hash = "sha256:ca4f3e15517bdcdb573dfe6c97d4171247ce50ec82e3a7b708941b53d5f4bc29"},
{file = "orjson-3.9.8-cp39-none-win_amd64.whl", hash = "sha256:52c0480d5be12697b10b4d748b86acd4999f47e1d8e44e49486d0a550f30fcba"},
{file = "orjson-3.9.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b6df858e37c321cefbf27fe7ece30a950bcc3a75618a804a0dcef7ed9dd9c92d"},
{file = "orjson-3.9.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5198633137780d78b86bb54dafaaa9baea698b4f059456cd4554ab7009619221"},
{file = "orjson-3.9.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e736815b30f7e3c9044ec06a98ee59e217a833227e10eb157f44071faddd7c5"},
{file = "orjson-3.9.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a19e4074bc98793458b4b3ba35a9a1d132179345e60e152a1bb48c538ab863c4"},
{file = "orjson-3.9.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80acafe396ab689a326ab0d80f8cc61dec0dd2c5dca5b4b3825e7b1e0132c101"},
{file = "orjson-3.9.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:355efdbbf0cecc3bd9b12589b8f8e9f03c813a115efa53f8dc2a523bfdb01334"},
{file = "orjson-3.9.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3aab72d2cef7f1dd6104c89b0b4d6b416b0db5ca87cc2fac5f79c5601f549cc2"},
{file = "orjson-3.9.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:36b1df2e4095368ee388190687cb1b8557c67bc38400a942a1a77713580b50ae"},
{file = "orjson-3.9.7-cp310-none-win32.whl", hash = "sha256:e94b7b31aa0d65f5b7c72dd8f8227dbd3e30354b99e7a9af096d967a77f2a580"},
{file = "orjson-3.9.7-cp310-none-win_amd64.whl", hash = "sha256:82720ab0cf5bb436bbd97a319ac529aee06077ff7e61cab57cee04a596c4f9b4"},
{file = "orjson-3.9.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:1f8b47650f90e298b78ecf4df003f66f54acdba6a0f763cc4df1eab048fe3738"},
{file = "orjson-3.9.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f738fee63eb263530efd4d2e9c76316c1f47b3bbf38c1bf45ae9625feed0395e"},
{file = "orjson-3.9.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38e34c3a21ed41a7dbd5349e24c3725be5416641fdeedf8f56fcbab6d981c900"},
{file = "orjson-3.9.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21a3344163be3b2c7e22cef14fa5abe957a892b2ea0525ee86ad8186921b6cf0"},
{file = "orjson-3.9.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23be6b22aab83f440b62a6f5975bcabeecb672bc627face6a83bc7aeb495dc7e"},
{file = "orjson-3.9.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5205ec0dfab1887dd383597012199f5175035e782cdb013c542187d280ca443"},
{file = "orjson-3.9.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8769806ea0b45d7bf75cad253fba9ac6700b7050ebb19337ff6b4e9060f963fa"},
{file = "orjson-3.9.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f9e01239abea2f52a429fe9d95c96df95f078f0172489d691b4a848ace54a476"},
{file = "orjson-3.9.7-cp311-none-win32.whl", hash = "sha256:8bdb6c911dae5fbf110fe4f5cba578437526334df381b3554b6ab7f626e5eeca"},
{file = "orjson-3.9.7-cp311-none-win_amd64.whl", hash = "sha256:9d62c583b5110e6a5cf5169ab616aa4ec71f2c0c30f833306f9e378cf51b6c86"},
{file = "orjson-3.9.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:1c3cee5c23979deb8d1b82dc4cc49be59cccc0547999dbe9adb434bb7af11cf7"},
{file = "orjson-3.9.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a347d7b43cb609e780ff8d7b3107d4bcb5b6fd09c2702aa7bdf52f15ed09fa09"},
{file = "orjson-3.9.7-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:154fd67216c2ca38a2edb4089584504fbb6c0694b518b9020ad35ecc97252bb9"},
{file = "orjson-3.9.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ea3e63e61b4b0beeb08508458bdff2daca7a321468d3c4b320a758a2f554d31"},
{file = "orjson-3.9.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1eb0b0b2476f357eb2975ff040ef23978137aa674cd86204cfd15d2d17318588"},
{file = "orjson-3.9.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b9a20a03576c6b7022926f614ac5a6b0914486825eac89196adf3267c6489d"},
{file = "orjson-3.9.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:915e22c93e7b7b636240c5a79da5f6e4e84988d699656c8e27f2ac4c95b8dcc0"},
{file = "orjson-3.9.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f26fb3e8e3e2ee405c947ff44a3e384e8fa1843bc35830fe6f3d9a95a1147b6e"},
{file = "orjson-3.9.7-cp312-none-win_amd64.whl", hash = "sha256:d8692948cada6ee21f33db5e23460f71c8010d6dfcfe293c9b96737600a7df78"},
{file = "orjson-3.9.7-cp37-cp37m-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7bab596678d29ad969a524823c4e828929a90c09e91cc438e0ad79b37ce41166"},
{file = "orjson-3.9.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63ef3d371ea0b7239ace284cab9cd00d9c92b73119a7c274b437adb09bda35e6"},
{file = "orjson-3.9.7-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f8fcf696bbbc584c0c7ed4adb92fd2ad7d153a50258842787bc1524e50d7081"},
{file = "orjson-3.9.7-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:90fe73a1f0321265126cbba13677dcceb367d926c7a65807bd80916af4c17047"},
{file = "orjson-3.9.7-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:45a47f41b6c3beeb31ac5cf0ff7524987cfcce0a10c43156eb3ee8d92d92bf22"},
{file = "orjson-3.9.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a2937f528c84e64be20cb80e70cea76a6dfb74b628a04dab130679d4454395c"},
{file = "orjson-3.9.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b4fb306c96e04c5863d52ba8d65137917a3d999059c11e659eba7b75a69167bd"},
{file = "orjson-3.9.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:410aa9d34ad1089898f3db461b7b744d0efcf9252a9415bbdf23540d4f67589f"},
{file = "orjson-3.9.7-cp37-none-win32.whl", hash = "sha256:26ffb398de58247ff7bde895fe30817a036f967b0ad0e1cf2b54bda5f8dcfdd9"},
{file = "orjson-3.9.7-cp37-none-win_amd64.whl", hash = "sha256:bcb9a60ed2101af2af450318cd89c6b8313e9f8df4e8fb12b657b2e97227cf08"},
{file = "orjson-3.9.7-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:5da9032dac184b2ae2da4bce423edff7db34bfd936ebd7d4207ea45840f03905"},
{file = "orjson-3.9.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7951af8f2998045c656ba8062e8edf5e83fd82b912534ab1de1345de08a41d2b"},
{file = "orjson-3.9.7-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b8e59650292aa3a8ea78073fc84184538783966528e442a1b9ed653aa282edcf"},
{file = "orjson-3.9.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9274ba499e7dfb8a651ee876d80386b481336d3868cba29af839370514e4dce0"},
{file = "orjson-3.9.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca1706e8b8b565e934c142db6a9592e6401dc430e4b067a97781a997070c5378"},
{file = "orjson-3.9.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83cc275cf6dcb1a248e1876cdefd3f9b5f01063854acdfd687ec360cd3c9712a"},
{file = "orjson-3.9.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:11c10f31f2c2056585f89d8229a56013bc2fe5de51e095ebc71868d070a8dd81"},
{file = "orjson-3.9.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cf334ce1d2fadd1bf3e5e9bf15e58e0c42b26eb6590875ce65bd877d917a58aa"},
{file = "orjson-3.9.7-cp38-none-win32.whl", hash = "sha256:76a0fc023910d8a8ab64daed8d31d608446d2d77c6474b616b34537aa7b79c7f"},
{file = "orjson-3.9.7-cp38-none-win_amd64.whl", hash = "sha256:7a34a199d89d82d1897fd4a47820eb50947eec9cda5fd73f4578ff692a912f89"},
{file = "orjson-3.9.7-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e7e7f44e091b93eb39db88bb0cb765db09b7a7f64aea2f35e7d86cbf47046c65"},
{file = "orjson-3.9.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01d647b2a9c45a23a84c3e70e19d120011cba5f56131d185c1b78685457320bb"},
{file = "orjson-3.9.7-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0eb850a87e900a9c484150c414e21af53a6125a13f6e378cf4cc11ae86c8f9c5"},
{file = "orjson-3.9.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f4b0042d8388ac85b8330b65406c84c3229420a05068445c13ca28cc222f1f7"},
{file = "orjson-3.9.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd3e7aae977c723cc1dbb82f97babdb5e5fbce109630fbabb2ea5053523c89d3"},
{file = "orjson-3.9.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c616b796358a70b1f675a24628e4823b67d9e376df2703e893da58247458956"},
{file = "orjson-3.9.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c3ba725cf5cf87d2d2d988d39c6a2a8b6fc983d78ff71bc728b0be54c869c884"},
{file = "orjson-3.9.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4891d4c934f88b6c29b56395dfc7014ebf7e10b9e22ffd9877784e16c6b2064f"},
{file = "orjson-3.9.7-cp39-none-win32.whl", hash = "sha256:14d3fb6cd1040a4a4a530b28e8085131ed94ebc90d72793c59a713de34b60838"},
{file = "orjson-3.9.7-cp39-none-win_amd64.whl", hash = "sha256:9ef82157bbcecd75d6296d5d8b2d792242afcd064eb1ac573f8847b52e58f677"},
{file = "orjson-3.9.7.tar.gz", hash = "sha256:85e39198f78e2f7e054d296395f6c96f5e02892337746ef5b6a1bf3ed5910142"},
]
[[package]]
@@ -1600,13 +1642,13 @@ pytest = ">=4.6"
[[package]]
name = "pyright"
version = "1.1.330.post0"
version = "1.1.331"
description = "Command line wrapper for pyright"
optional = false
python-versions = ">=3.7"
files = [
{file = "pyright-1.1.330.post0-py3-none-any.whl", hash = "sha256:2e9e0878298685b66485b340a0aaa16342129eb03ff9ed0e3c1ab66b8bfbe914"},
{file = "pyright-1.1.330.post0.tar.gz", hash = "sha256:8e5b09cc5d1cfa0bcbf8824b0316d21c43fe229da7cef0a09cd12fcf6cb3eedd"},
{file = "pyright-1.1.331-py3-none-any.whl", hash = "sha256:d200a01794e7f2a04d5042a6c3abee36ce92780287d3037edfc3604d45488f0e"},
{file = "pyright-1.1.331.tar.gz", hash = "sha256:c3e7b86154cac86c3bd61ea0f963143d001c201e246825aaabdddfcce5d04293"},
]
[package.dependencies]
@@ -1649,6 +1691,22 @@ files = [
[package.dependencies]
six = ">=1.5"
name = "pytest-asyncio"
version = "0.21.1"
description = "Pytest support for asyncio"
optional = false
python-versions = ">=3.7"
files = [
{file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"},
{file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"},
]
[package.dependencies]
pytest = ">=7.0.0"
[package.extras]
docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"]
[[package]]
name = "python-dotenv"
@@ -1696,6 +1754,20 @@ files = [
{file = "python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3"},
]
[[package]]
name = "python-multipart"
version = "0.0.6"
description = "A streaming multipart parser for Python"
optional = false
python-versions = ">=3.7"
files = [
{file = "python_multipart-0.0.6-py3-none-any.whl", hash = "sha256:ee698bab5ef148b0a760751c261902cd096e57e10558e11aca17646b74ee1c18"},
{file = "python_multipart-0.0.6.tar.gz", hash = "sha256:e9925a80bb668529f1b67c7fdb0a5dacdd7cbfc6fb0bff3ea443fe22bdd62132"},
]
[package.extras]
dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatch", "invoke (==1.7.3)", "more-itertools (==4.3.0)", "pbr (==4.3.0)", "pluggy (==1.0.0)", "py (==1.11.0)", "pytest (==7.2.0)", "pytest-cov (==4.0.0)", "pytest-timeout (==2.1.0)", "pyyaml (==5.1)"]
[[package]]
name = "pyupgrade"
version = "3.15.0"
@@ -1874,56 +1946,64 @@ tokenize-rt = ">=3.0.1"
[[package]]
name = "sqlalchemy"
version = "2.0.21"
version = "2.0.22"
description = "Database Abstraction Library"
optional = false
python-versions = ">=3.7"
files = [
{file = "SQLAlchemy-2.0.21-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1e7dc99b23e33c71d720c4ae37ebb095bebebbd31a24b7d99dfc4753d2803ede"},
{file = "SQLAlchemy-2.0.21-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7f0c4ee579acfe6c994637527c386d1c22eb60bc1c1d36d940d8477e482095d4"},
{file = "SQLAlchemy-2.0.21-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f7d57a7e140efe69ce2d7b057c3f9a595f98d0bbdfc23fd055efdfbaa46e3a5"},
{file = "SQLAlchemy-2.0.21-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca38746eac23dd7c20bec9278d2058c7ad662b2f1576e4c3dbfcd7c00cc48fa"},
{file = "SQLAlchemy-2.0.21-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3cf229704074bce31f7f47d12883afee3b0a02bb233a0ba45ddbfe542939cca4"},
{file = "SQLAlchemy-2.0.21-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fb87f763b5d04a82ae84ccff25554ffd903baafba6698e18ebaf32561f2fe4aa"},
{file = "SQLAlchemy-2.0.21-cp310-cp310-win32.whl", hash = "sha256:89e274604abb1a7fd5c14867a412c9d49c08ccf6ce3e1e04fffc068b5b6499d4"},
{file = "SQLAlchemy-2.0.21-cp310-cp310-win_amd64.whl", hash = "sha256:e36339a68126ffb708dc6d1948161cea2a9e85d7d7b0c54f6999853d70d44430"},
{file = "SQLAlchemy-2.0.21-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bf8eebccc66829010f06fbd2b80095d7872991bfe8415098b9fe47deaaa58063"},
{file = "SQLAlchemy-2.0.21-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b977bfce15afa53d9cf6a632482d7968477625f030d86a109f7bdfe8ce3c064a"},
{file = "SQLAlchemy-2.0.21-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ff3dc2f60dbf82c9e599c2915db1526d65415be323464f84de8db3e361ba5b9"},
{file = "SQLAlchemy-2.0.21-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44ac5c89b6896f4740e7091f4a0ff2e62881da80c239dd9408f84f75a293dae9"},
{file = "SQLAlchemy-2.0.21-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:87bf91ebf15258c4701d71dcdd9c4ba39521fb6a37379ea68088ce8cd869b446"},
{file = "SQLAlchemy-2.0.21-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b69f1f754d92eb1cc6b50938359dead36b96a1dcf11a8670bff65fd9b21a4b09"},
{file = "SQLAlchemy-2.0.21-cp311-cp311-win32.whl", hash = "sha256:af520a730d523eab77d754f5cf44cc7dd7ad2d54907adeb3233177eeb22f271b"},
{file = "SQLAlchemy-2.0.21-cp311-cp311-win_amd64.whl", hash = "sha256:141675dae56522126986fa4ca713739d00ed3a6f08f3c2eb92c39c6dfec463ce"},
{file = "SQLAlchemy-2.0.21-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7614f1eab4336df7dd6bee05bc974f2b02c38d3d0c78060c5faa4cd1ca2af3b8"},
{file = "SQLAlchemy-2.0.21-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d59cb9e20d79686aa473e0302e4a82882d7118744d30bb1dfb62d3c47141b3ec"},
{file = "SQLAlchemy-2.0.21-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a95aa0672e3065d43c8aa80080cdd5cc40fe92dc873749e6c1cf23914c4b83af"},
{file = "SQLAlchemy-2.0.21-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8c323813963b2503e54d0944813cd479c10c636e3ee223bcbd7bd478bf53c178"},
{file = "SQLAlchemy-2.0.21-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:419b1276b55925b5ac9b4c7044e999f1787c69761a3c9756dec6e5c225ceca01"},
{file = "SQLAlchemy-2.0.21-cp37-cp37m-win32.whl", hash = "sha256:4615623a490e46be85fbaa6335f35cf80e61df0783240afe7d4f544778c315a9"},
{file = "SQLAlchemy-2.0.21-cp37-cp37m-win_amd64.whl", hash = "sha256:cca720d05389ab1a5877ff05af96551e58ba65e8dc65582d849ac83ddde3e231"},
{file = "SQLAlchemy-2.0.21-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b4eae01faee9f2b17f08885e3f047153ae0416648f8e8c8bd9bc677c5ce64be9"},
{file = "SQLAlchemy-2.0.21-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3eb7c03fe1cd3255811cd4e74db1ab8dca22074d50cd8937edf4ef62d758cdf4"},
{file = "SQLAlchemy-2.0.21-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2d494b6a2a2d05fb99f01b84cc9af9f5f93bf3e1e5dbdafe4bed0c2823584c1"},
{file = "SQLAlchemy-2.0.21-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b19ae41ef26c01a987e49e37c77b9ad060c59f94d3b3efdfdbf4f3daaca7b5fe"},
{file = "SQLAlchemy-2.0.21-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fc6b15465fabccc94bf7e38777d665b6a4f95efd1725049d6184b3a39fd54880"},
{file = "SQLAlchemy-2.0.21-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:014794b60d2021cc8ae0f91d4d0331fe92691ae5467a00841f7130fe877b678e"},
{file = "SQLAlchemy-2.0.21-cp38-cp38-win32.whl", hash = "sha256:0268256a34806e5d1c8f7ee93277d7ea8cc8ae391f487213139018b6805aeaf6"},
{file = "SQLAlchemy-2.0.21-cp38-cp38-win_amd64.whl", hash = "sha256:73c079e21d10ff2be54a4699f55865d4b275fd6c8bd5d90c5b1ef78ae0197301"},
{file = "SQLAlchemy-2.0.21-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:785e2f2c1cb50d0a44e2cdeea5fd36b5bf2d79c481c10f3a88a8be4cfa2c4615"},
{file = "SQLAlchemy-2.0.21-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c111cd40910ffcb615b33605fc8f8e22146aeb7933d06569ac90f219818345ef"},
{file = "SQLAlchemy-2.0.21-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9cba4e7369de663611ce7460a34be48e999e0bbb1feb9130070f0685e9a6b66"},
{file = "SQLAlchemy-2.0.21-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50a69067af86ec7f11a8e50ba85544657b1477aabf64fa447fd3736b5a0a4f67"},
{file = "SQLAlchemy-2.0.21-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ccb99c3138c9bde118b51a289d90096a3791658da9aea1754667302ed6564f6e"},
{file = "SQLAlchemy-2.0.21-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:513fd5b6513d37e985eb5b7ed89da5fd9e72354e3523980ef00d439bc549c9e9"},
{file = "SQLAlchemy-2.0.21-cp39-cp39-win32.whl", hash = "sha256:f9fefd6298433b6e9188252f3bff53b9ff0443c8fde27298b8a2b19f6617eeb9"},
{file = "SQLAlchemy-2.0.21-cp39-cp39-win_amd64.whl", hash = "sha256:2e617727fe4091cedb3e4409b39368f424934c7faa78171749f704b49b4bb4ce"},
{file = "SQLAlchemy-2.0.21-py3-none-any.whl", hash = "sha256:ea7da25ee458d8f404b93eb073116156fd7d8c2a776d8311534851f28277b4ce"},
{file = "SQLAlchemy-2.0.21.tar.gz", hash = "sha256:05b971ab1ac2994a14c56b35eaaa91f86ba080e9ad481b20d99d77f381bb6258"},
{file = "SQLAlchemy-2.0.22-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f146c61ae128ab43ea3a0955de1af7e1633942c2b2b4985ac51cc292daf33222"},
{file = "SQLAlchemy-2.0.22-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:875de9414393e778b655a3d97d60465eb3fae7c919e88b70cc10b40b9f56042d"},
{file = "SQLAlchemy-2.0.22-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13790cb42f917c45c9c850b39b9941539ca8ee7917dacf099cc0b569f3d40da7"},
{file = "SQLAlchemy-2.0.22-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e04ab55cf49daf1aeb8c622c54d23fa4bec91cb051a43cc24351ba97e1dd09f5"},
{file = "SQLAlchemy-2.0.22-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a42c9fa3abcda0dcfad053e49c4f752eef71ecd8c155221e18b99d4224621176"},
{file = "SQLAlchemy-2.0.22-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:14cd3bcbb853379fef2cd01e7c64a5d6f1d005406d877ed9509afb7a05ff40a5"},
{file = "SQLAlchemy-2.0.22-cp310-cp310-win32.whl", hash = "sha256:d143c5a9dada696bcfdb96ba2de4a47d5a89168e71d05a076e88a01386872f97"},
{file = "SQLAlchemy-2.0.22-cp310-cp310-win_amd64.whl", hash = "sha256:ccd87c25e4c8559e1b918d46b4fa90b37f459c9b4566f1dfbce0eb8122571547"},
{file = "SQLAlchemy-2.0.22-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f6ff392b27a743c1ad346d215655503cec64405d3b694228b3454878bf21590"},
{file = "SQLAlchemy-2.0.22-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f776c2c30f0e5f4db45c3ee11a5f2a8d9de68e81eb73ec4237de1e32e04ae81c"},
{file = "SQLAlchemy-2.0.22-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8f1792d20d2f4e875ce7a113f43c3561ad12b34ff796b84002a256f37ce9437"},
{file = "SQLAlchemy-2.0.22-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d80eeb5189d7d4b1af519fc3f148fe7521b9dfce8f4d6a0820e8f5769b005051"},
{file = "SQLAlchemy-2.0.22-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69fd9e41cf9368afa034e1c81f3570afb96f30fcd2eb1ef29cb4d9371c6eece2"},
{file = "SQLAlchemy-2.0.22-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54bcceaf4eebef07dadfde424f5c26b491e4a64e61761dea9459103ecd6ccc95"},
{file = "SQLAlchemy-2.0.22-cp311-cp311-win32.whl", hash = "sha256:7ee7ccf47aa503033b6afd57efbac6b9e05180f492aeed9fcf70752556f95624"},
{file = "SQLAlchemy-2.0.22-cp311-cp311-win_amd64.whl", hash = "sha256:b560f075c151900587ade06706b0c51d04b3277c111151997ea0813455378ae0"},
{file = "SQLAlchemy-2.0.22-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2c9bac865ee06d27a1533471405ad240a6f5d83195eca481f9fc4a71d8b87df8"},
{file = "SQLAlchemy-2.0.22-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:625b72d77ac8ac23da3b1622e2da88c4aedaee14df47c8432bf8f6495e655de2"},
{file = "SQLAlchemy-2.0.22-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b39a6e21110204a8c08d40ff56a73ba542ec60bab701c36ce721e7990df49fb9"},
{file = "SQLAlchemy-2.0.22-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53a766cb0b468223cafdf63e2d37f14a4757476157927b09300c8c5832d88560"},
{file = "SQLAlchemy-2.0.22-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0e1ce8ebd2e040357dde01a3fb7d30d9b5736b3e54a94002641dfd0aa12ae6ce"},
{file = "SQLAlchemy-2.0.22-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:505f503763a767556fa4deae5194b2be056b64ecca72ac65224381a0acab7ebe"},
{file = "SQLAlchemy-2.0.22-cp312-cp312-win32.whl", hash = "sha256:154a32f3c7b00de3d090bc60ec8006a78149e221f1182e3edcf0376016be9396"},
{file = "SQLAlchemy-2.0.22-cp312-cp312-win_amd64.whl", hash = "sha256:129415f89744b05741c6f0b04a84525f37fbabe5dc3774f7edf100e7458c48cd"},
{file = "SQLAlchemy-2.0.22-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3940677d341f2b685a999bffe7078697b5848a40b5f6952794ffcf3af150c301"},
{file = "SQLAlchemy-2.0.22-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55914d45a631b81a8a2cb1a54f03eea265cf1783241ac55396ec6d735be14883"},
{file = "SQLAlchemy-2.0.22-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2096d6b018d242a2bcc9e451618166f860bb0304f590d205173d317b69986c95"},
{file = "SQLAlchemy-2.0.22-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:19c6986cf2fb4bc8e0e846f97f4135a8e753b57d2aaaa87c50f9acbe606bd1db"},
{file = "SQLAlchemy-2.0.22-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ac28bd6888fe3c81fbe97584eb0b96804bd7032d6100b9701255d9441373ec1"},
{file = "SQLAlchemy-2.0.22-cp37-cp37m-win32.whl", hash = "sha256:cb9a758ad973e795267da334a92dd82bb7555cb36a0960dcabcf724d26299db8"},
{file = "SQLAlchemy-2.0.22-cp37-cp37m-win_amd64.whl", hash = "sha256:40b1206a0d923e73aa54f0a6bd61419a96b914f1cd19900b6c8226899d9742ad"},
{file = "SQLAlchemy-2.0.22-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3aa1472bf44f61dd27987cd051f1c893b7d3b17238bff8c23fceaef4f1133868"},
{file = "SQLAlchemy-2.0.22-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:56a7e2bb639df9263bf6418231bc2a92a773f57886d371ddb7a869a24919face"},
{file = "SQLAlchemy-2.0.22-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccca778c0737a773a1ad86b68bda52a71ad5950b25e120b6eb1330f0df54c3d0"},
{file = "SQLAlchemy-2.0.22-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6c3e9350f9fb16de5b5e5fbf17b578811a52d71bb784cc5ff71acb7de2a7f9"},
{file = "SQLAlchemy-2.0.22-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:564e9f9e4e6466273dbfab0e0a2e5fe819eec480c57b53a2cdee8e4fdae3ad5f"},
{file = "SQLAlchemy-2.0.22-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:af66001d7b76a3fab0d5e4c1ec9339ac45748bc4a399cbc2baa48c1980d3c1f4"},
{file = "SQLAlchemy-2.0.22-cp38-cp38-win32.whl", hash = "sha256:9e55dff5ec115316dd7a083cdc1a52de63693695aecf72bc53a8e1468ce429e5"},
{file = "SQLAlchemy-2.0.22-cp38-cp38-win_amd64.whl", hash = "sha256:4e869a8ff7ee7a833b74868a0887e8462445ec462432d8cbeff5e85f475186da"},
{file = "SQLAlchemy-2.0.22-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9886a72c8e6371280cb247c5d32c9c8fa141dc560124348762db8a8b236f8692"},
{file = "SQLAlchemy-2.0.22-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a571bc8ac092a3175a1d994794a8e7a1f2f651e7c744de24a19b4f740fe95034"},
{file = "SQLAlchemy-2.0.22-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8db5ba8b7da759b727faebc4289a9e6a51edadc7fc32207a30f7c6203a181592"},
{file = "SQLAlchemy-2.0.22-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b0b3f2686c3f162123adba3cb8b626ed7e9b8433ab528e36ed270b4f70d1cdb"},
{file = "SQLAlchemy-2.0.22-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c1fea8c0abcb070ffe15311853abfda4e55bf7dc1d4889497b3403629f3bf00"},
{file = "SQLAlchemy-2.0.22-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4bb062784f37b2d75fd9b074c8ec360ad5df71f933f927e9e95c50eb8e05323c"},
{file = "SQLAlchemy-2.0.22-cp39-cp39-win32.whl", hash = "sha256:58a3aba1bfb32ae7af68da3f277ed91d9f57620cf7ce651db96636790a78b736"},
{file = "SQLAlchemy-2.0.22-cp39-cp39-win_amd64.whl", hash = "sha256:92e512a6af769e4725fa5b25981ba790335d42c5977e94ded07db7d641490a85"},
{file = "SQLAlchemy-2.0.22-py3-none-any.whl", hash = "sha256:3076740335e4aaadd7deb3fe6dcb96b3015f1613bd190a4e1634e1b99b02ec86"},
{file = "SQLAlchemy-2.0.22.tar.gz", hash = "sha256:5434cc601aa17570d79e5377f5fd45ff92f9379e2abed0be5e8c2fba8d353d2b"},
]
[package.dependencies]
greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""}
greenlet = {version = "!=0.4.17", markers = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\""}
typing-extensions = ">=4.2.0"
[package.extras]
@ -2272,4 +2352,4 @@ multidict = ">=4.0"
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "5212b83adf6d4f20bc25f36c0f79039516153c470fce1975ed8a30596746a113"
content-hash = "cf7c2e88dd377d6929d87da3553dabdc48acaa30d58f7de2d8303159180b0c09"

View File

@ -29,14 +29,19 @@ greenlet = "^2.0.2"
httpx = "^0.25.0"
langchain = "^0.0.312"
openai = "^0.28.1"
orjson = "^3.9.7"
pgvector = "^0.2.3"
multidict = "^6.0.4"
openai = "^0.28.1"
orjson = "3.9.7"
psycopg2-binary = "^2.9.9"
pydantic = {extras = ["email"], version = "^2.3.0"}
pydantic-settings = "^2.0.3"
pytest = "^7.4.2"
pytest-asyncio = "^0.21.1"
python = "^3.11"
python-jose = "^3.3.0"
python-magic = "^0.4.27"
python-multipart = "^0.0.6"
sqlalchemy = "^2.0.20"
uvicorn = "^0.23.2"
wrapt = "^1.15.0"
@ -95,6 +100,7 @@ variable-rgx = "^_{0,2}[a-z][a-z0-9_]*$"
exclude = [
".venv",
"alembic"
".pytest_cache",
]
pythonPlatform = "All"
pythonVersion = "3.11"

View File

@ -0,0 +1,70 @@
import asyncio
import typing
import fastapi
import httpx
import pytest_asyncio
import lib.app as lib_app
import tests.core.settings as tests_core_settings
import tests.functional.models as functional_models
@pytest_asyncio.fixture # type: ignore[reportUntypedFunctionDecorator]
async def http_client(
base_url: str = tests_core_settings.tests_settings.api.get_api_url,
) -> typing.AsyncGenerator[httpx.AsyncClient, typing.Any]:
session = httpx.AsyncClient(base_url=base_url)
yield session
await session.aclose()
@pytest_asyncio.fixture # type: ignore[reportUntypedFunctionDecorator]
async def make_request(http_client: httpx.AsyncClient):
async def inner(
api_method: str = "",
method: functional_models.MethodsEnum = functional_models.MethodsEnum.GET,
headers: dict[str, str] = tests_core_settings.tests_settings.api.headers,
body: dict[str, typing.Any] | None = None,
jwt_token: str | None = None,
) -> functional_models.HTTPResponse:
if jwt_token is not None:
headers["Authorization"] = f"Bearer {jwt_token}"
client_params = {"json": body, "headers": headers}
if method == functional_models.MethodsEnum.GET:
del client_params["json"]
response = await getattr(http_client, method.value)(api_method, **client_params)
return functional_models.HTTPResponse(
body=response.json(),
headers=response.headers,
status_code=response.status_code,
)
return inner
@pytest_asyncio.fixture(scope="session") # type: ignore[reportUntypedFunctionDecorator]
def app() -> fastapi.FastAPI:
settings = lib_app.Settings()
application = lib_app.Application.from_settings(settings)
fastapi_app = application._fastapi_app # type: ignore[reportPrivateUsage]
return fastapi_app
@pytest_asyncio.fixture # type: ignore[reportUntypedFunctionDecorator]
async def app_http_client(
app: fastapi.FastAPI,
base_url: str = tests_core_settings.tests_settings.api.get_api_url,
) -> typing.AsyncGenerator[httpx.AsyncClient, typing.Any]:
session = httpx.AsyncClient(app=app, base_url=base_url)
yield session
await session.aclose()
@pytest_asyncio.fixture(scope="session") # type: ignore[reportUntypedFunctionDecorator]
def event_loop():
loop = asyncio.get_event_loop_policy().new_event_loop()
yield loop
loop.close()

View File

@ -0,0 +1,5 @@
from .settings import *
__all__ = [
"tests_settings",
]

View File

@ -0,0 +1,17 @@
import pydantic
import pydantic_settings
import tests.core.split_settings as app_split_settings
class TestsSettings(pydantic_settings.BaseSettings):
api: app_split_settings.ApiSettings = pydantic.Field(default_factory=lambda: app_split_settings.ApiSettings())
postgres: app_split_settings.PostgresSettings = pydantic.Field(
default_factory=lambda: app_split_settings.PostgresSettings()
)
project: app_split_settings.ProjectSettings = pydantic.Field(
default_factory=lambda: app_split_settings.ProjectSettings()
)
tests_settings = TestsSettings()

View File

@ -0,0 +1,9 @@
from .api import *
from .postgres import *
from .project import *
__all__ = [
"ApiSettings",
"PostgresSettings",
"ProjectSettings",
]

View File

@ -0,0 +1,23 @@
import pydantic
import pydantic_settings
import lib.app.split_settings.utils as app_split_settings_utils
class ApiSettings(pydantic_settings.BaseSettings):
model_config = pydantic_settings.SettingsConfigDict(
env_file=app_split_settings_utils.ENV_PATH,
env_prefix="TEST_API_",
env_file_encoding="utf-8",
extra="ignore",
)
protocol: str = "http"
host: str = "0.0.0.0"
port: int = 8000
headers: dict[str, str] = {"Content-Type": "application/json"}
@pydantic.computed_field
@property
def get_api_url(self) -> str:
return f"{self.protocol}://{self.host}:{self.port}/api/v1"

View File

@ -0,0 +1,42 @@
import pydantic
import pydantic_settings
import lib.app.split_settings.utils as app_split_settings_utils
class PostgresSettings(pydantic_settings.BaseSettings):
model_config = pydantic_settings.SettingsConfigDict(
env_file=app_split_settings_utils.ENV_PATH,
env_prefix="POSTGRES_",
env_file_encoding="utf-8",
extra="ignore",
)
name: str = "test_database_name"
host: str = "localhost"
port: int = 5432
user: str = "app"
password: pydantic.SecretStr = pydantic.Field(
default=...,
validation_alias=pydantic.AliasChoices("password", "postgres_password"),
)
@property
def db_uri_async(self) -> str:
db_uri: str = "postgresql+asyncpg://{pg_user}:{pg_pass}@{pg_host}/{pg_dbname}".format(
pg_user=self.user,
pg_pass=self.password.get_secret_value(),
pg_host=self.host,
pg_dbname=self.name,
)
return db_uri
@property
def db_uri_sync(self) -> str:
db_uri: str = "postgresql://{pg_user}:{pg_pass}@{pg_host}/{pg_dbname}".format(
pg_user=self.user,
pg_pass=self.password.get_secret_value(),
pg_host=self.host,
pg_dbname=self.name,
)
return db_uri

View File

@ -0,0 +1,15 @@
import pydantic
import pydantic_settings
import lib.app.split_settings.utils as app_split_settings_utils
class ProjectSettings(pydantic_settings.BaseSettings):
model_config = pydantic_settings.SettingsConfigDict(
env_file=app_split_settings_utils.ENV_PATH,
env_file_encoding="utf-8",
extra="ignore",
)
debug: bool = False
jwt_secret_key: pydantic.SecretStr = pydantic.Field(default=..., validation_alias="jwt_secret_key")

View File

@ -0,0 +1,4 @@
import pathlib
BASE_PATH = pathlib.Path(__file__).parent.parent.parent.parent.parent.resolve()
ENV_PATH = BASE_PATH / ".env"

View File

@ -0,0 +1,7 @@
from .http import *
__all__ = [
"HTTPResponse",
"MakeResponseCallableType",
"MethodsEnum",
]

View File

@ -0,0 +1,35 @@
import dataclasses
import enum
import typing
import multidict
import tests.core.settings as functional_settings
class MethodsEnum(enum.Enum):
GET = "get"
POST = "post"
PUT = "put"
DELETE = "delete"
PATCH = "patch"
@dataclasses.dataclass
class HTTPResponse:
body: dict[str, typing.Any] | str
headers: multidict.CIMultiDictProxy[str]
status_code: int
class MakeResponseCallableType(typing.Protocol):
async def __call__(
self,
api_method: str = "",
url: str = functional_settings.tests_settings.api.get_api_url,
method: MethodsEnum = MethodsEnum.GET,
headers: dict[str, str] = functional_settings.tests_settings.api.headers,
body: dict[str, typing.Any] | None = None,
jwt_token: str | None = None,
) -> HTTPResponse:
...

View File

@ -0,0 +1,17 @@
# import http
# import pytest
# import tests.functional.models as tests_functional_models
# pytestmark = [pytest.mark.asyncio]
# async def test_health(
# make_request: tests_functional_models.MakeResponseCallableType,
# ):
# response = await make_request(
# method=tests_functional_models.MethodsEnum.GET,
# api_method=f"/health/",
# )
# assert response.status_code == http.HTTPStatus.OK

View File

View File

@ -0,0 +1,3 @@
[pytest]
log_format = %(asctime)s %(levelname)s %(message)s
log_date_format = %Y-%m-%d %H:%M:%S

View File

View File

View File

@ -0,0 +1,11 @@
import http
import httpx
import pytest
pytestmark = [pytest.mark.asyncio]
async def test_health(app_http_client: httpx.AsyncClient) -> None:
response = await app_http_client.get("/health/")
assert response.status_code == http.HTTPStatus.OK

View File

@ -0,0 +1,10 @@
BOT_CONTAINER_NAME=bot_container_name
BOT_IMAGE_NAME=botimage_name
# required parameters
BOT_TOKEN=123456:Your-TokEn_ExaMple
BOT_ADMINS=123456,654321
API_PROTOCOL=http
API_URL=api
API_PORT=8000

63
src/bot_aiogram/.gitignore vendored Executable file
View File

@ -0,0 +1,63 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Translations
*.mo
*.pot
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Environments
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Pyre type checker
.pyre/
.idea/*
.env

22
src/bot_aiogram/Dockerfile Executable file
View File

@ -0,0 +1,22 @@
FROM python:3.11
RUN apt-get update \
&& DEBIAN_FRONTEND=noninteractive \
&& apt-get install -y net-tools netcat-traditional curl \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
RUN mkdir --parents /opt/app
COPY pyproject.toml /opt/app/pyproject.toml
COPY poetry.lock /opt/app/poetry.lock
COPY poetry.toml /opt/app/poetry.toml
WORKDIR /opt/app
RUN pip install poetry \
&& poetry install --no-dev
COPY bin /opt/app/bin
COPY tgbot /opt/app/tgbot
CMD [".venv/bin/python", "-m", "bin"]

11
src/bot_aiogram/Makefile Normal file
View File

@ -0,0 +1,11 @@
include ../../common_makefile.mk
PROJECT_FOLDERS = tgbot
.PHONY: test
test:
@echo 'Running tests...'
.PHONY: ci-test
ci-test:
@echo 'Running tests...'

212
src/bot_aiogram/README.md Normal file
View File

@ -0,0 +1,212 @@
# tgbot_template (aiogram v2.0)
> ⚠️ **Note**: This template is for aiogram version 2.0. If you're interested in using the latest features and functionalities, consider using the updated [tgbot_template_v3](https://github.com/Latand/tgbot_template_v3) which is compatible with aiogram 3.0.
<img height="30em" src="https://raw.githubusercontent.com/anki-geo/ultimate-geography/a44a569a922e1d241517113e2917736af808eed7/src/media/flags/ug-flag-united_kingdom.svg" alt="english" align = "center"/>
This template is recommended for use in Telegram bots written with <a href='https://github.com/aiogram/aiogram'>AIOgram</a>.
You can find tutorials on how to create and use it on the <a href='https://botfather.dev?utm_source=github_template'>website with a course on Telegram Bots Development</a>.
<br/><br/><br/>
<img height="30em" src="https://raw.githubusercontent.com/anki-geo/ultimate-geography/a44a569a922e1d241517113e2917736af808eed7/src/media/flags/ug-flag-ukraine.svg" alt="ukrainian" align = "center"/>
Цей шаблон рекомендовано використовувати для створення ваших Telegram-ботів, написаних на <a href='https://github.com/aiogram/aiogram'>AIOgram</a>.
Ви можете переглянути навчальні матеріали щодо створення та використання шаблону на <a href='https://botfather.dev?utm_source=github_template'>веб-сайті з курсом із розробки ботів Telegram</a>
<br/><br/><br/>
<img height="30em" src="https://raw.githubusercontent.com/anki-geo/ultimate-geography/a44a569a922e1d241517113e2917736af808eed7/src/media/flags/ug-flag-russia.svg" alt="russian" align = "center"/>
Этот шаблон рекомендуется использовать для создания ваших Telegram-ботов, написанных на <a href='https://github.com/aiogram/aiogram'>AIOgram</a>.
Учебные материалы по созданию и использованию шаблона можно найти на <a href='https://botfather.dev?utm_source=github_template'>веб-сайте с курсом по разработке ботов Telegram</a>
## About the template
**Structure:**
```
tgbot_template/
├── bot.py
├── tgbot/
│ ├── __init__.py
│ ├── config.py
│ ├── filters/
│ ├── handlers/
│ └── middlewares/
```
- The `tgbot` package is the root package for the bot, and it contains sub-packages for **filters**, **handlers**,
and **middlewares**.
- The `filters` package contains classes that define **custom filters** for the bot's message handlers.
- The `handlers` package contains classes that define the bot's **message handlers**, which specify the actions to take
in response to incoming messages.
- The `middlewares` package contains classes that define **custom middlewares** for the bot's dispatcher, which can be
used to perform additional processing on incoming messages.
## Detailed description
### `bot.py`
The bot.py script is the entry point for the template Telegram bot. It performs the following steps to start and run the
bot:
1. Set up logging: The `logging` module is imported and configured to log messages to the console.
2. Load the configuration: The `load_config()` function from the `tgbot.config` module is called to read the configuration
from the environment.
3. Set up the storage: Depending on the `use_redis` flag in the configuration, either a `MemoryStorage` or a `RedisStorage2`
instance is created to store the bot's state.
4. Create the bot and the dispatcher: A `Bot` instance is created using the bot token from the configuration, and a
`Dispatcher` instance is created using the `Bot` instance and the storage.
5. Register middlewares, filters, and handlers: The `register_all_middlewares()`, `register_all_filters()`, and
`register_all_handlers()` functions are called to register all the middlewares, filters, and handlers that are used by
the bot.
6. Start the polling loop: The `start_polling()` method of the Dispatcher instance is called to start the main event loop
for the bot. This method listens for incoming messages and routes them to the appropriate handler.
### `tgbot/config.py`
The `config.py` script defines a data structure for storing configuration options for the bot, such as the Telegram bot
token, database credentials, and other parameters.
The config.py script also includes a `load_config` function for loading the configuration from a file using
the `environs` library.
The config.py file defines a `Config` class, which is used to store configuration settings for the bot.
The Config class has three nested classes, `TgBot`, `DbConfig`, and `Miscellaneous`, which are used to store
configuration settings for the Telegram bot, the database, and miscellaneous settings, respectively.
The `load_config` function is used to load the configuration settings from an environment file and create a `Config`
object.
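A condensed sketch of that pattern with `environs` follows; the field names are illustrative rather than the template's exact schema, and the `Miscellaneous` section is omitted for brevity.

```python
# Sketch of config.py: nested dataclasses plus load_config() built on environs.
from dataclasses import dataclass

from environs import Env


@dataclass
class TgBot:
    token: str
    admin_ids: list[int]
    use_redis: bool


@dataclass
class DbConfig:
    host: str
    user: str
    password: str
    database: str


@dataclass
class Config:
    tg_bot: TgBot
    db: DbConfig


def load_config(path: str | None = None) -> Config:
    env = Env()
    env.read_env(path)  # read variables from the given .env file (or the process env)
    return Config(
        tg_bot=TgBot(
            token=env.str("BOT_TOKEN"),
            admin_ids=[int(x) for x in env.list("ADMINS")],
            use_redis=env.bool("USE_REDIS", False),
        ),
        db=DbConfig(
            host=env.str("DB_HOST"),
            user=env.str("DB_USER"),
            password=env.str("DB_PASS"),
            database=env.str("DB_NAME"),
        ),
    )
```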
### `tgbot/filters/admin.py`
The `admin.py` file defines an `AdminFilter` class, which is used to filter messages so that only messages from
authorized users **(i.e., users who are listed in the ADMINS configuration setting)** are processed by the bot.
The `AdminFilter` class is a subclass of `BoundFilter` from the **aiogram** library, and it defines a `key` property that
specifies the name of the filter. The `AdminFilter` class also defines an `__init__` method that takes an `is_admin`
parameter, which specifies whether the user who sent the message is an authorized user.
The `AdminFilter` class also defines a `check` method that checks whether the user who sent the message is an admin
user, and if so, it returns `True`, indicating that the message should be processed by the bot. Otherwise, it returns
`False`, indicating that the message should be ignored by the bot. The `check` method is called by the bot's dispatcher
when a message is received.
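A sketch of such a filter is shown below (aiogram 2.x `BoundFilter`; reading the admin list from a `config` object stored on the bot is an assumption based on the description above, not a guaranteed detail of the template).

```python
# Sketch of filters/admin.py: a BoundFilter keyed on `is_admin`.
import typing

from aiogram import types
from aiogram.dispatcher.filters import BoundFilter


class AdminFilter(BoundFilter):
    key = "is_admin"  # handlers opt in with is_admin=True

    def __init__(self, is_admin: typing.Optional[bool] = None) -> None:
        self.is_admin = is_admin

    async def check(self, obj: types.Message) -> bool:
        if self.is_admin is None:
            return False
        config = obj.bot.get("config")  # config placed on the bot at startup
        return (obj.from_user.id in config.tg_bot.admin_ids) == self.is_admin
```

The filter only takes effect once it is bound to the dispatcher, for example with `dp.filters_factory.bind(AdminFilter)`.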
### `tgbot/handlers/admin.py`
The `admin.py` file defines a `register_admin` function, which is used to register event handlers for messages that are
sent by authorized users (**i.e., users who are listed in the ADMINS configuration setting**).
The `register_admin` function takes a `Dispatcher` object as its parameter, and it uses this object to register event
handlers that respond to different types of messages.
For example, it might register an event handler that responds to commands that are sent by authorized users, such as
the `/echo` command, which causes the bot to repeat the text of the message back to the user.
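For example, a minimal `register_admin` might look like the sketch below; it assumes the `AdminFilter` from the previous section has already been bound to the dispatcher.

```python
# Sketch of handlers/admin.py: a /start handler restricted to admins.
from aiogram import Dispatcher, types


async def admin_start(message: types.Message) -> None:
    await message.reply("Hello, admin!")


def register_admin(dp: Dispatcher) -> None:
    dp.register_message_handler(admin_start, commands=["start"], state="*", is_admin=True)
```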
### `tgbot/handlers/echo.py`
The `echo.py` file defines a `register_echo` function, which is used to register an event handler for the `/echo`
command.
This event handler is responsible for repeating the text of the message back to the user. The `register_echo` function
takes a `Dispatcher` object as its parameter, and it uses this object to register the `/echo` command handler.
### `tgbot/handlers/user.py`
The `user.py` file defines a `register_user` function, which is used to register event handlers for messages that are
sent by non-authorized users (i.e., users who are not listed in the ADMINS configuration setting).
The `register_user` function takes a `Dispatcher` object as its parameter, and it uses this object to register event
handlers that respond to different types of messages. For example, it might register an event handler that responds to
commands that are sent by non-authorized users, such as the `/help` command, which causes the bot to send a message with
a list of available commands.
### `tgbot/middlewares/environment.py`
`environment.py` is a file that contains the `EnvironmentMiddleware` class, which is a middleware used in the Telegram bot.
A middleware is a piece of code that sits between the incoming request and the handler function. In this case, the
`EnvironmentMiddleware` class allows the bot to access the configuration data that was loaded by the `load_config`
function in the `config.py` file. This configuration data can then be accessed by other parts of the bot, such as the
handlers, to customize its behavior.
### `tgbot/keyboards/(inline|reply).py`
The `inline.py` and `reply.py` files define classes that are used to create inline and reply keyboards, respectively.
The `InlineKeyboard` class is a subclass of `InlineKeyboardMarkup` from the **aiogram** library, and it defines an
`__init__` method that takes an `inline_keyboard` parameter, which specifies the buttons that should be included in the
keyboard.
The `ReplyKeyboard` class is a subclass of `ReplyKeyboardMarkup` from the **aiogram** library, and it defines an
`__init__` method that takes a `keyboard` parameter, which specifies the buttons that should be included in the
keyboard.
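A small sketch of both keyboard kinds using the stock aiogram 2.x markup types (the template's `InlineKeyboard`/`ReplyKeyboard` wrapper classes themselves are not reproduced here):

```python
# Sketch of keyboards/inline.py and keyboards/reply.py contents.
from aiogram.types import (
    InlineKeyboardButton,
    InlineKeyboardMarkup,
    KeyboardButton,
    ReplyKeyboardMarkup,
)

# Inline keyboard: buttons attached to a message, identified via callback_data.
confirm_kb = InlineKeyboardMarkup(row_width=2)
confirm_kb.add(
    InlineKeyboardButton("Yes", callback_data="confirm:yes"),
    InlineKeyboardButton("No", callback_data="confirm:no"),
)

# Reply keyboard: buttons that replace the user's regular keyboard.
menu_kb = ReplyKeyboardMarkup(resize_keyboard=True)
menu_kb.add(KeyboardButton("Help"), KeyboardButton("Settings"))
```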
### `tgbot/misc`
In general, a package called "misc" might be used to store miscellaneous code that doesn't fit into any of the other
packages or modules in a project. This could include utility functions, helper classes, or other types of code that are
used by multiple parts of the project.
In this case, the `misc` package contains a `states.py` file, which defines a `StatesGroup` subclass that is used to
define the states that are used by the bot.
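For instance, a `states.py` along these lines (aiogram 2.x `StatesGroup`; the state names are illustrative):

```python
# Sketch of misc/states.py: a finite-state-machine group for a dialog.
from aiogram.dispatcher.filters.state import State, StatesGroup


class Registration(StatesGroup):
    waiting_for_name = State()
    waiting_for_age = State()
```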
### `tgbot/models`
The `models` package can contain a `users.py` file, which defines a `User` class that is used to represent a user in the
database. This can be used in combination with an ORM (Object-Relational Mapper) to store and retrieve data from the
database.
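An illustrative `users.py` in SQLAlchemy 2.x declarative style (the column names are assumptions, not the template's schema):

```python
# Sketch of models/users.py: a User table mapped with SQLAlchemy's declarative API.
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class User(Base):
    __tablename__ = "users"

    telegram_id: Mapped[int] = mapped_column(primary_key=True)
    full_name: Mapped[str]
    is_admin: Mapped[bool] = mapped_column(default=False)
```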
### `tgbot/services`
This package can also be named `infrastructure`. It contains the code that is used to interact with external services.
A package called "services" could contain code that defines services that are used by an application. In software
development, a service is a self-contained piece of functionality that performs a specific task or provides a specific
capability. A service is typically defined as a class or a set of functions that implement the desired functionality.
Examples of services that might be included in a services package could include a **database access service, a caching
service, a messaging service**, or any other type of functionality that is used by the application. The exact contents
of a services package would depend on the specific needs of the application and the services that it requires.
The `services` package can contain a `database.py` file, which defines a `Database` class that is used to connect to the
database and perform database operations.
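A minimal sketch of such a `Database` service built on SQLAlchemy's async engine (an assumption for illustration; the template does not prescribe a particular driver or ORM):

```python
# Sketch of services/database.py: holds the engine and hands out sessions.
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine


class Database:
    def __init__(self, dsn: str) -> None:
        self._engine = create_async_engine(dsn, echo=False)
        self._session_factory = async_sessionmaker(self._engine, expire_on_commit=False)

    def session(self) -> AsyncSession:
        # Usage: async with db.session() as session: ...
        return self._session_factory()
```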
## docker-compose.yml
The `docker-compose.yml` file defines the services that are used by the application, as well as the networks and volumes
that are needed by the application. The file begins by specifying the version of the Docker Compose file format that is
being used.
The `services` section of the file defines the containers that should be run as part of the application. In this example,
there is only one service, called `bot`, which is based on the `tg_bot-image` Docker image. The `container_name` specifies the
name that should be used for the container, and the `build` section specifies the location of the Dockerfile that should
be used to build the image.
The `working_dir` specifies the working directory that should be used by the container, and the `volumes` section specifies
the files and directories that should be mounted into the container. In this case, the entire project directory is
mounted into the container, which allows the application to access the files on the host machine.
The `command` specifies the command that should be run when the container is started, and the `restart` setting specifies
that the container should be automatically restarted if it exits.
The `env_file` setting specifies the location of the `.env` file, which contains the configuration settings for the application.
The `networks` section defines the networks that the container should be connected to. In this example, there is only one
network, called `tg_bot`, which is based on the bridge driver. This network allows the containers in the application to
communicate with each other.
## Dockerfile
The `Dockerfile` defines the instructions for building the Docker image that is used by the bot service. The file begins
by specifying the base image that should be used for the image, which in this case is `python:3.9-buster`. The `ENV`
instruction sets the value of the `BOT_NAME` environment variable, which is used by the `WORKDIR` instruction to specify the
working directory for the container.
The `COPY` instructions are used to copy the `requirements.txt` file and the entire project directory into the image. The
`RUN` instruction is used to install the Python dependencies from the `requirements.txt` file. This allows the application
to run in the container with all the necessary dependencies.

View File

View File

@ -0,0 +1,55 @@
import asyncio
import logging
import aiogram
import aiogram.contrib.fsm_storage.memory as fsm_storage_memory
import tgbot.handlers as tgbot_handlers
import tgbot.middlewares as tgbot_middlewares
import tgbot.settings as tgbot_settings
logger = logging.getLogger(__name__)
def register_all_middlewares(dp: aiogram.Dispatcher):
dp.setup_middleware(tgbot_middlewares.environment.EnvironmentMiddleware())
def register_all_handlers(dp: aiogram.Dispatcher):
tgbot_handlers.register_user(dp)
tgbot_handlers.register_echo(dp)
tgbot_handlers.register_voice_response(dp)
async def main():
logging.basicConfig(
level=logging.INFO,
format="%(filename)s:%(lineno)d #%(levelname)-8s [%(asctime)s] - %(name)s - %(message)s",
)
logger.info("Starting bot")
config = tgbot_settings.Settings()
storage = fsm_storage_memory.MemoryStorage()
bot = aiogram.Bot(token=config.tgbot.token.get_secret_value(), parse_mode="HTML")
dp = aiogram.Dispatcher(bot, storage=storage)
bot["config"] = config
register_all_middlewares(dp)
register_all_handlers(dp)
# start
try:
await dp.start_polling()
finally:
await dp.storage.close()
await dp.storage.wait_closed()
if bot.session:
await bot.session.close()
if __name__ == "__main__":
try:
asyncio.run(main())
except (KeyboardInterrupt, SystemExit):
logger.error("Bot stopped!")

View File

@ -0,0 +1,17 @@
version: "3"
services:
bot:
image: "${BOT_IMAGE_NAME:-tg_bot-image}"
container_name: "${BOT_CONTAINER_NAME:-tg_bot-container}"
build:
context: .
restart: always
env_file:
- .env
networks:
- tg_bot_network
networks:
tg_bot_network:
driver: bridge

1364
src/bot_aiogram/poetry.lock generated Normal file

File diff suppressed because it is too large

View File

@ -0,0 +1,3 @@
[virtualenvs]
create = true
in-project = true

View File

@ -0,0 +1,143 @@
[build-system]
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core"]
[tool.black]
line-length = 120
target-version = ['py311']
[tool.isort]
known_first_party = ["backend", "tests"]
line_length = 120
profile = "black"
py_version = "311"
[tool.poetry]
authors = ["jsdio@jsdio.ru"]
description = ""
name = "bot_aiogram"
readme = "README.md"
version = "0.1.0"
[tool.poetry.dependencies]
aiogram = "2.18"
environs = "9.0"
pydantic-settings = "^2.0.3"
pytest-asyncio = "^0.21.1"
python = "^3.11"
[tool.poetry.dev-dependencies]
black = "^23.7.0"
isort = "^5.12.0"
pylint = "^2.17.5"
pylint-pydantic = "^0.2.4"
pylint-pytest = "^1.1.2"
pyright = "^1.1.318"
pyupgrade = "^3.10.1"
ruff = "^0.0.282"
sort-all = "^1.2.0"
toml-sort = "^0.23.1"
[tool.pylint]
disable = [
"broad-except",
"cannot-enumerate-pytest-fixtures",
"consider-using-from-import",
"consider-using-sys-exit",
"duplicate-code",
"fixme",
"missing-docstring",
"no-member",
"protected-access",
"too-few-public-methods",
"too-many-instance-attributes",
"too-many-locals",
"too-many-statements",
"unnecessary-ellipsis"
]
extension-pkg-allow-list = [
"orjson",
"pydantic"
]
ignore-path = [
"^.*venv/.*$"
]
load-plugins = [
"pylint_pydantic",
"pylint_pytest"
]
max-args = 15
max-line-length = 120
recursive = true
[tool.pylint.basic]
argument-rgx = "^_{0,2}[a-z][a-z0-9_]*$"
attr-rgx = "^_{0,2}[a-z][a-z0-9_]*$"
class-attribute-rgx = "^_{0,2}[a-zA-Z][a-zA-Z0-9_]*$"
variable-rgx = "^_{0,2}[a-z][a-z0-9_]*$"
[tool.pyright]
exclude = [
".pytest_cache",
".venv"
]
pythonPlatform = "All"
pythonVersion = "3.11"
reportConstantRedefinition = "none"
reportMissingTypeStubs = "none"
reportPrivateUsage = "information"
reportPropertyTypeMismatch = "warning"
reportUninitializedInstanceVariable = "warning"
reportUnknownMemberType = "none"
reportUnnecessaryTypeIgnoreComment = "warning"
reportUntypedFunctionDecorator = "warning"
typeCheckingMode = "strict"
useLibraryCodeForTypes = true
venv = ".venv"
venvPath = "."
[tool.ruff]
ignore = [
# Pyright automatically infers the type of `self`
"ANN101",
# Pyright automatically infers the type of `cls`
"ANN102",
# In some cases actively detrimental; somewhat conflicts with black
"COM",
# Ignore missing docstrings
"D102",
# In combination with D213, this results in noisy diffs and inconsistencies
# See also <https://github.com/charliermarsh/ruff/issues/4174>.
"D200",
# This results in inconsistencies between function and class docstrings
# See also <https://github.com/charliermarsh/ruff/issues/4175>.
"D202",
# D211 is preferred since the extra blank line isn't visually useful
"D203",
# D213 is preferred since it's more readable and allows more characters
"D212",
# Ignore missing docstrings
"D414",
# Covered by D401, which is more restrictive
"D415",
# Type-checkers interpret redundant `as` as exporting an item
"PLC0414",
# Permit using alias for 'import'
"PLR0402",
# Causes churn and awful looking import blocks for little gain
"TCH"
]
select = ["ALL"]
[tool.ruff.per-file-ignores]
"tests/*" = [
"D100",
"D103",
"D104",
"S101"
]
[tool.tomlsort]
all = true
ignore_case = true
in_place = true

View File

@ -0,0 +1,5 @@
from .settings import Settings
__all__ = [
"Settings",
]

View File

@ -0,0 +1,9 @@
from .echo import *
from .user import *
from .voice import *
__all__ = [
"register_echo",
"register_user",
"register_voice_response",
]

View File

@ -0,0 +1,11 @@
import aiogram
async def bot_echo(message: aiogram.types.Message):
text = ["Stateless echo.", "Message:", message.text]
await message.answer("\n".join(text))
def register_echo(dp: aiogram.Dispatcher):
dp.register_message_handler(bot_echo)

View File

@ -0,0 +1,9 @@
import aiogram
async def user_start(message: aiogram.types.Message):
await message.reply("Hello, user! Send me a voice message and I'll try to recognize it and answer you.")
def register_user(dp: aiogram.Dispatcher):
dp.register_message_handler(user_start, commands=["start"], state="*")

View File

@ -0,0 +1,55 @@
import io
import json
import typing
import aiogram
import aiohttp
import tgbot.settings as tgbot_settings
async def voice_response(message_voice: aiogram.types.Message):
config = typing.cast(tgbot_settings.Settings, message_voice.bot.get("config"))
voice_file_id: str = message_voice.voice.file_id
file_info = await message_voice.bot.get_file(voice_file_id)
file_path: str = file_info.file_path
voice_data: io.BytesIO = io.BytesIO()
voice_data.name = "voice.ogg"
voice_data.seek(0)
await message_voice.bot.download_file(file_path, destination=voice_data)
await message_voice.bot.send_chat_action(message_voice.from_user.id, "typing")
async with aiohttp.ClientSession() as session:
async with session.post(
f"{config.api.api_url}/api/v1/voice/",
data={"voice": voice_data},
) as resp:
if resp.status == 200:
voice_answer: bytes = await resp.read()
answer_io = io.BytesIO(voice_answer)
answer_io.name = "answer_io.ogg"
await message_voice.bot.send_chat_action(
message_voice.from_user.id, action=aiogram.types.ChatActions.RECORD_AUDIO
)
try:
await message_voice.answer_voice(voice=answer_io)
except aiogram.exceptions.BadRequest:
await message_voice.answer(
"We were unable to send you a voice message. Please check your privacy settings."
)
else:
error_text: str = await resp.text()
if error_text == "":
await message_voice.answer(f"Error: {resp.status}")
else:
await message_voice.answer(f"Error: {json.loads(error_text)['detail']}")
await session.close()
return
def register_voice_response(dp: aiogram.Dispatcher):
dp.register_message_handler(voice_response, content_types=aiogram.types.ContentType.VOICE)

View File

View File

@ -0,0 +1,5 @@
from .environment import *
__all__ = [
"EnvironmentMiddleware",
]

View File

@ -0,0 +1,14 @@
import typing
import aiogram.dispatcher.middlewares as dispatcher_middlewares
class EnvironmentMiddleware(dispatcher_middlewares.LifetimeControllerMiddleware):
skip_patterns = ["error", "update"]
def __init__(self, **kwargs: typing.Any):
super().__init__()
self.kwargs = kwargs
async def pre_process(self, obj: typing.Any, data: dict[typing.Any, typing.Any], *args: typing.Any):
data.update(**self.kwargs)

View File

View File

View File

@ -0,0 +1,4 @@
import pathlib
BASE_PATH = pathlib.Path(__file__).parent.parent.parent.parent.resolve()
ENV_PATH = BASE_PATH / ".env"

View File

View File

@ -0,0 +1,8 @@
import pydantic_settings
import tgbot.split_settings as app_split_settings
class Settings(pydantic_settings.BaseSettings):
api: app_split_settings.ApiSettings = app_split_settings.ApiSettings()
tgbot: app_split_settings.TgBotSettings = app_split_settings.TgBotSettings()

View File

@ -0,0 +1,7 @@
from .api import *
from .tgbot import *
__all__ = [
"ApiSettings",
"TgBotSettings",
]

View File

@ -0,0 +1,20 @@
import pydantic_settings
import tgbot.split_settings.utils as split_settings_utils
class ApiSettings(pydantic_settings.BaseSettings):
model_config = pydantic_settings.SettingsConfigDict(
env_file=split_settings_utils.ENV_PATH,
env_prefix="API_",
env_file_encoding="utf-8",
extra="ignore",
)
url: str = "127.0.0.1"
port: int = 8000
protocol: str = "http"
@property
def api_url(self) -> str:
return f"{self.protocol}://{self.url}:{self.port}"

View File

@ -0,0 +1,22 @@
import pydantic
import pydantic_settings
import tgbot.split_settings.utils as split_settings_utils
class TgBotSettings(pydantic_settings.BaseSettings):
model_config = pydantic_settings.SettingsConfigDict(
env_file=split_settings_utils.ENV_PATH,
env_prefix="BOT_",
env_file_encoding="utf-8",
extra="ignore",
)
token: pydantic.SecretStr = pydantic.Field(
default=..., validation_alias=pydantic.AliasChoices("token", "bot_token")
)
admins: str = pydantic.Field(default="")
@pydantic.field_validator("admins")
def validate_bot_admins(cls, v: str) -> list[int]:
return list(map(int, v.split(",")))

View File

@ -0,0 +1,4 @@
import pathlib
BASE_PATH = pathlib.Path(__file__).parent.parent.parent.resolve()
ENV_PATH = BASE_PATH / ".env"

View File

@ -9,6 +9,10 @@ NGINX_PORT=80
API_HOST=0.0.0.0
API_PORT=8000
TEST_API_PROTOCOL=http
TEST_API_HOST=api
TEST_API_PORT=8000
JWT_SECRET_KEY=v9LctjUWwol4XbvczPiLFMDtZ8aal7mm
JWT_ALGORITHM=HS256

View File

@ -0,0 +1,18 @@
FROM python:3.11
RUN apt-get update
WORKDIR /opt/app
COPY pyproject.toml ./
COPY poetry.lock ./
RUN apt-get update \
&& pip install poetry \
&& poetry config virtualenvs.create false \
&& poetry install --no-dev
COPY tests tests
COPY lib lib
CMD ["pytest"]

View File

@ -1,3 +1,14 @@
include ../../common_makefile.mk
PROJECT_FOLDERS = bin lib tests
PROJECT_FOLDERS = bin lib tests
.PHONY: test
test:
@echo 'Running tests...'
@$(PYTHON) -m pytest tests/unit
.PHONY: ci-test
ci-test:
@echo 'Running tests...'
@$(PYTHON) -m pytest tests/unit

View File

@ -0,0 +1,56 @@
version: "3"
services:
postgres:
image: postgres:15.2
restart: always
environment:
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
POSTGRES_DB: ${POSTGRES_NAME}
env_file:
- .env
expose:
- "${POSTGRES_PORT}"
volumes:
- postgres_data:/var/lib/postgresql/data/
networks:
- backend_network
api:
build:
context: .
container_name: api
image: fastapi_app
restart: always
entrypoint: ["/opt/app/entrypoint.sh"]
env_file:
- .env
expose:
- "${API_PORT}"
depends_on:
- postgres
networks:
- backend_network
- api_network
tests:
build:
context: .
dockerfile: "Dockerfile.tests"
env_file:
- .env
depends_on:
- postgres
- api
networks:
- api_network
volumes:
postgres_data:
networks:
api_network:
driver: bridge
backend_network:
driver: bridge

197
src/template/poetry.lock generated
View File

@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
[[package]]
name = "alembic"
@ -296,7 +296,6 @@ files = [
{file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"},
{file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"},
{file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"},
{file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"},
{file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"},
{file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"},
{file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"},
@ -305,7 +304,6 @@ files = [
{file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"},
{file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"},
{file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"},
{file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"},
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"},
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"},
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"},
@ -335,7 +333,6 @@ files = [
{file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"},
{file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"},
{file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"},
{file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"},
@ -344,7 +341,6 @@ files = [
{file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"},
{file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"},
{file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"},
{file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"},
{file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"},
{file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"},
{file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"},
@ -589,6 +585,89 @@ files = [
{file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
]
[[package]]
name = "multidict"
version = "6.0.4"
description = "multidict implementation"
optional = false
python-versions = ">=3.7"
files = [
{file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"},
{file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"},
{file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"},
{file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"},
{file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"},
{file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"},
{file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"},
{file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"},
{file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"},
{file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"},
{file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"},
{file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"},
{file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"},
{file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"},
{file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"},
{file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"},
{file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"},
{file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"},
{file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"},
{file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"},
{file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"},
{file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"},
{file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"},
{file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"},
{file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"},
{file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"},
{file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"},
{file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"},
{file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"},
{file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"},
{file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"},
{file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"},
{file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"},
{file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"},
{file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"},
{file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"},
{file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"},
{file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"},
{file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"},
{file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"},
{file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"},
{file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"},
{file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"},
{file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"},
{file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"},
{file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"},
{file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"},
{file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"},
{file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"},
{file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"},
{file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"},
{file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"},
{file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"},
{file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"},
{file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"},
{file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"},
{file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"},
{file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"},
{file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"},
{file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"},
{file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"},
{file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"},
{file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"},
{file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"},
{file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"},
{file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"},
{file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"},
{file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"},
{file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"},
{file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"},
{file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"},
{file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"},
{file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"},
{file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"},
]
[[package]]
name = "mypy-extensions"
version = "1.0.0"
@ -736,23 +815,81 @@ dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "psycopg2"
version = "2.9.7"
name = "psycopg2-binary"
version = "2.9.9"
description = "psycopg2 - Python-PostgreSQL Database Adapter"
optional = false
python-versions = ">=3.6"
python-versions = ">=3.7"
files = [
{file = "psycopg2-2.9.7-cp310-cp310-win32.whl", hash = "sha256:1a6a2d609bce44f78af4556bea0c62a5e7f05c23e5ea9c599e07678995609084"},
{file = "psycopg2-2.9.7-cp310-cp310-win_amd64.whl", hash = "sha256:b22ed9c66da2589a664e0f1ca2465c29b75aaab36fa209d4fb916025fb9119e5"},
{file = "psycopg2-2.9.7-cp311-cp311-win32.whl", hash = "sha256:44d93a0109dfdf22fe399b419bcd7fa589d86895d3931b01fb321d74dadc68f1"},
{file = "psycopg2-2.9.7-cp311-cp311-win_amd64.whl", hash = "sha256:91e81a8333a0037babfc9fe6d11e997a9d4dac0f38c43074886b0d9dead94fe9"},
{file = "psycopg2-2.9.7-cp37-cp37m-win32.whl", hash = "sha256:d1210fcf99aae6f728812d1d2240afc1dc44b9e6cba526a06fb8134f969957c2"},
{file = "psycopg2-2.9.7-cp37-cp37m-win_amd64.whl", hash = "sha256:e9b04cbef584310a1ac0f0d55bb623ca3244c87c51187645432e342de9ae81a8"},
{file = "psycopg2-2.9.7-cp38-cp38-win32.whl", hash = "sha256:d5c5297e2fbc8068d4255f1e606bfc9291f06f91ec31b2a0d4c536210ac5c0a2"},
{file = "psycopg2-2.9.7-cp38-cp38-win_amd64.whl", hash = "sha256:8275abf628c6dc7ec834ea63f6f3846bf33518907a2b9b693d41fd063767a866"},
{file = "psycopg2-2.9.7-cp39-cp39-win32.whl", hash = "sha256:c7949770cafbd2f12cecc97dea410c514368908a103acf519f2a346134caa4d5"},
{file = "psycopg2-2.9.7-cp39-cp39-win_amd64.whl", hash = "sha256:b6bd7d9d3a7a63faae6edf365f0ed0e9b0a1aaf1da3ca146e6b043fb3eb5d723"},
{file = "psycopg2-2.9.7.tar.gz", hash = "sha256:f00cc35bd7119f1fed17b85bd1007855194dde2cbd8de01ab8ebb17487440ad8"},
{file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"},
{file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"},
{file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"},
{file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"},
{file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"},
{file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"},
{file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"},
{file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"},
{file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"},
{file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"},
{file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"},
{file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"},
{file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"},
{file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"},
{file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"},
{file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"},
{file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"},
{file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"},
{file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"},
{file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"},
{file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"},
{file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"},
{file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"},
{file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"},
{file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"},
{file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"},
{file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"},
{file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"},
{file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"},
{file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"},
{file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"},
{file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"},
{file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"},
{file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"},
{file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"},
{file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"},
{file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"},
{file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"},
{file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"},
{file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"},
{file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"},
{file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"},
{file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"},
{file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"},
{file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"},
{file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"},
{file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"},
{file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"},
{file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"},
{file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"},
{file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"},
{file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"},
{file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"},
{file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"},
{file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"},
{file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"},
{file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"},
{file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"},
{file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"},
{file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"},
]
[[package]]
@ -1024,6 +1161,24 @@ pluggy = ">=0.12,<2.0"
[package.extras]
testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
[[package]]
name = "pytest-asyncio"
version = "0.21.1"
description = "Pytest support for asyncio"
optional = false
python-versions = ">=3.7"
files = [
{file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"},
{file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"},
]
[package.dependencies]
pytest = ">=7.0.0"
[package.extras]
docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"]
[[package]]
name = "python-dotenv"
version = "1.0.0"
@ -1216,7 +1371,7 @@ files = [
]
[package.dependencies]
greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""}
greenlet = {version = "!=0.4.17", markers = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\""}
typing-extensions = ">=4.2.0"
[package.extras]
@ -1412,4 +1567,4 @@ files = [
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "a1f51871ee88f7c8503f57105be988ea4ece9f41671d11a4464c382057acf3e7"
content-hash = "7f329541481928598b0dac2189439929e2e67949e9b67e3bca006dcefb79241c"

View File

@ -25,10 +25,12 @@ asyncpg = "^0.28.0"
fastapi = "0.103.1"
greenlet = "^2.0.2"
httpx = "^0.25.0"
multidict = "^6.0.4"
orjson = "^3.9.7"
psycopg2 = "^2.9.7"
psycopg2-binary = "^2.9.9"
pydantic = {extras = ["email"], version = "^2.3.0"}
pydantic-settings = "^2.0.3"
pytest-asyncio = "^0.21.1"
python = "^3.11"
python-jose = "^3.3.0"
sqlalchemy = "^2.0.20"
@ -86,6 +88,7 @@ variable-rgx = "^_{0,2}[a-z][a-z0-9_]*$"
[tool.pyright]
exclude = [
".pytest_cache",
".venv"
]
pythonPlatform = "All"

View File

@ -0,0 +1,70 @@
import asyncio
import typing

import fastapi
import httpx
import pytest_asyncio

import lib.app as lib_app
import tests.core.settings as tests_core_settings
import tests.functional.models as functional_models


@pytest_asyncio.fixture  # type: ignore[reportUntypedFunctionDecorator]
async def http_client(
    base_url: str = tests_core_settings.tests_settings.api.get_api_url,
) -> typing.AsyncGenerator[httpx.AsyncClient, typing.Any]:
    # Client for a running API instance reachable at the configured test URL.
    session = httpx.AsyncClient(base_url=base_url)
    yield session
    await session.aclose()


@pytest_asyncio.fixture  # type: ignore[reportUntypedFunctionDecorator]
async def make_request(http_client: httpx.AsyncClient):
    # Wraps http_client and normalises every call into an HTTPResponse model.
    async def inner(
        api_method: str = "",
        method: functional_models.MethodsEnum = functional_models.MethodsEnum.GET,
        headers: dict[str, str] = tests_core_settings.tests_settings.api.headers,
        body: dict[str, typing.Any] | None = None,
        jwt_token: str | None = None,
    ) -> functional_models.HTTPResponse:
        # Copy the default headers so the shared settings dict is not mutated between calls.
        headers = dict(headers)
        if jwt_token is not None:
            headers["Authorization"] = f"Bearer {jwt_token}"
        client_params = {"json": body, "headers": headers}
        if method == functional_models.MethodsEnum.GET:
            del client_params["json"]
        response = await getattr(http_client, method.value)(api_method, **client_params)
        return functional_models.HTTPResponse(
            body=response.json(),
            headers=response.headers,
            status_code=response.status_code,
        )

    return inner


@pytest_asyncio.fixture(scope="session")  # type: ignore[reportUntypedFunctionDecorator]
def app() -> fastapi.FastAPI:
    # Builds the FastAPI application in-process from the application settings.
    settings = lib_app.Settings()
    application = lib_app.Application.from_settings(settings)
    fastapi_app = application._fastapi_app  # type: ignore[reportPrivateUsage]
    return fastapi_app


@pytest_asyncio.fixture  # type: ignore[reportUntypedFunctionDecorator]
async def app_http_client(
    app: fastapi.FastAPI,
    base_url: str = tests_core_settings.tests_settings.api.get_api_url,
) -> typing.AsyncGenerator[httpx.AsyncClient, typing.Any]:
    # Client bound directly to the in-process app, bypassing the network.
    session = httpx.AsyncClient(app=app, base_url=base_url)
    yield session
    await session.aclose()


@pytest_asyncio.fixture(scope="session")  # type: ignore[reportUntypedFunctionDecorator]
def event_loop():
    # Session-wide event loop override for pytest-asyncio.
    loop = asyncio.get_event_loop_policy().new_event_loop()
    yield loop
    loop.close()
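
For orientation, a minimal functional test built on these fixtures might look like the sketch below. The test name and the "/health" path are assumptions for illustration; the assertions follow the HTTPResponse model returned by make_request.

import pytest

import tests.functional.models as functional_models


@pytest.mark.asyncio
async def test_health_returns_ok(make_request) -> None:
    # "/health" is a hypothetical route; substitute the path actually exposed by the API.
    response = await make_request(
        api_method="/health",
        method=functional_models.MethodsEnum.GET,
    )

    assert response.status_code == 200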

View File

@ -0,0 +1,5 @@
from .settings import *

__all__ = [
    "tests_settings",
]

View File

@ -0,0 +1,17 @@
import pydantic
import pydantic_settings

import tests.core.split_settings as app_split_settings


class TestsSettings(pydantic_settings.BaseSettings):
    api: app_split_settings.ApiSettings = pydantic.Field(default_factory=lambda: app_split_settings.ApiSettings())
    postgres: app_split_settings.PostgresSettings = pydantic.Field(
        default_factory=lambda: app_split_settings.PostgresSettings()
    )
    project: app_split_settings.ProjectSettings = pydantic.Field(
        default_factory=lambda: app_split_settings.ProjectSettings()
    )


tests_settings = TestsSettings()

View File

@ -0,0 +1,9 @@
from .api import *
from .postgres import *
from .project import *

__all__ = [
    "ApiSettings",
    "PostgresSettings",
    "ProjectSettings",
]

View File

@ -0,0 +1,23 @@
import pydantic
import pydantic_settings

import lib.app.split_settings.utils as app_split_settings_utils


class ApiSettings(pydantic_settings.BaseSettings):
    model_config = pydantic_settings.SettingsConfigDict(
        env_file=app_split_settings_utils.ENV_PATH,
        env_prefix="TEST_API_",
        env_file_encoding="utf-8",
        extra="ignore",
    )

    protocol: str = "http"
    host: str = "0.0.0.0"
    port: int = 8000
    headers: dict[str, str] = {"Content-Type": "application/json"}

    @pydantic.computed_field
    @property
    def get_api_url(self) -> str:
        return f"{self.protocol}://{self.host}:{self.port}/api/v1"
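
With the class defaults above (and no TEST_API_* overrides picked up from .env), the computed property resolves to the base URL used by the HTTP fixtures; a minimal sketch:

from tests.core.split_settings import ApiSettings

settings = ApiSettings()
# With the defaults (protocol=http, host=0.0.0.0, port=8000) this prints:
# http://0.0.0.0:8000/api/v1
print(settings.get_api_url)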

View File

@ -0,0 +1,42 @@
import pydantic
import pydantic_settings

import lib.app.split_settings.utils as app_split_settings_utils


class PostgresSettings(pydantic_settings.BaseSettings):
    model_config = pydantic_settings.SettingsConfigDict(
        env_file=app_split_settings_utils.ENV_PATH,
        env_prefix="POSTGRES_",
        env_file_encoding="utf-8",
        extra="ignore",
    )

    name: str = "test_database_name"
    host: str = "localhost"
    port: int = 5432
    user: str = "app"
    password: pydantic.SecretStr = pydantic.Field(
        default=...,
        validation_alias=pydantic.AliasChoices("password", "postgres_password"),
    )

    @property
    def db_uri_async(self) -> str:
        db_uri: str = "postgresql+asyncpg://{pg_user}:{pg_pass}@{pg_host}/{pg_dbname}".format(
            pg_user=self.user,
            pg_pass=self.password.get_secret_value(),
            pg_host=self.host,
            pg_dbname=self.name,
        )
        return db_uri

    @property
    def db_uri_sync(self) -> str:
        db_uri: str = "postgresql://{pg_user}:{pg_pass}@{pg_host}/{pg_dbname}".format(
            pg_user=self.user,
            pg_pass=self.password.get_secret_value(),
            pg_host=self.host,
            pg_dbname=self.name,
        )
        return db_uri
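
A quick sketch of what the two DSN properties produce; the password here is purely illustrative, real values are read from .env via the POSTGRES_ prefix:

import pydantic

from tests.core.split_settings import PostgresSettings

settings = PostgresSettings(password=pydantic.SecretStr("example"))  # illustrative password only
print(settings.db_uri_async)  # e.g. postgresql+asyncpg://app:example@localhost/test_database_name
print(settings.db_uri_sync)   # e.g. postgresql://app:example@localhost/test_database_name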

View File

@ -0,0 +1,15 @@
import pydantic
import pydantic_settings

import lib.app.split_settings.utils as app_split_settings_utils


class ProjectSettings(pydantic_settings.BaseSettings):
    model_config = pydantic_settings.SettingsConfigDict(
        env_file=app_split_settings_utils.ENV_PATH,
        env_file_encoding="utf-8",
        extra="ignore",
    )

    debug: bool = False
    jwt_secret_key: pydantic.SecretStr = pydantic.Field(default=..., validation_alias="jwt_secret_key")

View File

@ -0,0 +1,4 @@
import pathlib

BASE_PATH = pathlib.Path(__file__).parent.parent.parent.parent.parent.resolve()
ENV_PATH = BASE_PATH / ".env"

Some files were not shown because too many files have changed in this diff.