mirror of https://github.com/ijaric/voice_assistant.git synced 2025-05-24 14:33:26 +00:00

Merge branch 'main' into tasks/#47_tts_handlers_repositories

Aleksandr Sukharev 2023-10-14 22:51:27 +03:00 committed by GitHub
commit f8d32d4b87
42 changed files with 2243 additions and 88 deletions

View File

@ -24,7 +24,7 @@ JWT_ALGORITHM=HS256
APP_RELOAD=True
VOICE_AVAILABLE_FORMATS=mp3,ogg,wav
VOICE_AVAILABLE_FORMATS=mp3,ogg,wav,oga
VOICE_MAX_INPUT_SIZE=5120 # 5MB
VOICE_MAX_INPUT_SECONDS=30

View File

@ -1,3 +1,7 @@
from .health import basic_router
from .voice_responce_handler import VoiceResponseHandler
__all__ = ["basic_router"]
__all__ = [
"VoiceResponseHandler",
"basic_router",
]

View File

@ -0,0 +1,45 @@
import http
import io
import fastapi
import lib.stt.services as stt_services
# import lib.tts.services as tts_service
# import lib.models as models
class VoiceResponseHandler:
def __init__(
self,
stt: stt_services.SpeechService,
# tts: tts_service.TTSService,
):
self.stt = stt
# self.tts = tts
self.router = fastapi.APIRouter()
self.router.add_api_route(
"/",
self.voice_response,
methods=["POST"],
summary="Ответ голосового помощника",
description="Маршрут возвращает потоковый ответ аудио",
)
async def voice_response(
self,
voice: bytes = fastapi.File(...),
) -> fastapi.responses.StreamingResponse:
voice_text: str = await self.stt.recognize(voice)
if voice_text == "":
raise fastapi.HTTPException(status_code=http.HTTPStatus.BAD_REQUEST, detail="Speech recognition failed")
# TODO: Add text processing via the openai client
# TODO: Add speech synthesis via the tts client
# TODO: Replace the stub with a real response
# response = await self.tts.get_audio_as_bytes(
# models.TTSCreateRequestModel(
# text=voice_text,
# )
# )
# return fastapi.responses.StreamingResponse(io.BytesIO(response.audio_content), media_type="audio/ogg")
return fastapi.responses.StreamingResponse(io.BytesIO(voice), media_type="audio/ogg")

View File

@ -1,3 +1,5 @@
from .base import HealthResponse
__all__ = ["HealthResponse"]
__all__ = [
"HealthResponse",
]

View File

@ -115,11 +115,18 @@ class Application:
models.VoiceModelProvidersEnum.ELEVEN_LABS: tts_eleven_labs_repository,
},
)
# Handlers
logger.info("Initializing handlers")
liveness_probe_handler = api_v1_handlers.basic_router
# TODO: declare the tts and openai services and add them to voice_response_handler
voice_response_handler = api_v1_handlers.VoiceResponseHandler(
stt=stt_service,
# tts=tts_service, # TODO
).router
logger.info("Creating application")
fastapi_app = fastapi.FastAPI(
@ -132,6 +139,7 @@ class Application:
# Routes
fastapi_app.include_router(liveness_probe_handler, prefix="/api/v1/health", tags=["health"])
fastapi_app.include_router(voice_response_handler, prefix="/api/v1/voice", tags=["voice"])
application = Application(
settings=settings,

View File

@ -1,30 +1,16 @@
import pydantic
import pydantic_settings
import lib.app.split_settings as app_split_settings
class Settings(pydantic_settings.BaseSettings):
api: app_split_settings.ApiSettings = pydantic.Field(default_factory=lambda: app_split_settings.ApiSettings())
app: app_split_settings.AppSettings = pydantic.Field(default_factory=lambda: app_split_settings.AppSettings())
postgres: app_split_settings.PostgresSettings = pydantic.Field(
default_factory=lambda: app_split_settings.PostgresSettings()
)
logger: app_split_settings.LoggingSettings = pydantic.Field(
default_factory=lambda: app_split_settings.LoggingSettings()
)
openai: app_split_settings.OpenaiSettings = pydantic.Field(
default_factory=lambda: app_split_settings.OpenaiSettings()
)
project: app_split_settings.ProjectSettings = pydantic.Field(
default_factory=lambda: app_split_settings.ProjectSettings()
)
proxy: app_split_settings.ProxySettings = pydantic.Field(default_factory=lambda: app_split_settings.ProxySettings())
voice: app_split_settings.VoiceSettings = pydantic.Field(default_factory=lambda: app_split_settings.VoiceSettings())
tts_yandex: app_split_settings.TTSYandexSettings = pydantic.Field(
default_factory=lambda: app_split_settings.TTSYandexSettings()
)
tts_eleven_labs: app_split_settings.TTSElevenLabsSettings = pydantic.Field(
default_factory=lambda: app_split_settings.TTSElevenLabsSettings()
)
api: app_split_settings.ApiSettings = app_split_settings.ApiSettings()
app: app_split_settings.AppSettings = app_split_settings.AppSettings()
postgres: app_split_settings.PostgresSettings = app_split_settings.PostgresSettings()
logger: app_split_settings.LoggingSettings = app_split_settings.LoggingSettings()
openai: app_split_settings.OpenaiSettings = app_split_settings.OpenaiSettings()
project: app_split_settings.ProjectSettings = app_split_settings.ProjectSettings()
proxy: app_split_settings.ProxySettings = app_split_settings.ProxySettings()
voice: app_split_settings.VoiceSettings = app_split_settings.VoiceSettings()
tts_yandex: app_split_settings.TTSYandexSettings = app_split_settings.TTSYandexSettings()
tts_eleven_labs: app_split_settings.TTSElevenLabsSettings = app_split_settings.TTSElevenLabsSettings()

View File

@ -1,8 +1,11 @@
import http
import mimetypes
import tempfile
import fastapi
import magic
import openai
import pydantic
import lib.app.settings as app_settings
import lib.stt as stt
@ -24,15 +27,24 @@ class OpenaiSpeechRepository:
async def speech_to_text(self, audio: bytes) -> str:
file_extension = self.__get_file_extension_from_bytes(audio)
if not file_extension:
raise ValueError("File extension is not supported")
voice: stt.models.SttVoice = stt.models.SttVoice(
audio_size=len(audio) // 1024, # audio size in KB,
audio_format=file_extension,
audio_data=audio,
voice_settings=self.settings.voice,
)
if not file_extension or file_extension not in self.settings.voice.available_formats:
raise fastapi.HTTPException(
status_code=http.HTTPStatus.UNSUPPORTED_MEDIA_TYPE,
detail=f"File extension is not supported. "
f"Available extensions: {self.settings.voice.available_formats}",
)
try:
voice: stt.models.SttVoice = stt.models.SttVoice(
audio_size=len(audio) // 1024, # audio size in KB,
audio_format=file_extension,
audio_data=audio,
voice_settings=self.settings.voice,
)
except (pydantic.ValidationError, ValueError) as e:
raise fastapi.HTTPException(
status_code=http.HTTPStatus.BAD_REQUEST,
detail=f"Voice validation error: {e}",
)
try:
with tempfile.NamedTemporaryFile(suffix=f".{file_extension}") as temp_file:
@ -40,8 +52,14 @@ class OpenaiSpeechRepository:
temp_file.seek(0)
transcript = openai.Audio.transcribe(self.settings.openai.stt_model, temp_file) # type: ignore
except openai.error.InvalidRequestError as e: # type: ignore[reportGeneralTypeIssues]
raise ValueError(f"OpenAI API error: {e}")
raise fastapi.HTTPException(
status_code=http.HTTPStatus.BAD_REQUEST,
detail=f"OpenAI request error: {e}",
)
except openai.error.OpenAIError as e: # type: ignore[reportGeneralTypeIssues]
raise ValueError(f"OpenAI API error: {e}")
raise fastapi.HTTPException(
status_code=http.HTTPStatus.BAD_REQUEST,
detail=f"OpenAI API error: {e}",
)
return transcript.text # type: ignore[reportUnknownVariableType]

View File

@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
[[package]]
name = "aiohttp"
@ -616,7 +616,6 @@ files = [
{file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"},
{file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"},
{file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"},
{file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"},
{file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"},
{file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"},
{file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"},
@ -625,7 +624,6 @@ files = [
{file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"},
{file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"},
{file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"},
{file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"},
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"},
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"},
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"},
@ -655,7 +653,6 @@ files = [
{file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"},
{file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"},
{file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"},
{file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"},
@ -664,7 +661,6 @@ files = [
{file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"},
{file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"},
{file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"},
{file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"},
{file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"},
{file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"},
{file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"},
@ -1571,6 +1567,20 @@ files = [
{file = "python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3"},
]
[[package]]
name = "python-multipart"
version = "0.0.6"
description = "A streaming multipart parser for Python"
optional = false
python-versions = ">=3.7"
files = [
{file = "python_multipart-0.0.6-py3-none-any.whl", hash = "sha256:ee698bab5ef148b0a760751c261902cd096e57e10558e11aca17646b74ee1c18"},
{file = "python_multipart-0.0.6.tar.gz", hash = "sha256:e9925a80bb668529f1b67c7fdb0a5dacdd7cbfc6fb0bff3ea443fe22bdd62132"},
]
[package.extras]
dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatch", "invoke (==1.7.3)", "more-itertools (==4.3.0)", "pbr (==4.3.0)", "pluggy (==1.0.0)", "py (==1.11.0)", "pytest (==7.2.0)", "pytest-cov (==4.0.0)", "pytest-timeout (==2.1.0)", "pyyaml (==5.1)"]
[[package]]
name = "pyupgrade"
version = "3.15.0"
@ -1700,56 +1710,64 @@ tokenize-rt = ">=3.0.1"
[[package]]
name = "sqlalchemy"
version = "2.0.21"
version = "2.0.22"
description = "Database Abstraction Library"
optional = false
python-versions = ">=3.7"
files = [
{file = "SQLAlchemy-2.0.21-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1e7dc99b23e33c71d720c4ae37ebb095bebebbd31a24b7d99dfc4753d2803ede"},
{file = "SQLAlchemy-2.0.21-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7f0c4ee579acfe6c994637527c386d1c22eb60bc1c1d36d940d8477e482095d4"},
{file = "SQLAlchemy-2.0.21-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f7d57a7e140efe69ce2d7b057c3f9a595f98d0bbdfc23fd055efdfbaa46e3a5"},
{file = "SQLAlchemy-2.0.21-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca38746eac23dd7c20bec9278d2058c7ad662b2f1576e4c3dbfcd7c00cc48fa"},
{file = "SQLAlchemy-2.0.21-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3cf229704074bce31f7f47d12883afee3b0a02bb233a0ba45ddbfe542939cca4"},
{file = "SQLAlchemy-2.0.21-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fb87f763b5d04a82ae84ccff25554ffd903baafba6698e18ebaf32561f2fe4aa"},
{file = "SQLAlchemy-2.0.21-cp310-cp310-win32.whl", hash = "sha256:89e274604abb1a7fd5c14867a412c9d49c08ccf6ce3e1e04fffc068b5b6499d4"},
{file = "SQLAlchemy-2.0.21-cp310-cp310-win_amd64.whl", hash = "sha256:e36339a68126ffb708dc6d1948161cea2a9e85d7d7b0c54f6999853d70d44430"},
{file = "SQLAlchemy-2.0.21-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bf8eebccc66829010f06fbd2b80095d7872991bfe8415098b9fe47deaaa58063"},
{file = "SQLAlchemy-2.0.21-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b977bfce15afa53d9cf6a632482d7968477625f030d86a109f7bdfe8ce3c064a"},
{file = "SQLAlchemy-2.0.21-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ff3dc2f60dbf82c9e599c2915db1526d65415be323464f84de8db3e361ba5b9"},
{file = "SQLAlchemy-2.0.21-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44ac5c89b6896f4740e7091f4a0ff2e62881da80c239dd9408f84f75a293dae9"},
{file = "SQLAlchemy-2.0.21-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:87bf91ebf15258c4701d71dcdd9c4ba39521fb6a37379ea68088ce8cd869b446"},
{file = "SQLAlchemy-2.0.21-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b69f1f754d92eb1cc6b50938359dead36b96a1dcf11a8670bff65fd9b21a4b09"},
{file = "SQLAlchemy-2.0.21-cp311-cp311-win32.whl", hash = "sha256:af520a730d523eab77d754f5cf44cc7dd7ad2d54907adeb3233177eeb22f271b"},
{file = "SQLAlchemy-2.0.21-cp311-cp311-win_amd64.whl", hash = "sha256:141675dae56522126986fa4ca713739d00ed3a6f08f3c2eb92c39c6dfec463ce"},
{file = "SQLAlchemy-2.0.21-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7614f1eab4336df7dd6bee05bc974f2b02c38d3d0c78060c5faa4cd1ca2af3b8"},
{file = "SQLAlchemy-2.0.21-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d59cb9e20d79686aa473e0302e4a82882d7118744d30bb1dfb62d3c47141b3ec"},
{file = "SQLAlchemy-2.0.21-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a95aa0672e3065d43c8aa80080cdd5cc40fe92dc873749e6c1cf23914c4b83af"},
{file = "SQLAlchemy-2.0.21-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8c323813963b2503e54d0944813cd479c10c636e3ee223bcbd7bd478bf53c178"},
{file = "SQLAlchemy-2.0.21-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:419b1276b55925b5ac9b4c7044e999f1787c69761a3c9756dec6e5c225ceca01"},
{file = "SQLAlchemy-2.0.21-cp37-cp37m-win32.whl", hash = "sha256:4615623a490e46be85fbaa6335f35cf80e61df0783240afe7d4f544778c315a9"},
{file = "SQLAlchemy-2.0.21-cp37-cp37m-win_amd64.whl", hash = "sha256:cca720d05389ab1a5877ff05af96551e58ba65e8dc65582d849ac83ddde3e231"},
{file = "SQLAlchemy-2.0.21-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b4eae01faee9f2b17f08885e3f047153ae0416648f8e8c8bd9bc677c5ce64be9"},
{file = "SQLAlchemy-2.0.21-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3eb7c03fe1cd3255811cd4e74db1ab8dca22074d50cd8937edf4ef62d758cdf4"},
{file = "SQLAlchemy-2.0.21-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2d494b6a2a2d05fb99f01b84cc9af9f5f93bf3e1e5dbdafe4bed0c2823584c1"},
{file = "SQLAlchemy-2.0.21-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b19ae41ef26c01a987e49e37c77b9ad060c59f94d3b3efdfdbf4f3daaca7b5fe"},
{file = "SQLAlchemy-2.0.21-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fc6b15465fabccc94bf7e38777d665b6a4f95efd1725049d6184b3a39fd54880"},
{file = "SQLAlchemy-2.0.21-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:014794b60d2021cc8ae0f91d4d0331fe92691ae5467a00841f7130fe877b678e"},
{file = "SQLAlchemy-2.0.21-cp38-cp38-win32.whl", hash = "sha256:0268256a34806e5d1c8f7ee93277d7ea8cc8ae391f487213139018b6805aeaf6"},
{file = "SQLAlchemy-2.0.21-cp38-cp38-win_amd64.whl", hash = "sha256:73c079e21d10ff2be54a4699f55865d4b275fd6c8bd5d90c5b1ef78ae0197301"},
{file = "SQLAlchemy-2.0.21-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:785e2f2c1cb50d0a44e2cdeea5fd36b5bf2d79c481c10f3a88a8be4cfa2c4615"},
{file = "SQLAlchemy-2.0.21-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c111cd40910ffcb615b33605fc8f8e22146aeb7933d06569ac90f219818345ef"},
{file = "SQLAlchemy-2.0.21-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9cba4e7369de663611ce7460a34be48e999e0bbb1feb9130070f0685e9a6b66"},
{file = "SQLAlchemy-2.0.21-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50a69067af86ec7f11a8e50ba85544657b1477aabf64fa447fd3736b5a0a4f67"},
{file = "SQLAlchemy-2.0.21-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ccb99c3138c9bde118b51a289d90096a3791658da9aea1754667302ed6564f6e"},
{file = "SQLAlchemy-2.0.21-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:513fd5b6513d37e985eb5b7ed89da5fd9e72354e3523980ef00d439bc549c9e9"},
{file = "SQLAlchemy-2.0.21-cp39-cp39-win32.whl", hash = "sha256:f9fefd6298433b6e9188252f3bff53b9ff0443c8fde27298b8a2b19f6617eeb9"},
{file = "SQLAlchemy-2.0.21-cp39-cp39-win_amd64.whl", hash = "sha256:2e617727fe4091cedb3e4409b39368f424934c7faa78171749f704b49b4bb4ce"},
{file = "SQLAlchemy-2.0.21-py3-none-any.whl", hash = "sha256:ea7da25ee458d8f404b93eb073116156fd7d8c2a776d8311534851f28277b4ce"},
{file = "SQLAlchemy-2.0.21.tar.gz", hash = "sha256:05b971ab1ac2994a14c56b35eaaa91f86ba080e9ad481b20d99d77f381bb6258"},
{file = "SQLAlchemy-2.0.22-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f146c61ae128ab43ea3a0955de1af7e1633942c2b2b4985ac51cc292daf33222"},
{file = "SQLAlchemy-2.0.22-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:875de9414393e778b655a3d97d60465eb3fae7c919e88b70cc10b40b9f56042d"},
{file = "SQLAlchemy-2.0.22-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13790cb42f917c45c9c850b39b9941539ca8ee7917dacf099cc0b569f3d40da7"},
{file = "SQLAlchemy-2.0.22-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e04ab55cf49daf1aeb8c622c54d23fa4bec91cb051a43cc24351ba97e1dd09f5"},
{file = "SQLAlchemy-2.0.22-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a42c9fa3abcda0dcfad053e49c4f752eef71ecd8c155221e18b99d4224621176"},
{file = "SQLAlchemy-2.0.22-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:14cd3bcbb853379fef2cd01e7c64a5d6f1d005406d877ed9509afb7a05ff40a5"},
{file = "SQLAlchemy-2.0.22-cp310-cp310-win32.whl", hash = "sha256:d143c5a9dada696bcfdb96ba2de4a47d5a89168e71d05a076e88a01386872f97"},
{file = "SQLAlchemy-2.0.22-cp310-cp310-win_amd64.whl", hash = "sha256:ccd87c25e4c8559e1b918d46b4fa90b37f459c9b4566f1dfbce0eb8122571547"},
{file = "SQLAlchemy-2.0.22-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f6ff392b27a743c1ad346d215655503cec64405d3b694228b3454878bf21590"},
{file = "SQLAlchemy-2.0.22-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f776c2c30f0e5f4db45c3ee11a5f2a8d9de68e81eb73ec4237de1e32e04ae81c"},
{file = "SQLAlchemy-2.0.22-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8f1792d20d2f4e875ce7a113f43c3561ad12b34ff796b84002a256f37ce9437"},
{file = "SQLAlchemy-2.0.22-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d80eeb5189d7d4b1af519fc3f148fe7521b9dfce8f4d6a0820e8f5769b005051"},
{file = "SQLAlchemy-2.0.22-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69fd9e41cf9368afa034e1c81f3570afb96f30fcd2eb1ef29cb4d9371c6eece2"},
{file = "SQLAlchemy-2.0.22-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54bcceaf4eebef07dadfde424f5c26b491e4a64e61761dea9459103ecd6ccc95"},
{file = "SQLAlchemy-2.0.22-cp311-cp311-win32.whl", hash = "sha256:7ee7ccf47aa503033b6afd57efbac6b9e05180f492aeed9fcf70752556f95624"},
{file = "SQLAlchemy-2.0.22-cp311-cp311-win_amd64.whl", hash = "sha256:b560f075c151900587ade06706b0c51d04b3277c111151997ea0813455378ae0"},
{file = "SQLAlchemy-2.0.22-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2c9bac865ee06d27a1533471405ad240a6f5d83195eca481f9fc4a71d8b87df8"},
{file = "SQLAlchemy-2.0.22-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:625b72d77ac8ac23da3b1622e2da88c4aedaee14df47c8432bf8f6495e655de2"},
{file = "SQLAlchemy-2.0.22-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b39a6e21110204a8c08d40ff56a73ba542ec60bab701c36ce721e7990df49fb9"},
{file = "SQLAlchemy-2.0.22-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53a766cb0b468223cafdf63e2d37f14a4757476157927b09300c8c5832d88560"},
{file = "SQLAlchemy-2.0.22-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0e1ce8ebd2e040357dde01a3fb7d30d9b5736b3e54a94002641dfd0aa12ae6ce"},
{file = "SQLAlchemy-2.0.22-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:505f503763a767556fa4deae5194b2be056b64ecca72ac65224381a0acab7ebe"},
{file = "SQLAlchemy-2.0.22-cp312-cp312-win32.whl", hash = "sha256:154a32f3c7b00de3d090bc60ec8006a78149e221f1182e3edcf0376016be9396"},
{file = "SQLAlchemy-2.0.22-cp312-cp312-win_amd64.whl", hash = "sha256:129415f89744b05741c6f0b04a84525f37fbabe5dc3774f7edf100e7458c48cd"},
{file = "SQLAlchemy-2.0.22-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3940677d341f2b685a999bffe7078697b5848a40b5f6952794ffcf3af150c301"},
{file = "SQLAlchemy-2.0.22-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55914d45a631b81a8a2cb1a54f03eea265cf1783241ac55396ec6d735be14883"},
{file = "SQLAlchemy-2.0.22-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2096d6b018d242a2bcc9e451618166f860bb0304f590d205173d317b69986c95"},
{file = "SQLAlchemy-2.0.22-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:19c6986cf2fb4bc8e0e846f97f4135a8e753b57d2aaaa87c50f9acbe606bd1db"},
{file = "SQLAlchemy-2.0.22-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ac28bd6888fe3c81fbe97584eb0b96804bd7032d6100b9701255d9441373ec1"},
{file = "SQLAlchemy-2.0.22-cp37-cp37m-win32.whl", hash = "sha256:cb9a758ad973e795267da334a92dd82bb7555cb36a0960dcabcf724d26299db8"},
{file = "SQLAlchemy-2.0.22-cp37-cp37m-win_amd64.whl", hash = "sha256:40b1206a0d923e73aa54f0a6bd61419a96b914f1cd19900b6c8226899d9742ad"},
{file = "SQLAlchemy-2.0.22-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3aa1472bf44f61dd27987cd051f1c893b7d3b17238bff8c23fceaef4f1133868"},
{file = "SQLAlchemy-2.0.22-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:56a7e2bb639df9263bf6418231bc2a92a773f57886d371ddb7a869a24919face"},
{file = "SQLAlchemy-2.0.22-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccca778c0737a773a1ad86b68bda52a71ad5950b25e120b6eb1330f0df54c3d0"},
{file = "SQLAlchemy-2.0.22-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6c3e9350f9fb16de5b5e5fbf17b578811a52d71bb784cc5ff71acb7de2a7f9"},
{file = "SQLAlchemy-2.0.22-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:564e9f9e4e6466273dbfab0e0a2e5fe819eec480c57b53a2cdee8e4fdae3ad5f"},
{file = "SQLAlchemy-2.0.22-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:af66001d7b76a3fab0d5e4c1ec9339ac45748bc4a399cbc2baa48c1980d3c1f4"},
{file = "SQLAlchemy-2.0.22-cp38-cp38-win32.whl", hash = "sha256:9e55dff5ec115316dd7a083cdc1a52de63693695aecf72bc53a8e1468ce429e5"},
{file = "SQLAlchemy-2.0.22-cp38-cp38-win_amd64.whl", hash = "sha256:4e869a8ff7ee7a833b74868a0887e8462445ec462432d8cbeff5e85f475186da"},
{file = "SQLAlchemy-2.0.22-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9886a72c8e6371280cb247c5d32c9c8fa141dc560124348762db8a8b236f8692"},
{file = "SQLAlchemy-2.0.22-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a571bc8ac092a3175a1d994794a8e7a1f2f651e7c744de24a19b4f740fe95034"},
{file = "SQLAlchemy-2.0.22-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8db5ba8b7da759b727faebc4289a9e6a51edadc7fc32207a30f7c6203a181592"},
{file = "SQLAlchemy-2.0.22-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b0b3f2686c3f162123adba3cb8b626ed7e9b8433ab528e36ed270b4f70d1cdb"},
{file = "SQLAlchemy-2.0.22-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c1fea8c0abcb070ffe15311853abfda4e55bf7dc1d4889497b3403629f3bf00"},
{file = "SQLAlchemy-2.0.22-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4bb062784f37b2d75fd9b074c8ec360ad5df71f933f927e9e95c50eb8e05323c"},
{file = "SQLAlchemy-2.0.22-cp39-cp39-win32.whl", hash = "sha256:58a3aba1bfb32ae7af68da3f277ed91d9f57620cf7ce651db96636790a78b736"},
{file = "SQLAlchemy-2.0.22-cp39-cp39-win_amd64.whl", hash = "sha256:92e512a6af769e4725fa5b25981ba790335d42c5977e94ded07db7d641490a85"},
{file = "SQLAlchemy-2.0.22-py3-none-any.whl", hash = "sha256:3076740335e4aaadd7deb3fe6dcb96b3015f1613bd190a4e1634e1b99b02ec86"},
{file = "SQLAlchemy-2.0.22.tar.gz", hash = "sha256:5434cc601aa17570d79e5377f5fd45ff92f9379e2abed0be5e8c2fba8d353d2b"},
]
[package.dependencies]
greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""}
greenlet = {version = "!=0.4.17", markers = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\""}
typing-extensions = ">=4.2.0"
[package.extras]
@ -2069,4 +2087,4 @@ multidict = ">=4.0"
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "9d9f56a0892e3eb62cadf0061462fe1b8a9fd8aa404761e331d485a6a05a531c"
content-hash = "cf7c2e88dd377d6929d87da3553dabdc48acaa30d58f7de2d8303159180b0c09"

View File

@ -37,6 +37,7 @@ pytest-asyncio = "^0.21.1"
python = "^3.11"
python-jose = "^3.3.0"
python-magic = "^0.4.27"
python-multipart = "^0.0.6"
sqlalchemy = "^2.0.20"
uvicorn = "^0.23.2"
wrapt = "^1.15.0"

View File

@ -0,0 +1,10 @@
BOT_CONTAINER_NAME=bot_container_name
BOT_IMAGE_NAME=botimage_name
# required parameters
BOT_TOKEN=123456:Your-TokEn_ExaMple
BOT_ADMINS=123456,654321
API_PROTOCOL=http
API_URL=api
API_PORT=8000

63
src/bot_aiogram/.gitignore vendored Executable file
View File

@ -0,0 +1,63 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Translations
*.mo
*.pot
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Environments
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Pyre type checker
.pyre/
.idea/*
.env

22
src/bot_aiogram/Dockerfile Executable file
View File

@ -0,0 +1,22 @@
FROM python:3.11
RUN apt-get update \
&& DEBIAN_FRONTEND=noninteractive \
&& apt-get install -y net-tools netcat-traditional curl \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
RUN mkdir --parents /opt/app
COPY pyproject.toml /opt/app/pyproject.toml
COPY poetry.lock /opt/app/poetry.lock
COPY poetry.toml /opt/app/poetry.toml
WORKDIR /opt/app
RUN pip install poetry \
&& poetry install --no-dev
COPY bin /opt/app/bin
COPY tgbot /opt/app/tgbot
CMD [".venv/bin/python", "-m", "bin"]

11
src/bot_aiogram/Makefile Normal file
View File

@ -0,0 +1,11 @@
include ../../common_makefile.mk
PROJECT_FOLDERS = tgbot
.PHONY: test
test:
@echo 'Running tests...'
.PHONY: ci-test
ci-test:
@echo 'Running tests...'

212
src/bot_aiogram/README.md Normal file
View File

@ -0,0 +1,212 @@
# tgbot_template (aiogram v2.0)
> ⚠️ **Note**: This template is for aiogram version 2.0. If you're interested in using the latest features and functionalities, consider using the updated [tgbot_template_v3](https://github.com/Latand/tgbot_template_v3) which is compatible with aiogram 3.0.
<img height="30em" src="https://raw.githubusercontent.com/anki-geo/ultimate-geography/a44a569a922e1d241517113e2917736af808eed7/src/media/flags/ug-flag-united_kingdom.svg" alt="english" align = "center"/>
This template is recommended to use in your Telegram bots written on <a href='https://github.com/aiogram/aiogram'>AIOgram</a>.
You can see tutorials on how to create and use it on the <a href='https://botfather.dev?utm_source=github_template'>Website with course on Telegram Bots Development</a>.
<br/><br/><br/>
<img height="30em" src="https://raw.githubusercontent.com/anki-geo/ultimate-geography/a44a569a922e1d241517113e2917736af808eed7/src/media/flags/ug-flag-ukraine.svg" alt="ukrainian" align = "center"/>
This template is recommended for use in your Telegram bots written with <a href='https://github.com/aiogram/aiogram'>AIOgram</a>.
You can find tutorials on creating and using the template on the <a href='https://botfather.dev?utm_source=github_template'>website with a course on Telegram bot development</a>
<br/><br/><br/>
<img height="30em" src="https://raw.githubusercontent.com/anki-geo/ultimate-geography/a44a569a922e1d241517113e2917736af808eed7/src/media/flags/ug-flag-russia.svg" alt="russian" align = "center"/>
This template is recommended for use in your Telegram bots written with <a href='https://github.com/aiogram/aiogram'>AIOgram</a>.
Tutorials on creating and using the template can be found on the <a href='https://botfather.dev?utm_source=github_template'>website with a course on Telegram bot development</a>
## About the template
**Structure:**
```
tgbot_template/
├── bot.py
├── tgbot/
│ ├── __init__.py
│ ├── config.py
│ ├── filters/
│ ├── handlers/
│ └── middlewares/
```
- The `tgbot` package is the root package for the bot, and it contains sub-packages for **filters**, **handlers**,
and **middlewares**.
- The `filters` package contains classes that define **custom filters** for the bot's message handlers.
- The `handlers` package contains classes that define the bot's **message handlers**, which specify the actions to take
in response to incoming messages.
- The `middlewares` package contains classes that define **custom middlewares** for the bot's dispatcher, which can be
used to perform additional processing on incoming messages.
## Detailed description
### `bot.py`
The bot.py script is the entry point for the template Telegram bot. It performs the following steps to start and run the
bot:
1. Set up logging: The `logging` module is imported and configured to log messages to the console.
2. Load the configuration: The `load_config()` function from the `tgbot.config` module is called to read the configuration
from the environment.
3. Set up the storage: Depending on the `use_redis` flag in the configuration, either a `MemoryStorage` or a `RedisStorage2`
instance is created to store the bot's state.
4. Create the bot and the dispatcher: A `Bot` instance is created using the bot token from the configuration, and a
`Dispatcher` instance is created using the `Bot` instance and the storage.
5. Register middlewares, filters, and handlers: The `register_all_middlewares()`, `register_all_filters()`, and
`register_all_handlers()` functions are called to register all the middlewares, filters, and handlers that are used by
the bot.
6. Start the polling loop: The `start_polling()` method of the Dispatcher instance is called to start the main event loop
for the bot. This method listens for incoming messages and routes them to the appropriate handler.
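A minimal sketch of steps 2–6 above, assuming aiogram v2 and the `load_config` / registration helpers the template describes (the module paths imported below are illustrative; this repository's own entry point, `bin/__main__.py` later in this diff, hard-codes `MemoryStorage` and attaches the config to the bot object instead):

```python
import asyncio
import logging

import aiogram
from aiogram.contrib.fsm_storage.memory import MemoryStorage
from aiogram.contrib.fsm_storage.redis import RedisStorage2

# Hypothetical template modules; the real layout may differ.
from tgbot.config import load_config
from tgbot.filters.admin import AdminFilter
from tgbot.handlers.admin import register_admin
from tgbot.handlers.user import register_user


async def main():
    logging.basicConfig(level=logging.INFO)
    config = load_config(".env")

    # Step 3: pick the FSM storage backend based on the use_redis flag.
    storage = RedisStorage2() if config.tg_bot.use_redis else MemoryStorage()

    # Step 4: create the bot and the dispatcher.
    bot = aiogram.Bot(token=config.tg_bot.token, parse_mode="HTML")
    dp = aiogram.Dispatcher(bot, storage=storage)

    # Step 5: bind filters and register handlers.
    dp.filters_factory.bind(AdminFilter)
    register_admin(dp)
    register_user(dp)

    # Step 6: start polling and clean up on shutdown.
    try:
        await dp.start_polling()
    finally:
        await dp.storage.close()
        await dp.storage.wait_closed()
        await bot.session.close()


if __name__ == "__main__":
    asyncio.run(main())
```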
### `tgbot/config.py`
The `config.py` script defines a data structure for storing configuration options for the bot, such as the Telegram bot
token, database credentials, and other parameters.
The config.py script also includes a `load_config` function for loading the configuration from a file using
the `environs` library.
The config.py file defines a `Config` class, which is used to store configuration settings for the bot.
The Config class has three nested classes, `TgBot`, `DbConfig`, and `Miscellaneous`, which are used to store
configuration settings for the Telegram bot, the database, and miscellaneous settings, respectively.
The `load_config` function is used to load the configuration settings from an environment file and create a `Config`
object.
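A minimal sketch of such a module, assuming the `environs` library and illustrative field names; the bot in this repository actually replaces `config.py` with a pydantic-settings based `tgbot/settings.py`, shown later in this diff:

```python
from dataclasses import dataclass

from environs import Env


@dataclass
class TgBot:
    token: str
    admin_ids: list[int]
    use_redis: bool


@dataclass
class DbConfig:
    host: str
    user: str
    password: str
    database: str


@dataclass
class Miscellaneous:
    other_params: str | None = None


@dataclass
class Config:
    tg_bot: TgBot
    db: DbConfig
    misc: Miscellaneous


def load_config(path: str | None = None) -> Config:
    """Read an .env file (if given) plus the process environment into a Config."""
    env = Env()
    env.read_env(path)

    return Config(
        tg_bot=TgBot(
            token=env.str("BOT_TOKEN"),
            admin_ids=list(map(int, env.list("ADMINS"))),
            use_redis=env.bool("USE_REDIS", default=False),
        ),
        db=DbConfig(
            host=env.str("DB_HOST"),
            user=env.str("DB_USER"),
            password=env.str("DB_PASS"),
            database=env.str("DB_NAME"),
        ),
        misc=Miscellaneous(),
    )
```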
### `tgbot/filters/admin.py`
The `admin.py` file defines an `AdminFilter` class, which is used to filter messages so that only messages from
authorized users **(i.e., users who are listed in the ADMINS configuration setting)** are processed by the bot.
The `AdminFilter` class is a subclass of `BoundFilter` from the **aiogram** library, and it defines a key property that
specifies the name of the filter. The `AdminFilter` class also defines an `__init__` method that takes an `is_admin`
parameter, which specifies whether the user who sent the message is an authorized user.
The `AdminFilter` class also defines a `check` method that checks whether the user who sent the message is an admin
user, and if so, it returns `True`, indicating that the message should be processed by the bot. Otherwise, it returns
`False`, indicating that the message should be ignored by the bot. The `check` method is called by the bot's dispatcher
when a message is received.
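A sketch of such a filter, assuming aiogram v2's `BoundFilter` and the illustrative `Config` layout from the sketch above; binding it with `dp.filters_factory.bind(AdminFilter)` is what makes the `is_admin=...` keyword available when registering handlers:

```python
import typing

import aiogram
from aiogram.dispatcher.filters import BoundFilter


class AdminFilter(BoundFilter):
    # Name of the keyword argument handlers use, e.g. register_message_handler(..., is_admin=True)
    key = "is_admin"

    def __init__(self, is_admin: typing.Optional[bool] = None):
        self.is_admin = is_admin

    async def check(self, obj: aiogram.types.Message) -> bool:
        if self.is_admin is None:
            return False
        # Assumes the config was attached via bot["config"] = config at startup.
        config = obj.bot.get("config")
        return (obj.from_user.id in config.tg_bot.admin_ids) == self.is_admin
```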
### `tgbot/handlers/admin.py`
The `admin.py` file defines a `register_admin` function, which is used to register event handlers for messages that are
sent by authorized users (**i.e., users who are listed in the ADMINS configuration setting**).
The `register_admin` function takes a `Dispatcher` object as its parameter, and it uses this object to register event
handlers that respond to different types of messages.
For example, it might register an event handler that responds to commands that are sent by authorized users, such as
the `/echo` command, which causes the bot to repeat the text of the message back to the user.
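A hedged sketch of such a registration function, assuming the `AdminFilter` above is already bound to the dispatcher:

```python
import aiogram


async def admin_start(message: aiogram.types.Message):
    await message.reply("Hello, admin!")


def register_admin(dp: aiogram.Dispatcher):
    # The is_admin=True keyword is resolved by the bound AdminFilter.
    dp.register_message_handler(admin_start, commands=["start"], state="*", is_admin=True)
```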
### `tgbot/handlers/echo.py`
The `echo.py` file defines a `register_echo` function, which is used to register an event handler for the `/echo`
command.
This event handler is responsible for repeating the text of the message back to the user. The `register_echo` function
takes a `Dispatcher` object as its parameter, and it uses this object to register the `/echo` command handler.
### `tgbot/handlers/user.py`
The `user.py` file defines a `register_user` function, which is used to register event handlers for messages that are
sent by non-authorized users (i.e., users who are not listed in the ADMINS configuration setting).
The `register_user` function takes a `Dispatcher` object as its parameter, and it uses this object to register event
handlers that respond to different types of messages. For example, it might register an event handler that responds to
commands that are sent by non-authorized users, such as the `/help` command, which causes the bot to send a message with
a list of available commands.
### `tgbot/middlewares/environment.py`
`environment.py` is a file that contains the `EnvironmentMiddleware` class, which is a middleware used in the Telegram
bot.
A middleware is a piece of code that sits between the incoming request and the handler function. In this case, the
`EnvironmentMiddleware` class allows the bot to access the configuration data that was loaded by the `load_config`
function in the `config.py` file. This configuration data can then be accessed by other parts of the bot, such as the
handlers, to customize its behavior.
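For illustration, assuming the middleware is constructed as `EnvironmentMiddleware(config=config)`, a handler can receive that value simply by declaring a matching parameter; in this repository the handlers read `bot.get("config")` instead, as `tgbot/handlers/voice.py` below shows:

```python
import aiogram

import tgbot.settings as tgbot_settings


# aiogram 2 passes middleware data entries to handlers that declare a
# matching parameter name, so "config" arrives here without extra plumbing.
async def show_backend_url(message: aiogram.types.Message, config: tgbot_settings.Settings):
    await message.answer(f"Backend URL: {config.api.api_url}")
```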
### `tgbot/keyboards/(inline|reply).py`
The `inline.py` and `reply.py` files define classes that are used to create inline and reply keyboards, respectively.
The `InlineKeyboard` class is a subclass of `InlineKeyboardMarkup` from the **aiogram** library, and it defines an
`__init__` method that takes an `inline_keyboard` parameter, which specifies the buttons that should be included in the
keyboard.
The `ReplyKeyboard` class is a subclass of `ReplyKeyboardMarkup` from the **aiogram** library, and it defines an
`__init__` method that takes a `keyboard` parameter, which specifies the buttons that should be included in the
keyboard.
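The keyboard modules themselves are not part of this commit; a minimal aiogram v2 sketch of the two keyboard kinds, building the markups directly rather than subclassing:

```python
import aiogram

# Inline keyboard: buttons attached to a message, identified by callback_data.
confirm_keyboard = aiogram.types.InlineKeyboardMarkup(row_width=2).add(
    aiogram.types.InlineKeyboardButton(text="Yes", callback_data="confirm:yes"),
    aiogram.types.InlineKeyboardButton(text="No", callback_data="confirm:no"),
)

# Reply keyboard: buttons shown in place of the user's regular keyboard.
menu_keyboard = aiogram.types.ReplyKeyboardMarkup(resize_keyboard=True).add(
    aiogram.types.KeyboardButton("Help"),
    aiogram.types.KeyboardButton("Settings"),
)
```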
### `tgbot/misc`
In general, a package called "misc" might be used to store miscellaneous code that doesn't fit into any of the other
packages or modules in a project. This could include utility functions, helper classes, or other types of code that are
used by multiple parts of the project.
In this case, the `misc` package contains a `states.py` file, which defines a `StateGroup` class that is used to define
the states that are used by the bot.
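A sketch of such a state definition, assuming aiogram v2's `StatesGroup` base class and purely illustrative state names:

```python
import aiogram.dispatcher.filters.state as aiogram_state


class VoiceDialog(aiogram_state.StatesGroup):
    # Illustrative states; the real states.py would name whatever steps the bot needs.
    waiting_for_voice = aiogram_state.State()
    waiting_for_confirmation = aiogram_state.State()
```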
### `tgbot/models`
The `models` package can contain a `users.py` file, which defines a `User` class that is used to represent a user in the
database. This can be used in combination with an ORM (Object Relational Mapper) to store and retrieve data from the
database.
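For illustration only (the bot package in this commit has no database dependency), a `User` model sketched with SQLAlchemy 2.0 declarative mapping, which the backend in this repository already uses:

```python
import sqlalchemy
import sqlalchemy.orm as sa_orm


class Base(sa_orm.DeclarativeBase):
    pass


class User(Base):
    # Illustrative columns; the concrete schema is left to the project.
    __tablename__ = "users"

    telegram_id: sa_orm.Mapped[int] = sa_orm.mapped_column(sqlalchemy.BigInteger, primary_key=True)
    full_name: sa_orm.Mapped[str] = sa_orm.mapped_column(sqlalchemy.String(255))
    is_admin: sa_orm.Mapped[bool] = sa_orm.mapped_column(default=False)
```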
### `tgbot/services`
This package can also be named `infrastructure`. It contains the code that is used to interact with external services.
A package called "services" could contain code that defines services that are used by an application. In software
development, a service is a self-contained piece of functionality that performs a specific task or provides a specific
capability. A service is typically defined as a class or a set of functions that implement the desired functionality.
Examples of services that might be included in a services package could include a **database access service, a caching
service, a messaging service**, or any other type of functionality that is used by the application. The exact contents
of a services package would depend on the specific needs of the application and the services that it requires.
The `services` package can contain a `database.py` file, which defines a `Database` class that is used to connect to the
database and perform database operations.
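A minimal sketch of such a service, assuming `asyncpg` as the driver; any async driver or an ORM session factory would fit the same shape:

```python
import asyncpg  # assumed driver for this sketch


class Database:
    def __init__(self, dsn: str):
        self.dsn = dsn
        self.pool = None

    async def connect(self) -> None:
        # Open a connection pool once at startup.
        self.pool = await asyncpg.create_pool(dsn=self.dsn)

    async def close(self) -> None:
        if self.pool is not None:
            await self.pool.close()

    async def get_user(self, telegram_id: int):
        # Illustrative query; assumes a "users" table exists.
        return await self.pool.fetchrow("SELECT * FROM users WHERE telegram_id = $1", telegram_id)
```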
## docker-compose.yml
The `docker-compose.yml` file defines the services that are used by the application, as well as the networks and volumes
that are needed by the application. The file begins by specifying the version of the Docker Compose file format that is
being used.
The `services` section of the file defines the containers that should be run as part of the application. In this example,
there is only one service, called `bot`, which is based on the `tg_bot-image` Docker image. The `container_name` specifies the
name that should be used for the container, and the `build` section specifies the location of the Dockerfile that should
be used to build the image.
The `working_dir` specifies the working directory that should be used by the container, and the `volumes` section specifies
the files and directories that should be mounted into the container. In this case, the entire project directory is
mounted into the container, which allows the application to access the files on the host machine.
The `command` specifies the command that should be run when the container is started, and the `restart` setting specifies
that the container should be automatically restarted if it exits.
The `env_file` setting specifies the location of the `.env` file, which contains the configuration settings for the application.
The `networks` section defines the networks that the container should be connected to. In this example, there is only one
network, called `tg_bot`, which is based on the bridge driver. This network allows the containers in the application to
communicate with each other.
## Dockerfile
The `Dockerfile` defines the instructions for building the Docker image that is used by the bot service. The file begins
by specifying the base image that should be used for the image, which in this case is `python:3.9-buster`. The `ENV`
instruction sets the value of the `BOT_NAME` environment variable, which is used by the `WORKDIR` instruction to specify the
working directory for the container.
The `COPY` instructions are used to copy the `requirements.txt` file and the entire project directory into the image. The
`RUN` instruction is used to install the Python dependencies from the `requirements.txt` file. This allows the application
to run in the container with all the necessary dependencies.

View File

View File

@ -0,0 +1,55 @@
import asyncio
import logging
import aiogram
import aiogram.contrib.fsm_storage.memory as fsm_storage_memory
import tgbot.handlers as tgbot_handlers
import tgbot.middlewares as tgbot_middlewares
import tgbot.settings as tgbot_settings
logger = logging.getLogger(__name__)
def register_all_middlewares(dp: aiogram.Dispatcher):
dp.setup_middleware(tgbot_middlewares.environment.EnvironmentMiddleware())
def register_all_handlers(dp: aiogram.Dispatcher):
tgbot_handlers.register_user(dp)
tgbot_handlers.register_echo(dp)
tgbot_handlers.register_voice_response(dp)
async def main():
logging.basicConfig(
level=logging.INFO,
format="%(filename)s:%(lineno)d #%(levelname)-8s [%(asctime)s] - %(name)s - %(message)s",
)
logger.info("Starting bot")
config = tgbot_settings.Settings()
storage = fsm_storage_memory.MemoryStorage()
bot = aiogram.Bot(token=config.tgbot.token.get_secret_value(), parse_mode="HTML")
dp = aiogram.Dispatcher(bot, storage=storage)
bot["config"] = config
register_all_middlewares(dp)
register_all_handlers(dp)
# start
try:
await dp.start_polling()
finally:
await dp.storage.close()
await dp.storage.wait_closed()
if bot.session:
await bot.session.close()
if __name__ == "__main__":
try:
asyncio.run(main())
except (KeyboardInterrupt, SystemExit):
logger.error("Bot stopped!")

View File

@ -0,0 +1,17 @@
version: "3"
services:
bot:
image: "${BOT_IMAGE_NAME:-tg_bot-image}"
container_name: "${BOT_CONTAINER_NAME:-tg_bot-container}"
build:
context: .
restart: always
env_file:
- .env
networks:
- tg_bot_network
networks:
tg_bot_network:
driver: bridge

1364
src/bot_aiogram/poetry.lock generated Normal file

File diff suppressed because it is too large

View File

@ -0,0 +1,3 @@
[virtualenvs]
create = true
in-project = true

View File

@ -0,0 +1,143 @@
[build-system]
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core"]
[tool.black]
line-length = 120
target-version = ['py311']
[tool.isort]
known_first_party = ["backend", "tests"]
line_length = 120
profile = "black"
py_version = "311"
[tool.poetry]
authors = ["jsdio@jsdio.ru"]
description = ""
name = "bot_aiogram"
readme = "README.md"
version = "0.1.0"
[tool.poetry.dependencies]
aiogram = "2.18"
environs = "9.0"
pydantic-settings = "^2.0.3"
pytest-asyncio = "^0.21.1"
python = "^3.11"
[tool.poetry.dev-dependencies]
black = "^23.7.0"
isort = "^5.12.0"
pylint = "^2.17.5"
pylint-pydantic = "^0.2.4"
pylint-pytest = "^1.1.2"
pyright = "^1.1.318"
pyupgrade = "^3.10.1"
ruff = "^0.0.282"
sort-all = "^1.2.0"
toml-sort = "^0.23.1"
[tool.pylint]
disable = [
"broad-except",
"cannot-enumerate-pytest-fixtures",
"consider-using-from-import",
"consider-using-sys-exit",
"duplicate-code",
"fixme",
"missing-docstring",
"no-member",
"protected-access",
"too-few-public-methods",
"too-many-instance-attributes",
"too-many-locals",
"too-many-statements",
"unnecessary-ellipsis"
]
extension-pkg-allow-list = [
"orjson",
"pydantic"
]
ignore-path = [
"^.*venv/.*$"
]
load-plugins = [
"pylint_pydantic",
"pylint_pytest"
]
max-args = 15
max-line-length = 120
recursive = true
[tool.pylint.basic]
argument-rgx = "^_{0,2}[a-z][a-z0-9_]*$"
attr-rgx = "^_{0,2}[a-z][a-z0-9_]*$"
class-attribute-rgx = "^_{0,2}[a-zA-Z][a-zA-Z0-9_]*$"
variable-rgx = "^_{0,2}[a-z][a-z0-9_]*$"
[tool.pyright]
exclude = [
".pytest_cache",
".venv"
]
pythonPlatform = "All"
pythonVersion = "3.11"
reportConstantRedefinition = "none"
reportMissingTypeStubs = "none"
reportPrivateUsage = "information"
reportPropertyTypeMismatch = "warning"
reportUninitializedInstanceVariable = "warning"
reportUnknownMemberType = "none"
reportUnnecessaryTypeIgnoreComment = "warning"
reportUntypedFunctionDecorator = "warning"
typeCheckingMode = "strict"
useLibraryCodeForTypes = true
venv = ".venv"
venvPath = "."
[tool.ruff]
ignore = [
# Pyright automatically infers the type of `self`
"ANN101",
# Pyright automatically infers the type of `cls`
"ANN102",
# In some cases actively detrimental; somewhat conflicts with black
"COM",
# Ignore missing docstrings
"D102",
# In combination with D213, this results in noisy diffs and inconsistencies
# See also <https://github.com/charliermarsh/ruff/issues/4174>.
"D200",
# This results in inconsistencies between function and class docstrings
# See also <https://github.com/charliermarsh/ruff/issues/4175>.
"D202",
# D211 is preferred since the extra blank line isn't visually useful
"D203",
# D213 is preferred since it's more readable and allows more characters
"D212",
# Ignore missing docstrings
"D414",
# Covered by D401, which is more restrictive
"D415",
# Type-checkers interpret redundant `as` as exporting an item
"PLC0414",
# Permit using alias for 'import'
"PLR0402",
# Causes churn and awful looking import blocks for little gain
"TCH"
]
select = ["ALL"]
[tool.ruff.per-file-ignores]
"tests/*" = [
"D100",
"D103",
"D104",
"S101"
]
[tool.tomlsort]
all = true
ignore_case = true
in_place = true

View File

@ -0,0 +1,5 @@
from .settings import Settings
__all__ = [
"Settings",
]

View File

@ -0,0 +1,9 @@
from .echo import *
from .user import *
from .voice import *
__all__ = [
"register_echo",
"register_user",
"register_voice_response",
]

View File

@ -0,0 +1,11 @@
import aiogram
async def bot_echo(message: aiogram.types.Message):
text = ["Эхо без состояния.", "Сообщение:", message.text]
await message.answer("\n".join(text))
def register_echo(dp: aiogram.Dispatcher):
dp.register_message_handler(bot_echo)

View File

@ -0,0 +1,9 @@
import aiogram
async def user_start(message: aiogram.types.Message):
await message.reply("Hello, user! Send me a voice message and I'll try to recognize it and answer you.")
def register_user(dp: aiogram.Dispatcher):
dp.register_message_handler(user_start, commands=["start"], state="*")

View File

@ -0,0 +1,55 @@
import io
import json
import typing
import aiogram
import aiohttp
import tgbot.settings as tgbot_settings
async def voice_response(message_voice: aiogram.types.Message):
config = typing.cast(tgbot_settings.Settings, message_voice.bot.get("config"))
voice_file_id: str = message_voice.voice.file_id
file_info = await message_voice.bot.get_file(voice_file_id)
file_path: str = file_info.file_path
voice_data: io.BytesIO = io.BytesIO()
voice_data.name = "voice.ogg"
voice_data.seek(0)
await message_voice.bot.download_file(file_path, destination=voice_data)
await message_voice.bot.send_chat_action(message_voice.from_user.id, "typing")
async with aiohttp.ClientSession() as session:
async with session.post(
f"{config.api.api_url}/api/v1/voice/",
data={"voice": voice_data},
) as resp:
if resp.status == 200:
voice_answer: bytes = await resp.read()
answer_io = io.BytesIO(voice_answer)
answer_io.name = "answer_io.ogg"
await message_voice.bot.send_chat_action(
message_voice.from_user.id, action=aiogram.types.ChatActions.RECORD_AUDIO
)
try:
await message_voice.answer_voice(voice=answer_io)
except aiogram.exceptions.BadRequest:
await message_voice.answer(
"We were unable to send you a voice message. Please check your privacy settings."
)
else:
error_text: str = await resp.text()
if error_text == "":
await message_voice.answer(f"Error: {resp.status}")
else:
await message_voice.answer(f"Error: {json.loads(error_text)['detail']}")
await session.close()
return
def register_voice_response(dp: aiogram.Dispatcher):
dp.register_message_handler(voice_response, content_types=aiogram.types.ContentType.VOICE)

View File

View File

@ -0,0 +1,5 @@
from .environment import *
__all__ = [
"EnvironmentMiddleware",
]

View File

@ -0,0 +1,14 @@
import typing
import aiogram.dispatcher.middlewares as dispatcher_middlewares
class EnvironmentMiddleware(dispatcher_middlewares.LifetimeControllerMiddleware):
skip_patterns = ["error", "update"]
def __init__(self, **kwargs: typing.Any):
super().__init__()
self.kwargs = kwargs
async def pre_process(self, obj: typing.Any, data: dict[typing.Any, typing.Any], *args: typing.Any):
data.update(**self.kwargs)

View File

View File

View File

@ -0,0 +1,4 @@
import pathlib
BASE_PATH = pathlib.Path(__file__).parent.parent.parent.parent.resolve()
ENV_PATH = BASE_PATH / ".env"

View File

View File

@ -0,0 +1,8 @@
import pydantic_settings
import tgbot.split_settings as app_split_settings
class Settings(pydantic_settings.BaseSettings):
api: app_split_settings.ApiSettings = app_split_settings.ApiSettings()
tgbot: app_split_settings.TgBotSettings = app_split_settings.TgBotSettings()

View File

@ -0,0 +1,7 @@
from .api import *
from .tgbot import *
__all__ = [
"ApiSettings",
"TgBotSettings",
]

View File

@ -0,0 +1,20 @@
import pydantic_settings
import tgbot.split_settings.utils as split_settings_utils
class ApiSettings(pydantic_settings.BaseSettings):
model_config = pydantic_settings.SettingsConfigDict(
env_file=split_settings_utils.ENV_PATH,
env_prefix="API_",
env_file_encoding="utf-8",
extra="ignore",
)
url: str = "127.0.0.1"
port: int = 8000
protocol: str = "http"
@property
def api_url(self) -> str:
return f"{self.protocol}://{self.url}:{self.port}"

View File

@ -0,0 +1,22 @@
import pydantic
import pydantic_settings
import tgbot.split_settings.utils as split_settings_utils
class TgBotSettings(pydantic_settings.BaseSettings):
model_config = pydantic_settings.SettingsConfigDict(
env_file=split_settings_utils.ENV_PATH,
env_prefix="BOT_",
env_file_encoding="utf-8",
extra="ignore",
)
token: pydantic.SecretStr = pydantic.Field(
default=..., validation_alias=pydantic.AliasChoices("token", "bot_token")
)
admins: str = pydantic.Field(default="")
@pydantic.field_validator("admins")
def validate_bot_admins(cls, v: str) -> list[int]:
return list(map(int, v.split(",")))

View File

@ -0,0 +1,4 @@
import pathlib
BASE_PATH = pathlib.Path(__file__).parent.parent.parent.resolve()
ENV_PATH = BASE_PATH / ".env"