From 23aada53b2ff989e6da496ee5ceb8fbf4b3a5200 Mon Sep 17 00:00:00 2001
From: Artem Litvinov
Date: Sun, 15 Oct 2023 16:39:21 +0100
Subject: [PATCH] feat: working langchain memory

---
 README.md                                      |   8 +-
 src/assistant/lib/agent/services.py            | 103 +++++++-----------
 .../lib/app/split_settings/openai.py           |   1 +
 src/assistant/lib/app/split_settings/voice.py  |   2 +-
 src/assistant/poetry.lock                      |  28 ++---
 src/bot_aiogram/docker-compose.yml             |   2 +-
 6 files changed, 56 insertions(+), 88 deletions(-)

diff --git a/README.md b/README.md
index ab2d73f..44bf425 100644
--- a/README.md
+++ b/README.md
@@ -8,6 +8,7 @@
 ## What has been implemented?
 ###
+
 - Code base organized according to the [DDD template](https://github.com/yp-middle-python-24/python-service-example/)
 - Speech To Text based on [Whisper](https://openai.com/research/whisper) from OpenAI
 - LLM:
@@ -25,10 +26,12 @@
 - [Алексей](https://github.com/grucshetskyaleksei)

 ## How to run the project?
+
 1. Download the [database file](https://disk.yandex.ru/d/ZAKDDg8lP9DHBQ) with `embeddings` and place it at `src/assistant/data/dump.sql`.
 2. In the `src/assistant` directory, rename the `.env.example` file to `.env` and fill in the environment variables.
-Example environment variable values:
+   Example environment variable values:
+
 ```
 POSTGRES_DRIVER=postgresql+asyncpg # Database driver
 POSTGRES_HOST=db # Database host
@@ -68,7 +71,7 @@
 TTS_ELEVEN_LABS_DEFAULT_VOICE_ID=EXAVITQu4vr4xnSDxMaL # Default voice ID
 ```

 3. In the `src/bot_aiogram` directory, rename the `.env.example` file to `.env` and fill in the environment variables.
-Example environment variable values:
+   Example environment variable values:

 ```
 BOT_CONTAINER_NAME=bot_container_name # Container name
@@ -88,5 +91,6 @@
 REDIS_PORT=6379 # Redis port
 ```
 4. Start the project with the `docker-compose up -d` command
 ### Important!
+
 To work with the Telegram bot, you must first start a conversation with it and disable the voice-message restriction in your account's privacy settings.
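For context on step 2 above: the assistant reads these variables through pydantic-settings classes such as the `OpenaiSettings` class touched later in this patch. Below is a minimal sketch of that wiring, assuming pydantic-settings 2.x; the `OPENAI_` prefix and the `.env` path are illustrative only, since the real `model_config` lives in `src/assistant/lib/app/split_settings/` and is not shown in this diff.

```python
import pydantic
import pydantic_settings


class OpenaiSettings(pydantic_settings.BaseSettings):
    # Assumed wiring: the actual env_file/env_prefix setup belongs to the
    # project's split_settings package and is not part of this diff.
    model_config = pydantic_settings.SettingsConfigDict(
        env_file=".env", env_prefix="OPENAI_", extra="ignore"
    )

    api_key: pydantic.SecretStr = pydantic.SecretStr("")
    stt_model: str = "whisper-1"
    agent_temperature: float = 0.7
    model: str = "gpt-3.5-turbo-0613"  # setting added by this patch


settings = OpenaiSettings()  # values are read from .env / the environment
print(settings.stt_model, settings.agent_temperature)
```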
diff --git a/src/assistant/lib/agent/services.py b/src/assistant/lib/agent/services.py
index 59e4603..a8b471a 100644
--- a/src/assistant/lib/agent/services.py
+++ b/src/assistant/lib/agent/services.py
@@ -30,29 +30,12 @@ class AgentService:
         self.chat_repository = chat_repository
         self.logger = logging.getLogger(__name__)
 
-    async def send_message_request(self, request: str, system_prompt: str):
-        prompt = langchain.prompts.ChatPromptTemplate.from_messages(
-            [
-                ("system", system_prompt),
-            ]
-        )
-        llm = langchain.chat_models.ChatOpenAI(
-            temperature=self.settings.openai.agent_temperature,
-            openai_api_key=self.settings.openai.api_key.get_secret_value(),
-        )
-        chain = langchain.chains.LLMChain(llm=llm, prompt=prompt)
-        result = await chain.ainvoke({"input": request})
-        return result["text"]
-
     async def process_request(self, request: models.AgentCreateRequestModel) -> models.AgentCreateResponseModel:
         # Get session ID
-        request_text = request.text
-        translate_text = await self.send_message_request(request=request_text, system_prompt="Translation into English")
         session_request = models.RequestLastSessionId(channel=request.channel, user_id=request.user_id, minutes_ago=3)
         session_id = await self.chat_repository.get_last_session_id(session_request)
         if not session_id:
             session_id = uuid.uuid4()
-            await self.send_message_request(request="test", system_prompt="test")
 
         # Declare tools (OpenAI functions)
         tools = [
@@ -64,59 +47,56 @@ class AgentService:
             ),
         ]
 
-        llm = langchain.chat_models.ChatOpenAI(
-            temperature=self.settings.openai.agent_temperature,
-            openai_api_key=self.settings.openai.api_key.get_secret_value(),
-        )
-
-        chat_history = []
-        chat_history_name = f"{chat_history=}".partition("=")[0]
-
-        request_chat_history = models.RequestChatHistory(session_id=session_id)
-        chat_history_source = await self.chat_repository.get_messages_by_sid(request_chat_history)
-        if not chat_history_source:
-            for entry in chat_history_source:
-                if entry.role == "user":
-                    chat_history.append(langchain.schema.HumanMessage(content=entry.content))
-                elif entry.role == "agent":
-                    chat_history.append(langchain.schema.AIMessage(content=entry.content))
+        template = """
+        1. You are a movie expert with a vast knowledge base about movies and their related aspects.
+        2. Use functions to get additional data about movies.
+        3. Translate each inbound request into English before calling any functions.
+        4. Always answer in Russian.
+        5. Be very concise. Your answer must be no longer than 100 words."""
 
         prompt = langchain.prompts.ChatPromptTemplate.from_messages(
             [
-                (
-                    "system",
-                    """1. Translate each inbound request into English language. Before calling any functions.
-2. You are movie expert with a vast knowledge base about movies and their related aspects.
-3. Answer always in Russian language.
-4. Be concise. You answer must be within 100-150 words.""",
-                ),
-                langchain.prompts.MessagesPlaceholder(variable_name=chat_history_name),
+                ("system", template),
+                langchain.prompts.MessagesPlaceholder(variable_name="chat_history"),
                 ("user", "{input}"),
                 langchain.prompts.MessagesPlaceholder(variable_name="agent_scratchpad"),
             ]
         )
 
-        llm_with_tools = llm.bind(
-            functions=[langchain.tools.render.format_tool_to_openai_function(tool) for tool in tools]
+        llm = langchain.chat_models.ChatOpenAI(
+            temperature=self.settings.openai.agent_temperature,
+            openai_api_key=self.settings.openai.api_key.get_secret_value(),
+            model=self.settings.openai.model,
         )
 
-        agent = (
-            {
-                "input": lambda _: _["input"],
-                "agent_scratchpad": lambda _: langchain.agents.format_scratchpad.format_to_openai_functions(
-                    _["intermediate_steps"]
-                ),
-                "chat_history": lambda _: _["chat_history"],
-            }
-            | prompt
-            | llm_with_tools
-            | langchain.agents.output_parsers.OpenAIFunctionsAgentOutputParser()
+        agent_kwargs = {
+            "extra_prompt_messages": [langchain.prompts.MessagesPlaceholder(variable_name="memory")],
+        }
+        memory = langchain.memory.ConversationBufferMemory(memory_key="chat_history", return_messages=True)
+
+        # Load chat history from database
+        request_chat_history = models.RequestChatHistory(session_id=session_id)
+        chat_history = await self.chat_repository.get_messages_by_sid(request_chat_history)
+        for entry in chat_history:
+            self.logger.debug("Memory entry: %s", entry)
+            if entry.role == "user":
+                memory.chat_memory.add_user_message(entry.content)
+            elif entry.role == "agent":
+                memory.chat_memory.add_ai_message(entry.content)
+
+        self.logger.debug("Loaded memory: %s", memory.load_memory_variables({}))
+
+        agent = langchain.agents.OpenAIFunctionsAgent(llm=llm, tools=tools, prompt=prompt)
+        agent_executor: langchain.agents.AgentExecutor = langchain.agents.AgentExecutor.from_agent_and_tools(
+            tools=tools,
+            agent=agent,
+            agent_kwargs=agent_kwargs,
+            memory=memory,
         )
 
-        agent_executor = langchain.agents.AgentExecutor(agent=agent, tools=tools, verbose=True)
-        chat_history = []  # temporary disable chat_history
-        response = await agent_executor.ainvoke({"input": translate_text, "chat_history": chat_history})
+        response = await agent_executor.arun({"input": request.text})
+
+        # Save user request and AI response to database
         user_request = models.RequestChatMessage(
             session_id=session_id,
             user_id=request.user_id,
             channel=request.channel,
@@ -127,15 +107,10 @@ class AgentService:
             session_id=session_id,
             user_id=request.user_id,
             channel=request.channel,
-            message={"role": "assistant", "content": response["output"]},
+            message={"role": "assistant", "content": response},
         )
         await self.chat_repository.add_message(user_request)
         await self.chat_repository.add_message(ai_response)
 
-        response_translate = await self.send_message_request(
-            request=f"Original text: {request_text}. Answer: {response['output']}",
Answer: {response['output']}", - system_prompt="Translate the answer into the language of the original text", - ) - print(response_translate) - return models.AgentCreateResponseModel(text=response_translate) + return models.AgentCreateResponseModel(text=response) diff --git a/src/assistant/lib/app/split_settings/openai.py b/src/assistant/lib/app/split_settings/openai.py index 765040d..edfedfc 100644 --- a/src/assistant/lib/app/split_settings/openai.py +++ b/src/assistant/lib/app/split_settings/openai.py @@ -17,3 +17,4 @@ class OpenaiSettings(pydantic_settings.BaseSettings): ) stt_model: str = "whisper-1" agent_temperature: float = 0.7 + model: str = "gpt-3.5-turbo-0613" diff --git a/src/assistant/lib/app/split_settings/voice.py b/src/assistant/lib/app/split_settings/voice.py index 3b7ebd4..7ad2f5a 100644 --- a/src/assistant/lib/app/split_settings/voice.py +++ b/src/assistant/lib/app/split_settings/voice.py @@ -14,7 +14,7 @@ class VoiceSettings(pydantic_settings.BaseSettings): max_input_seconds: int = 30 max_input_size: int = 5120 # 5MB - available_formats: str = "wav,mp3,ogg" + available_formats: str = "wav,mp3,ogg,oga" @pydantic.field_validator("available_formats") def validate_available_formats(cls, v: str) -> list[str]: diff --git a/src/assistant/poetry.lock b/src/assistant/poetry.lock index 521fbdf..875acce 100644 --- a/src/assistant/poetry.lock +++ b/src/assistant/poetry.lock @@ -960,16 +960,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -1772,7 +1762,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1780,15 +1769,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1805,7 +1787,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1813,7 +1794,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1941,7 +1921,9 @@ python-versions = ">=3.7" files = [ {file = "SQLAlchemy-2.0.22-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f146c61ae128ab43ea3a0955de1af7e1633942c2b2b4985ac51cc292daf33222"}, {file = "SQLAlchemy-2.0.22-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:875de9414393e778b655a3d97d60465eb3fae7c919e88b70cc10b40b9f56042d"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13790cb42f917c45c9c850b39b9941539ca8ee7917dacf099cc0b569f3d40da7"}, {file = "SQLAlchemy-2.0.22-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e04ab55cf49daf1aeb8c622c54d23fa4bec91cb051a43cc24351ba97e1dd09f5"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a42c9fa3abcda0dcfad053e49c4f752eef71ecd8c155221e18b99d4224621176"}, {file = "SQLAlchemy-2.0.22-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:14cd3bcbb853379fef2cd01e7c64a5d6f1d005406d877ed9509afb7a05ff40a5"}, {file = "SQLAlchemy-2.0.22-cp310-cp310-win32.whl", hash = "sha256:d143c5a9dada696bcfdb96ba2de4a47d5a89168e71d05a076e88a01386872f97"}, {file = "SQLAlchemy-2.0.22-cp310-cp310-win_amd64.whl", hash = 
"sha256:ccd87c25e4c8559e1b918d46b4fa90b37f459c9b4566f1dfbce0eb8122571547"}, @@ -1962,19 +1944,25 @@ files = [ {file = "SQLAlchemy-2.0.22-cp312-cp312-win32.whl", hash = "sha256:154a32f3c7b00de3d090bc60ec8006a78149e221f1182e3edcf0376016be9396"}, {file = "SQLAlchemy-2.0.22-cp312-cp312-win_amd64.whl", hash = "sha256:129415f89744b05741c6f0b04a84525f37fbabe5dc3774f7edf100e7458c48cd"}, {file = "SQLAlchemy-2.0.22-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3940677d341f2b685a999bffe7078697b5848a40b5f6952794ffcf3af150c301"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55914d45a631b81a8a2cb1a54f03eea265cf1783241ac55396ec6d735be14883"}, {file = "SQLAlchemy-2.0.22-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2096d6b018d242a2bcc9e451618166f860bb0304f590d205173d317b69986c95"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:19c6986cf2fb4bc8e0e846f97f4135a8e753b57d2aaaa87c50f9acbe606bd1db"}, {file = "SQLAlchemy-2.0.22-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ac28bd6888fe3c81fbe97584eb0b96804bd7032d6100b9701255d9441373ec1"}, {file = "SQLAlchemy-2.0.22-cp37-cp37m-win32.whl", hash = "sha256:cb9a758ad973e795267da334a92dd82bb7555cb36a0960dcabcf724d26299db8"}, {file = "SQLAlchemy-2.0.22-cp37-cp37m-win_amd64.whl", hash = "sha256:40b1206a0d923e73aa54f0a6bd61419a96b914f1cd19900b6c8226899d9742ad"}, {file = "SQLAlchemy-2.0.22-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3aa1472bf44f61dd27987cd051f1c893b7d3b17238bff8c23fceaef4f1133868"}, {file = "SQLAlchemy-2.0.22-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:56a7e2bb639df9263bf6418231bc2a92a773f57886d371ddb7a869a24919face"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccca778c0737a773a1ad86b68bda52a71ad5950b25e120b6eb1330f0df54c3d0"}, {file = "SQLAlchemy-2.0.22-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6c3e9350f9fb16de5b5e5fbf17b578811a52d71bb784cc5ff71acb7de2a7f9"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:564e9f9e4e6466273dbfab0e0a2e5fe819eec480c57b53a2cdee8e4fdae3ad5f"}, {file = "SQLAlchemy-2.0.22-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:af66001d7b76a3fab0d5e4c1ec9339ac45748bc4a399cbc2baa48c1980d3c1f4"}, {file = "SQLAlchemy-2.0.22-cp38-cp38-win32.whl", hash = "sha256:9e55dff5ec115316dd7a083cdc1a52de63693695aecf72bc53a8e1468ce429e5"}, {file = "SQLAlchemy-2.0.22-cp38-cp38-win_amd64.whl", hash = "sha256:4e869a8ff7ee7a833b74868a0887e8462445ec462432d8cbeff5e85f475186da"}, {file = "SQLAlchemy-2.0.22-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9886a72c8e6371280cb247c5d32c9c8fa141dc560124348762db8a8b236f8692"}, {file = "SQLAlchemy-2.0.22-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a571bc8ac092a3175a1d994794a8e7a1f2f651e7c744de24a19b4f740fe95034"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8db5ba8b7da759b727faebc4289a9e6a51edadc7fc32207a30f7c6203a181592"}, {file = "SQLAlchemy-2.0.22-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b0b3f2686c3f162123adba3cb8b626ed7e9b8433ab528e36ed270b4f70d1cdb"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c1fea8c0abcb070ffe15311853abfda4e55bf7dc1d4889497b3403629f3bf00"}, {file = "SQLAlchemy-2.0.22-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:4bb062784f37b2d75fd9b074c8ec360ad5df71f933f927e9e95c50eb8e05323c"}, {file = "SQLAlchemy-2.0.22-cp39-cp39-win32.whl", hash = "sha256:58a3aba1bfb32ae7af68da3f277ed91d9f57620cf7ce651db96636790a78b736"}, {file = "SQLAlchemy-2.0.22-cp39-cp39-win_amd64.whl", hash = "sha256:92e512a6af769e4725fa5b25981ba790335d42c5977e94ded07db7d641490a85"}, diff --git a/src/bot_aiogram/docker-compose.yml b/src/bot_aiogram/docker-compose.yml index 4961d78..153f49b 100755 --- a/src/bot_aiogram/docker-compose.yml +++ b/src/bot_aiogram/docker-compose.yml @@ -26,7 +26,7 @@ services: - tg_bot_network volumes: - redis_data: + redis_data: networks: tg_bot_network: