1
0
mirror of https://github.com/ijaric/voice_assistant.git synced 2025-05-24 14:33:26 +00:00

feat: [#45] llm_agent

This commit is contained in:
Григорич 2023-10-15 09:53:13 +03:00
parent 6de92100a7
commit b3e480886e
8 changed files with 18 additions and 44 deletions

View File

@ -1,3 +1,5 @@
from .chat_repository import ChatHistoryRepository
from .services import AgentService
__all__ = ["ChatHistoryRepository"]
__all__ = [
"AgentService",
]

View File

@ -0,0 +1,9 @@
from .chat_repository import ChatHistoryRepository
from .embedding_repository import EmbeddingRepository
from .openai_functions import OpenAIFunctions
__all__ = [
"ChatHistoryRepository",
"EmbeddingRepository",
"OpenAIFunctions",
]

View File

@ -1,7 +1,5 @@
import logging
import uuid
import langchain.agents
import sqlalchemy as sa
import sqlalchemy.exc
import sqlalchemy.ext.asyncio as sa_asyncio
@ -23,28 +21,6 @@ class OpenAIFunctions:
self.pg_async_session = pg_async_session
self.repository = repository
@langchain.agents.tool
async def artem_get_movie_by_description(self, description: str) -> list[models.Movie] | None:
    """Provide a movie data by description."""
    # Log the request, then embed the free-text description so films can be
    # ranked by vector similarity.
    self.logger.info("Request to get movie by description: %s", description)
    embedding = await self.repository.aget_embedding(description)
    try:
        async with self.pg_async_session() as session:
            # Five nearest film records by cosine distance to the query
            # embedding (pgvector operator exposed via the ORM column).
            query = (
                sa.select(orm_models.FilmWork)
                .order_by(orm_models.FilmWork.embeddings.cosine_distance(embedding.root))
                .limit(5)
            )
            rows = (await session.execute(query)).scalars()
            return [models.Movie(**row.__dict__) for row in rows]
    except sqlalchemy.exc.SQLAlchemyError as error:
        # On a database failure: log with traceback and fall through,
        # implicitly returning None (matches the `| None` annotation).
        self.logger.exception("Error: %s", error)
async def get_movie_by_description(self, description: str) -> list[models.Movie] | None:
"""Provide a movie data by description."""
@ -65,16 +41,3 @@ class OpenAIFunctions:
return result
except sqlalchemy.exc.SQLAlchemyError as error:
self.logger.exception("Error: %s", error)
@langchain.agents.tool
def get_movie_by_id(self, id: uuid.UUID = None) -> models.Movie | None:
"""Provide a movie data by movie id."""
# self.logger.info("Request to get movie by id: %s", id)
return f"hello world {id}"
@langchain.agents.tool
def get_similar_movies(self, id: uuid.UUID) -> list[models.Movie] | None:
    """Provide similar movies for the given movie ID."""
    # Fix: log message was copy-pasted from get_movie_by_id and claimed a
    # by-id lookup; it now describes what this tool actually does.
    self.logger.info("Request to get similar movies for id: %s", id)
    # NOTE(review): stub — always returns None until implemented.
    return None

View File

@ -15,16 +15,15 @@ import langchain.memory
import langchain.memory.chat_memory
import lib.models as models
import lib.agent.openai_functions as openai_functions
import lib.agent.repositories as lib_agent_repositories
import lib.app.settings as app_settings
import lib.agent.chat_repository as _chat_repository
import lib.agent.repositories.chat_repository as _chat_repository
class AgentService:
def __init__(
self,
settings: app_settings.Settings,
tools: openai_functions.OpenAIFunctions,
chat_repository: _chat_repository.ChatHistoryRepository,
) -> None:
self.settings = settings

View File

@ -7,7 +7,7 @@ import lib.models as models
class AgentHandler:
def __init__(self, chat_history_repository: agent.ChatHistoryRepository):
def __init__(self, chat_history_repository: agent_repositories.ChatHistoryRepository):
self.chat_history_repository = chat_history_repository
self.router = fastapi.APIRouter()
self.router.add_api_route(

View File

@ -28,6 +28,8 @@ class VoiceResponseHandler:
async def voice_response(
self,
channel: str,
user_id: str,
voice: bytes = fastapi.File(...),
) -> fastapi.responses.StreamingResponse:
voice_text: str = await self.stt.recognize(voice)

View File

@ -6,7 +6,6 @@ import typing
import fastapi
import uvicorn
import lib.agent as agent
import lib.api.v1.handlers as api_v1_handlers
import lib.app.errors as app_errors
import lib.app.settings as app_settings