mirror of https://github.com/ijaric/voice_assistant.git synced 2025-12-17 11:46:20 +00:00

feat: new file structure

Artem Litvinov
2023-10-03 21:29:55 +01:00
parent eab9177c00
commit 89660d1ac7
110 changed files with 2785 additions and 7 deletions

@@ -0,0 +1,3 @@
from .health import basic_router
__all__ = ["basic_router"]

@@ -0,0 +1,5 @@
from .liveness_probe import basic_router

__all__ = [
    "basic_router",
]

@@ -0,0 +1,15 @@
import fastapi

import lib.api.v1.schemas as api_schemas

basic_router = fastapi.APIRouter()


@basic_router.get(
    "/",
    response_model=api_schemas.HealthResponse,
    summary="Health status",
    description="Checks that the FastAPI service is up and responding.",
)
async def health() -> api_schemas.HealthResponse:
    return api_schemas.HealthResponse(status="healthy")

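A quick way to see what this handler returns (a hypothetical check, not part of the commit) is to mount the router on a bare FastAPI app and probe it with the test client; the /api/v1/health prefix below mirrors the one used in app.py.

# Hypothetical smoke test for the health endpoint (illustrative only).
import fastapi
import fastapi.testclient

import lib.api.v1.handlers as api_v1_handlers

app = fastapi.FastAPI()
app.include_router(api_v1_handlers.basic_router, prefix="/api/v1/health", tags=["health"])

client = fastapi.testclient.TestClient(app)
response = client.get("/api/v1/health/")
assert response.status_code == 200
assert response.json() == {"status": "healthy"}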
@@ -0,0 +1,3 @@
from .base import HealthResponse
__all__ = ["HealthResponse"]

@@ -0,0 +1,5 @@
import pydantic


class HealthResponse(pydantic.BaseModel):
    status: str = pydantic.Field(default=..., examples=["healthy"], description="Service availability status")

@@ -0,0 +1,11 @@
from .app import Application
from .errors import *
from .settings import Settings

__all__ = [
    "Application",
    "ApplicationError",
    "DisposeError",
    "Settings",
    "StartServerError",
]

@@ -0,0 +1,131 @@
import dataclasses
import logging
import logging.config as logging_config
import typing

import fastapi
import uvicorn

import lib.api.v1.handlers as api_v1_handlers
import lib.app.errors as app_errors
import lib.app.settings as app_settings
import lib.app.split_settings as app_split_settings
import lib.clients as clients

logger = logging.getLogger(__name__)


@dataclasses.dataclass
class DisposableResource:
    name: str
    dispose_callback: typing.Awaitable[typing.Any]


class Application:
    def __init__(
        self,
        settings: app_settings.Settings,
        fastapi_app: fastapi.FastAPI,
        disposable_resources: list[DisposableResource],
    ) -> None:
        self._settings = settings
        self._fastapi_app = fastapi_app
        self._disposable_resources = disposable_resources

    @classmethod
    def from_settings(cls, settings: app_settings.Settings) -> typing.Self:
        # Logging
        logging_config.dictConfig(app_split_settings.get_logging_config(**settings.logger.model_dump()))

        logger.info("Initializing application")
        disposable_resources = []

        # Global clients
        logger.info("Initializing global clients")
        postgres_client = clients.AsyncPostgresClient(settings=settings)
        disposable_resources.append(
            DisposableResource(
                name="postgres_client",
                dispose_callback=postgres_client.dispose_callback(),
            )
        )

        # Clients
        logger.info("Initializing clients")

        # Repositories
        logger.info("Initializing repositories")

        # Caches
        logger.info("Initializing caches")

        # Services
        logger.info("Initializing services")

        # Handlers
        logger.info("Initializing handlers")
        liveness_probe_handler = api_v1_handlers.basic_router

        logger.info("Creating application")
        fastapi_app = fastapi.FastAPI(
            title=settings.app.title,
            version=settings.app.version,
            docs_url=settings.app.docs_url,
            openapi_url=settings.app.openapi_url,
            default_response_class=fastapi.responses.ORJSONResponse,
        )

        # Routes
        fastapi_app.include_router(liveness_probe_handler, prefix="/api/v1/health", tags=["health"])

        application = Application(
            settings=settings,
            fastapi_app=fastapi_app,
            disposable_resources=disposable_resources,
        )

        logger.info("Initializing application finished")

        return application

    async def start(self) -> None:
        try:
            config = uvicorn.Config(
                app=self._fastapi_app,
                host=self._settings.api.host,
                port=self._settings.api.port,
            )
            server = uvicorn.Server(config)
            await server.serve()
        except BaseException as unexpected_error:
            logger.exception("FastAPI failed to start")
            raise app_errors.StartServerError("FastAPI failed to start") from unexpected_error

    async def dispose(self) -> None:
        logger.info("Application is shutting down...")
        dispose_errors = []

        for resource in self._disposable_resources:
            logger.info("Disposing %s...", resource.name)
            try:
                await resource.dispose_callback
            except Exception as unexpected_error:
                dispose_errors.append(unexpected_error)
                logger.exception("Failed to dispose %s", resource.name)
            else:
                logger.info("%s has been disposed", resource.name)

        if len(dispose_errors) != 0:
            logger.error("Application has shut down with errors")
            raise app_errors.DisposeError("Application has shut down with errors, see logs above")

        logger.info("Application has successfully shut down")

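The entrypoint that drives Application is not included in this diff; the sketch below shows how it is presumably wired from an asyncio-based __main__ module (the names here are illustrative, not from the commit).

# Hypothetical entrypoint sketch: compose settings, build the app, serve, then dispose.
import asyncio

import lib.app as app


async def run() -> None:
    settings = app.Settings()
    application = app.Application.from_settings(settings)
    try:
        await application.start()
    finally:
        await application.dispose()


if __name__ == "__main__":
    asyncio.run(run())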
@@ -0,0 +1,22 @@
import typing


class ApplicationError(Exception):
    def __init__(self, message: str, *args: typing.Any) -> None:
        super().__init__(*args)
        self.message = message


class DisposeError(ApplicationError):
    pass


class StartServerError(ApplicationError):
    pass


__all__ = [
    "ApplicationError",
    "DisposeError",
    "StartServerError",
]

@@ -0,0 +1,69 @@
import pydantic_settings


class LoggingSettings(pydantic_settings.BaseSettings):
    log_format: str = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    log_default_handlers: list[str] = [
        "console",
    ]
    log_level_handlers: str = "DEBUG"
    log_level_loggers: str = "INFO"
    log_level_root: str = "INFO"


log_settings = LoggingSettings()

LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "verbose": {"format": log_settings.log_format},
        "default": {
            "()": "uvicorn.logging.DefaultFormatter",
            "fmt": "%(levelprefix)s %(message)s",
            "use_colors": None,
        },
        "access": {
            "()": "uvicorn.logging.AccessFormatter",
            "fmt": "%(levelprefix)s %(client_addr)s - '%(request_line)s' %(status_code)s",
        },
    },
    "handlers": {
        "console": {
            "level": log_settings.log_level_handlers,
            "class": "logging.StreamHandler",
            "formatter": "verbose",
        },
        "default": {
            "formatter": "default",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stdout",
        },
        "access": {
            "formatter": "access",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stdout",
        },
    },
    "loggers": {
        "": {
            "handlers": log_settings.log_default_handlers,
            "level": log_settings.log_level_loggers,
        },
        "uvicorn.error": {
            "level": log_settings.log_level_loggers,
        },
        "uvicorn.access": {
            "handlers": ["access"],
            "level": log_settings.log_level_loggers,
            "propagate": False,
        },
    },
    "root": {
        "level": log_settings.log_level_root,
        "formatter": "verbose",
        "handlers": log_settings.log_default_handlers,
    },
}

@@ -0,0 +1,18 @@
import pydantic
import pydantic_settings

import lib.app.split_settings as app_split_settings


class Settings(pydantic_settings.BaseSettings):
    api: app_split_settings.ApiSettings = pydantic.Field(default_factory=lambda: app_split_settings.ApiSettings())
    app: app_split_settings.AppSettings = pydantic.Field(default_factory=lambda: app_split_settings.AppSettings())
    postgres: app_split_settings.PostgresSettings = pydantic.Field(
        default_factory=lambda: app_split_settings.PostgresSettings()
    )
    logger: app_split_settings.LoggingSettings = pydantic.Field(
        default_factory=lambda: app_split_settings.LoggingSettings()
    )
    project: app_split_settings.ProjectSettings = pydantic.Field(
        default_factory=lambda: app_split_settings.ProjectSettings()
    )

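A hypothetical usage sketch (not part of the commit): the composed Settings object builds each group from the shared .env file, and the groups that set env_prefix (API_, APP_, POSTGRES_) read prefixed variables; required secrets such as the Postgres password and jwt_secret_key must be present or construction fails.

# Hypothetical usage sketch of the composed settings object.
import lib.app.settings as app_settings

settings = app_settings.Settings()
print(settings.app.title, settings.api.host, settings.api.port)  # APP_TITLE, API_HOST, API_PORT
print(settings.postgres.dsn_as_safe_url)  # DSN with the password masked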
@@ -0,0 +1,14 @@
from .api import *
from .app import *
from .logger import *
from .postgres import *
from .project import *

__all__ = [
    "ApiSettings",
    "AppSettings",
    "LoggingSettings",
    "PostgresSettings",
    "ProjectSettings",
    "get_logging_config",
]

@@ -0,0 +1,15 @@
import pydantic_settings

import lib.app.split_settings.utils as app_split_settings_utils


class ApiSettings(pydantic_settings.BaseSettings):
    model_config = pydantic_settings.SettingsConfigDict(
        env_file=app_split_settings_utils.ENV_PATH,
        env_prefix="API_",
        env_file_encoding="utf-8",
        extra="ignore",
    )

    host: str = "0.0.0.0"
    port: int = 8000

@@ -0,0 +1,25 @@
import pydantic
import pydantic_settings

import lib.app.split_settings.utils as app_split_settings_utils


class AppSettings(pydantic_settings.BaseSettings):
    model_config = pydantic_settings.SettingsConfigDict(
        env_file=app_split_settings_utils.ENV_PATH,
        env_prefix="APP_",
        env_file_encoding="utf-8",
        extra="ignore",
    )

    title: str = "FastAPI"
    version: str = "0.1.0"
    docs_url: str = "/api/openapi"
    openapi_url: str = "/api/openapi.json"
    reload: bool = False

    @pydantic.field_validator("reload")
    def validate_reload(cls, v: str | bool) -> bool:
        if isinstance(v, bool):
            return v
        return v.lower() == "true"

@@ -0,0 +1,79 @@
import pydantic_settings

import lib.app.split_settings.utils as app_split_settings_utils


class LoggingSettings(pydantic_settings.BaseSettings):
    model_config = pydantic_settings.SettingsConfigDict(
        env_file=app_split_settings_utils.ENV_PATH, env_file_encoding="utf-8", extra="ignore"
    )

    log_format: str = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    log_default_handlers: list[str] = [
        "console",
    ]
    log_level_handlers: str = "INFO"
    log_level_loggers: str = "INFO"
    log_level_root: str = "INFO"


def get_logging_config(
    log_format: str,
    log_default_handlers: list[str],
    log_level_handlers: str,
    log_level_loggers: str,
    log_level_root: str,
):
    return {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "verbose": {"format": log_format},
            "default": {
                "()": "uvicorn.logging.DefaultFormatter",
                "fmt": "%(levelprefix)s %(message)s",
                "use_colors": None,
            },
            "access": {
                "()": "uvicorn.logging.AccessFormatter",
                "fmt": "%(levelprefix)s %(client_addr)s - '%(request_line)s' %(status_code)s",
            },
        },
        "handlers": {
            "console": {
                "level": log_level_handlers,
                "class": "logging.StreamHandler",
                "formatter": "verbose",
            },
            "default": {
                "formatter": "default",
                "class": "logging.StreamHandler",
                "stream": "ext://sys.stdout",
            },
            "access": {
                "formatter": "access",
                "class": "logging.StreamHandler",
                "stream": "ext://sys.stdout",
            },
        },
        "loggers": {
            "": {
                "handlers": log_default_handlers,
                "level": log_level_loggers,
            },
            "uvicorn.error": {
                "level": log_level_loggers,
            },
            "uvicorn.access": {
                "handlers": ["access"],
                "level": log_level_loggers,
                "propagate": False,
            },
        },
        "root": {
            "level": log_level_root,
            "formatter": "verbose",
            "handlers": log_default_handlers,
        },
    }

@@ -0,0 +1,44 @@
import pydantic
import pydantic_settings

import lib.app.split_settings.utils as app_split_settings_utils


class PostgresSettings(pydantic_settings.BaseSettings):
    """Postgres settings."""

    model_config = pydantic_settings.SettingsConfigDict(
        env_file=app_split_settings_utils.ENV_PATH,
        env_prefix="POSTGRES_",
        env_file_encoding="utf-8",
        extra="ignore",
    )

    # Connection settings
    driver: str = "postgresql+asyncpg"
    db_name: str = "database_name"
    host: str = "localhost"
    port: int = 5432
    user: str = "app"
    password: pydantic.SecretStr = pydantic.Field(
        default=..., validation_alias=pydantic.AliasChoices("password", "postgres_password")
    )

    # Engine settings
    pool_size: int = 50
    pool_pre_ping: bool = True
    echo: bool = False

    # Session settings
    auto_commit: bool = False
    auto_flush: bool = False
    expire_on_commit: bool = False

    @property
    def dsn(self) -> str:
        password = self.password.get_secret_value()
        return f"{self.driver}://{self.user}:{password}@{self.host}:{self.port}/{self.db_name}"

    @property
    def dsn_as_safe_url(self) -> str:
        return f"{self.driver}://{self.user}:***@{self.host}:{self.port}/{self.db_name}"

@@ -0,0 +1,22 @@
import pydantic
import pydantic_settings

import lib.app.split_settings.utils as app_split_settings_utils


class ProjectSettings(pydantic_settings.BaseSettings):
    """Project settings."""

    model_config = pydantic_settings.SettingsConfigDict(
        env_file=app_split_settings_utils.ENV_PATH,
        env_file_encoding="utf-8",
        extra="ignore",
    )

    debug: bool = False
    jwt_secret_key: str = pydantic.Field(default=..., validation_alias="jwt_secret_key")
    jwt_algorithm: str = "HS256"

    @pydantic.field_validator("debug")
    def validate_debug(cls, v: str | bool) -> bool:
        if isinstance(v, bool):
            return v
        return v.lower() == "true"

@@ -0,0 +1,4 @@
import pathlib
BASE_PATH = pathlib.Path(__file__).parent.parent.parent.parent.resolve()
ENV_PATH = BASE_PATH / ".env"

@@ -0,0 +1,3 @@
from .postgres import AsyncPostgresClient
__all__ = ["AsyncPostgresClient"]

@@ -0,0 +1,30 @@
import sqlalchemy.ext.asyncio as sa_asyncio

import lib.app.settings as app_settings


class AsyncPostgresClient:
    """Async Postgres client that builds an async engine and returns a sessionmaker."""

    def __init__(self, settings: app_settings.Settings) -> None:
        self.settings = settings.postgres
        self.async_engine = sa_asyncio.create_async_engine(
            url=self.settings.dsn,
            pool_size=self.settings.pool_size,
            pool_pre_ping=self.settings.pool_pre_ping,
            echo=self.settings.echo,
            future=True,
        )

    def get_async_session(self) -> sa_asyncio.async_sessionmaker[sa_asyncio.AsyncSession]:
        # SQLAlchemy 2.0 sessions no longer accept an autocommit flag, so only
        # autoflush and expire_on_commit are forwarded from the settings.
        async_session = sa_asyncio.async_sessionmaker(
            bind=self.async_engine,
            autoflush=self.settings.auto_flush,
            expire_on_commit=self.settings.expire_on_commit,
        )
        return async_session  # noqa: RET504

    async def dispose_callback(self) -> None:
        await self.async_engine.dispose()

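A hypothetical usage sketch (not part of the commit): the sessionmaker returned by get_async_session is opened as an async context manager; the trivial SELECT 1 here only stands in for real repository code.

# Hypothetical usage sketch of the Postgres client.
import sqlalchemy as sa

import lib.app.settings as app_settings
import lib.clients as clients


async def ping_database() -> int:
    client = clients.AsyncPostgresClient(settings=app_settings.Settings())
    session_factory = client.get_async_session()
    try:
        async with session_factory() as session:
            result = await session.execute(sa.select(sa.literal(1)))
            return result.scalar_one()
    finally:
        await client.dispose_callback()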
@@ -0,0 +1,4 @@
from .orm import Base, IdCreatedUpdatedBaseMixin
from .token import Token
__all__ = ["Base", "IdCreatedUpdatedBaseMixin", "Token"]

@@ -0,0 +1,3 @@
from .base import Base, IdCreatedUpdatedBaseMixin
__all__ = ["Base", "IdCreatedUpdatedBaseMixin"]

@@ -0,0 +1,35 @@
import datetime
import uuid

import sqlalchemy
import sqlalchemy.dialects.postgresql
import sqlalchemy.ext.declarative
import sqlalchemy.orm as sa_orm
import sqlalchemy.sql as sa_sql


class Base(sa_orm.DeclarativeBase):
    """Base class for all models."""

    @sqlalchemy.ext.declarative.declared_attr.directive
    def __tablename__(cls):
        return cls.__name__.lower()

    __mapper_args__ = {"eager_defaults": True}

    id: sa_orm.Mapped[uuid.UUID] = sa_orm.mapped_column(primary_key=True, default=uuid.uuid4)


class IdCreatedUpdatedBaseMixin:
    # id: sa_orm.Mapped[int] = sa_orm.mapped_column(primary_key=True)
    # id_field: sa_orm.Mapped[uuid.UUID] = sa_orm.mapped_column(name="uuid", primary_key=True, unique=True, default=uuid.uuid4, nullable=False)

    created: sa_orm.Mapped[datetime.datetime] = sa_orm.mapped_column(server_default=sa_sql.func.now())
    updated: sa_orm.Mapped[datetime.datetime] = sa_orm.mapped_column(
        server_default=sa_sql.func.now(), onupdate=sa_sql.func.now()
    )

    # __mapper_args__ = {"eager_defaults": True}

    # @sqlalchemy.ext.declarative.declared_attr.directive
    # def __tablename__(cls) -> str:
    #     return cls.__name__.lower()

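A hypothetical model (not part of the commit) showing how Base and IdCreatedUpdatedBaseMixin are presumably meant to be combined; the table name "user" is derived from the class name by the declared_attr above.

# Hypothetical model combining the declarative base with the timestamp mixin.
import sqlalchemy.orm as sa_orm

import lib.models as models


class User(models.Base, models.IdCreatedUpdatedBaseMixin):
    login: sa_orm.Mapped[str] = sa_orm.mapped_column(unique=True)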
@@ -0,0 +1,9 @@
import uuid

import pydantic


# TODO: TBU
class Token(pydantic.BaseModel):
    sub: uuid.UUID
    exp: int | None = None

@@ -0,0 +1,25 @@
import fastapi
import fastapi.security
import jose
import jose.jwt
import pydantic

import lib.app.settings as app_settings
import lib.models as models


def get_token_data(
    authorization: fastapi.security.HTTPAuthorizationCredentials = fastapi.Security(fastapi.security.HTTPBearer()),
) -> models.Token:
    settings = app_settings.Settings()
    token = authorization.credentials

    try:
        secret_key = settings.project.jwt_secret_key
        payload = jose.jwt.decode(token, secret_key, algorithms=[settings.project.jwt_algorithm])
        return models.Token(**payload)
    except (jose.JWTError, pydantic.ValidationError) as error:
        raise fastapi.HTTPException(
            status_code=fastapi.status.HTTP_401_UNAUTHORIZED,
            detail="Could not validate credentials",
        ) from error
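A hypothetical route, written as if it lived in the same module as get_token_data (not part of the commit), showing how the dependency would presumably be attached to a protected endpoint; the router and path are illustrative only.

# Hypothetical protected route using the bearer-token dependency above.
protected_router = fastapi.APIRouter()


@protected_router.get("/me")
async def read_me(token: models.Token = fastapi.Depends(get_token_data)) -> dict[str, str]:
    # "sub" carries the authenticated user's id from the verified JWT payload.
    return {"user_id": str(token.sub)}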