Mirror of https://github.com/ijaric/voice_assistant.git (synced 2025-05-24 14:33:26 +00:00)

Commit 7279333da9: Merge branch 'main' into features/#15_postgres_client
@@ -10,3 +10,5 @@ API_HOST=0.0.0.0
 API_PORT=8000
 
+JWT_SECRET_KEY=v9LctjUWwol4XbvczPiLFMDtZ8aal7mm
 
+APP_RELOAD=True
src/fastapi_app/alembic.ini (new file, 114 lines)
@@ -0,0 +1,114 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = alembic

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

file_template = %%(year)d-%%(month).2d-%%(day).2d_%%(rev)s_%%(slug)s


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
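Not part of the commit, but for orientation: with this ini in place, migrations can be driven either via the alembic CLI or programmatically through Alembic's command API. A minimal sketch, assuming it is run from the directory that contains alembic.ini:

import alembic.command
import alembic.config

# Load the configuration above; env.py (below) injects sqlalchemy.url from the app settings.
config = alembic.config.Config("alembic.ini")

# Generate a revision from the model metadata and apply everything up to head.
alembic.command.revision(config, message="init", autogenerate=True)
alembic.command.upgrade(config, "head")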
src/fastapi_app/alembic/README (new file, 1 line)
@@ -0,0 +1 @@
Generic single-database configuration with an async dbapi.
src/fastapi_app/alembic/env.py (new file, 83 lines)
@@ -0,0 +1,83 @@
import asyncio
from logging.config import fileConfig

from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config

import lib.app.settings as app_settings
import lib.models as models
from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

config.set_main_option("sqlalchemy.url", app_settings.settings.postgres.dsn)

target_metadata = models.Base.metadata


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    context.configure(connection=connection, target_metadata=target_metadata)

    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """In this scenario we need to create an Engine
    and associate a connection with the context.

    """

    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""

    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
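env.py relies on two pieces that are outside this diff: lib.models must expose a declarative Base whose metadata drives autogenerate, and the Postgres settings must expose an async-driver DSN string. A rough sketch of what those are assumed to look like (names and fields here are illustrative, not taken from the repository):

import pydantic_settings
import sqlalchemy.orm


class Base(sqlalchemy.orm.DeclarativeBase):
    """Models inheriting from this register their tables in Base.metadata."""


class PostgresSettings(pydantic_settings.BaseSettings):
    host: str = "localhost"
    port: int = 5432
    user: str = "app"
    password: str = "app"
    db_name: str = "app"

    @property
    def dsn(self) -> str:
        # An async driver is required because env.py builds the engine with async_engine_from_config.
        return f"postgresql+asyncpg://{self.user}:{self.password}@{self.host}:{self.port}/{self.db_name}"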
src/fastapi_app/alembic/script.py.mako (new file, 26 lines)
@@ -0,0 +1,26 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
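Combined with the file_template override in alembic.ini, a generated revision lands in alembic/versions/ under a date-prefixed name such as 2023-10-01_ab12cd34ef56_create_example.py (all values here are illustrative) and, once autogenerate fills in the bodies, looks roughly like:

"""create example

Revision ID: ab12cd34ef56
Revises:
Create Date: 2023-10-01 12:00:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision: str = "ab12cd34ef56"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.create_table(
        "example",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("name", sa.Text(), nullable=False),
    )


def downgrade() -> None:
    op.drop_table("example")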
@@ -1,17 +1,37 @@
+import asyncio
 import logging
+import os
 
-import uvicorn
-
-import lib.app.app as app_module
-import lib.app.settings as app_settings
+import lib.app as app
 
 logger = logging.getLogger(__name__)
 
 
-app_instance = app_module.Application()
-app = app_instance.create_app()
-settings = app_settings.settings
+async def run() -> None:
+    settings = app.Settings()
+    application = app.Application.from_settings(settings)
+
+    try:
+        await application.start()
+    finally:
+        await application.dispose()
+
+
+def main() -> None:
+    try:
+        asyncio.run(run())
+        exit(os.EX_OK)
+    except SystemExit:
+        exit(os.EX_OK)
+    except app.ApplicationError:
+        exit(os.EX_SOFTWARE)
+    except KeyboardInterrupt:
+        logger.info("Exited with keyboard interruption")
+        exit(os.EX_OK)
+    except BaseException:
+        logger.exception("Unexpected error occurred")
+        exit(os.EX_SOFTWARE)
 
 
 if __name__ == "__main__":
-    uvicorn.run(app, host=settings.api.host, port=settings.api.port)
+    main()
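For reference (not part of the diff), the exit codes used in main() are the POSIX sysexits constants from os, which are Unix-only:

import os

assert os.EX_OK == 0         # clean shutdown
assert os.EX_SOFTWARE == 70  # internal software error (sysexits.h)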
@@ -0,0 +1,5 @@
from .health import *

__all__ = [
    "health_router",
]
src/fastapi_app/lib/api/v1/handlers/health/__init__.py (new file, 5 lines)
@@ -0,0 +1,5 @@
from .liveness_probe import router as health_router

__all__ = [
    "health_router",
]
src/fastapi_app/lib/api/v1/handlers/health/liveness_probe.py (new file, 15 lines)
@@ -0,0 +1,15 @@
import fastapi

import lib.api.v1.schemas as api_shemas

router = fastapi.APIRouter()


@router.get(
    "/",
    response_model=api_shemas.HealthResponseModel,
    summary="Health status",
    description="Checks that the FastAPI service is available.",
)
async def health():
    return api_shemas.HealthResponseModel(status="healthy")
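A quick way to exercise this handler (not in the commit): mount the router the same way app.py does below and call it with FastAPI's test client. Assumes the httpx-backed TestClient dependency is installed.

import fastapi
import fastapi.testclient

import lib.api.v1.handlers as api_v1_handlers

app = fastapi.FastAPI()
app.include_router(api_v1_handlers.health_router, prefix="/api/v1/health", tags=["health"])

client = fastapi.testclient.TestClient(app)
response = client.get("/api/v1/health/")
assert response.status_code == 200
assert response.json() == {"status": "healthy"}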
@@ -0,0 +1,6 @@
from .base import *

__all__ = [
    "HealthResponseModel",
    "TokenResponseModel",
]
src/fastapi_app/lib/api/v1/schemas/base.py (new file, 12 lines)
@@ -0,0 +1,12 @@
import uuid

import pydantic


class TokenResponseModel(pydantic.BaseModel):
    sub: uuid.UUID
    exp: int | None = None


class HealthResponseModel(pydantic.BaseModel):
    status: str = pydantic.Field(default=..., examples=["healthy"], description="Service availability status")
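Both schemas are plain pydantic v2 models, so they validate and serialize directly; a small illustration with made-up values:

import uuid

import lib.api.v1.schemas as api_schemas

token = api_schemas.TokenResponseModel(sub=uuid.uuid4())  # exp defaults to None
health = api_schemas.HealthResponseModel(status="healthy")
print(token.model_dump())        # {'sub': UUID('...'), 'exp': None}
print(health.model_dump_json())  # {"status":"healthy"}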
@@ -1,8 +0,0 @@
-import uuid
-
-import pydantic
-
-
-class Token(pydantic.BaseModel):
-    sub: uuid.UUID
-    exp: int | None = None
@@ -0,0 +1,11 @@
from .app import Application
from .errors import *
from .settings import Settings

__all__ = [
    "Application",
    "Settings",
    "ApplicationError",
    "DisposeError",
    "StartServerError",
]
@@ -1,36 +1,123 @@
+import dataclasses
 import logging
+import logging.config as logging_config
+import typing
 
 import fastapi
 import uvicorn
 
 import lib.api.v1.handlers as api_v1_handlers
+import lib.app.errors as app_errors
 import lib.app.settings as app_settings
+import lib.app.split_settings as app_split_settings
 
+logger = logging.getLogger(__name__)
+
 
-class Application:
-    def __init__(self) -> None:
-        self.settings = app_settings
-        self.logger = logging.getLogger(__name__)
-        self.producer = None
+@dataclasses.dataclass
+class DisposableResource:
+    name: str
+    dispose_callback: typing.Awaitable[typing.Any]
 
-    def create_app(self) -> fastapi.FastAPI:
-        app = fastapi.FastAPI(
-            title="FastAPI",
-            version="0.1.0",
-            docs_url="/api/openapi",
-            openapi_url="/api/openapi.json",
+
+class Application:
+    def __init__(
+        self,
+        settings: app_settings.Settings,
+        fastapi_app: fastapi.FastAPI,
+        disposable_resources: list[DisposableResource],
+    ) -> None:
+        self._settings = settings
+        self._fastapi_app = fastapi_app
+        self._disposable_resources = disposable_resources
+
+    @classmethod
+    def from_settings(cls, settings: app_settings.Settings) -> typing.Self:
+        # Logging
+
+        logging_config.dictConfig(app_split_settings.get_logging_config(**settings.logger.model_dump()))
+
+        logger.info("Initializing application")
+        disposable_resources = []
+
+        # Global clients
+
+        logger.info("Initializing global clients")
+
+        # Clients
+
+        logger.info("Initializing clients")
+
+        # Repositories
+
+        logger.info("Initializing repositories")
+
+        # Caches
+
+        logger.info("Initializing caches")
+
+        # Services
+
+        logger.info("Initializing services")
+
+        # Handlers
+
+        logger.info("Initializing handlers")
+        # liveness_probe_handler = health_handlers.LivenessProbeHandler()
+
+        logger.info("Creating application")
+        # aio_app = aiohttp_web.Application()
+
+        fastapi_app = fastapi.FastAPI(
+            title=settings.app.title,
+            version=settings.app.version,
+            docs_url=settings.app.docs_url,
+            openapi_url=settings.app.openapi_url,
+            default_response_class=fastapi.responses.ORJSONResponse,
         )
 
-        # app.include_router(api_handlers.user_router, prefix="/api/v1/users", tags=["users"])
-        # app.include_router(api_handlers.movie_router, prefix="/api/v1/movies", tags=["movies"])
+        # Routes
+        fastapi_app.include_router(api_v1_handlers.health_router, prefix="/api/v1/health", tags=["health"])
 
-        @app.on_event("startup")
-        async def startup_event():
-            self.logger.info("Starting server")
+        application = Application(
+            settings=settings,
+            fastapi_app=fastapi_app,
+            disposable_resources=disposable_resources,
+        )
 
-        @app.on_event("shutdown")
-        async def shutdown_event():
-            self.logger.info("Shutting down server")
+        logger.info("Initializing application finished")
 
-        return app
+        return application
 
+    async def start(self) -> None:
+        try:
+            config = uvicorn.Config(
+                app=self._fastapi_app,
+                host=self._settings.api.host,
+                port=self._settings.api.port,
+            )
+            server = uvicorn.Server(config)
+            await server.serve()
+        except BaseException as unexpected_error:
+            logger.exception("FastAPI failed to start")
+            raise app_errors.StartServerError("FastAPI failed to start") from unexpected_error
+
+    async def dispose(self) -> None:
+        logger.info("Application is shutting down...")
+        dispose_errors = []
+
+        for resource in self._disposable_resources:
+            logger.info("Disposing %s...", resource.name)
+            try:
+                await resource.dispose_callback
+            except Exception as unexpected_error:
+                dispose_errors.append(unexpected_error)
+                logger.exception("Failed to dispose %s", resource.name)
+            else:
+                logger.info("%s has been disposed", resource.name)
+
+        if len(dispose_errors) != 0:
+            logger.error("Application has shut down with errors")
+            raise app_errors.DisposeError("Application has shut down with errors, see logs above")
+
+        logger.info("Application has successfully shut down")
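disposable_resources stays empty in this commit; the pattern it enables is to hand from_settings() one awaitable per resource and let dispose() await them on shutdown. A hypothetical example of wiring a Postgres engine (the engine itself is not created anywhere in this diff, and settings.postgres.dsn is the property env.py also uses):

import sqlalchemy.ext.asyncio

# Inside Application.from_settings(), after `disposable_resources = []`:
engine = sqlalchemy.ext.asyncio.create_async_engine(settings.postgres.dsn)
disposable_resources.append(
    DisposableResource(
        name="postgres engine",
        # The coroutine object is stored now and awaited later in Application.dispose().
        dispose_callback=engine.dispose(),
    )
)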
src/fastapi_app/lib/app/errors.py (new file, 22 lines)
@@ -0,0 +1,22 @@
import typing


class ApplicationError(Exception):
    def __init__(self, message: str, *args: typing.Any) -> None:
        super().__init__(*args)
        self.message = message


class DisposeError(ApplicationError):
    pass


class StartServerError(ApplicationError):
    pass


__all__ = [
    "ApplicationError",
    "DisposeError",
    "StartServerError",
]
@@ -1,5 +1,3 @@
-import logging.config as logging_config
-
 import pydantic
 import pydantic_settings
 
@@ -8,6 +6,7 @@ import lib.app.split_settings as app_split_settings
 
 class Settings(pydantic_settings.BaseSettings):
     api: app_split_settings.ApiSettings = pydantic.Field(default_factory=lambda: app_split_settings.ApiSettings())
+    app: app_split_settings.AppSettings = pydantic.Field(default_factory=lambda: app_split_settings.AppSettings())
     postgres: app_split_settings.PostgresSettings = pydantic.Field(
         default_factory=lambda: app_split_settings.PostgresSettings()
     )
@@ -17,10 +16,3 @@ class Settings(pydantic_settings.BaseSettings):
     project: app_split_settings.ProjectSettings = pydantic.Field(
         default_factory=lambda: app_split_settings.ProjectSettings()
     )
-
-
-settings = Settings()  # todo: move to initialization
-
-logging_config.dictConfig(  # todo: move to initialization
-    app_split_settings.get_logging_config(**settings.logger.model_dump())
-)
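With the .env from the top of this commit in place, nested settings are populated per env prefix by pydantic-settings; for instance (assuming ApiSettings uses the API_ prefix, which is not shown in this diff):

import lib.app.settings as app_settings

settings = app_settings.Settings()
print(settings.api.host, settings.api.port)  # 0.0.0.0 8000
print(settings.app.title)                    # FastAPI (default from AppSettings below)
print(settings.app.reload)                   # True, coerced from APP_RELOAD=True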
@@ -1,10 +1,12 @@
 from .api import *
+from .app import *
 from .logger import *
 from .postgres import *
 from .project import *
 
 __all__ = [
     "ApiSettings",
+    "AppSettings",
     "LoggingSettings",
     "PostgresSettings",
     "ProjectSettings",
src/fastapi_app/lib/app/split_settings/app.py (new file, 25 lines)
@@ -0,0 +1,25 @@
import pydantic
import pydantic_settings

import lib.app.split_settings.utils as app_split_settings_utils


class AppSettings(pydantic_settings.BaseSettings):
    model_config = pydantic_settings.SettingsConfigDict(
        env_file=app_split_settings_utils.ENV_PATH,
        env_prefix="APP_",
        env_file_encoding="utf-8",
        extra="ignore",
    )

    title: str = "FastAPI"
    version: str = "0.1.0"
    docs_url: str = "/api/openapi"
    openapi_url: str = "/api/openapi.json"
    reload: bool = False

    @pydantic.field_validator("reload")
    def validate_debug(cls, v: str | bool) -> bool:
        if isinstance(v, bool):
            return v
        return v.lower() == "true"
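The validator makes the reload flag tolerant of string values coming from the environment; a quick illustration:

import lib.app.split_settings as app_split_settings

# APP_RELOAD=True from .env (or an explicit string) ends up as a real bool.
assert app_split_settings.AppSettings(reload="True").reload is True
assert app_split_settings.AppSettings(reload=False).reload is False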