mirror of
https://github.com/ijaric/voice_assistant.git
synced 2026-03-23 01:03:45 +00:00
feat: new file structure
This commit is contained in:
2
src/assistant/.dockerignore
Normal file
2
src/assistant/.dockerignore
Normal file
@@ -0,0 +1,2 @@
|
||||
.venv
|
||||
.env
|
||||
15
src/assistant/.env.example
Normal file
15
src/assistant/.env.example
Normal file
@@ -0,0 +1,15 @@
|
||||
POSTGRES_DRIVER=postgresql+asyncpg
|
||||
POSTGRES_HOST=db
|
||||
POSTGRES_PORT=5432
|
||||
POSTGRES_USER=user
|
||||
POSTGRES_PASSWORD=Qwe123
|
||||
POSTGRES_DB_NAME=api_db
|
||||
|
||||
NGINX_PORT=80
|
||||
API_HOST=0.0.0.0
|
||||
API_PORT=8000
|
||||
|
||||
JWT_SECRET_KEY=v9LctjUWwol4XbvczPiLFMDtZ8aal7mm
|
||||
JWT_ALGORITHM=HS256
|
||||
|
||||
APP_RELOAD=True
|
||||
23
src/assistant/Dockerfile
Normal file
23
src/assistant/Dockerfile
Normal file
@@ -0,0 +1,23 @@
|
||||
FROM python:3.11
|
||||
|
||||
RUN apt-get update \
|
||||
&& DEBIAN_FRONTEND=noninteractive \
|
||||
&& apt-get install -y net-tools netcat-traditional curl \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN mkdir --parents /opt/app
|
||||
COPY pyproject.toml /opt/app/pyproject.toml
|
||||
COPY poetry.lock /opt/app/poetry.lock
|
||||
COPY poetry.toml /opt/app/poetry.toml
|
||||
|
||||
WORKDIR /opt/app
|
||||
|
||||
RUN pip install poetry \
|
||||
&& poetry install --no-dev
|
||||
|
||||
COPY bin /opt/app/bin
|
||||
COPY lib /opt/app/lib
|
||||
COPY entrypoint.sh /opt/app/entrypoint.sh
|
||||
|
||||
RUN chmod +x /opt/app/entrypoint.sh
|
||||
3
src/assistant/Makefile
Normal file
3
src/assistant/Makefile
Normal file
@@ -0,0 +1,3 @@
|
||||
include ../../common_makefile.mk
|
||||
|
||||
PROJECT_FOLDERS = bin lib tests
|
||||
6
src/assistant/README.md
Normal file
6
src/assistant/README.md
Normal file
@@ -0,0 +1,6 @@
|
||||
# Voice Assistant
|
||||
|
||||
## Install
|
||||
|
||||
1. `make init`
|
||||
2. `make all-init`
|
||||
0
src/assistant/__init__.py
Normal file
0
src/assistant/__init__.py
Normal file
114
src/assistant/alembic.ini
Normal file
114
src/assistant/alembic.ini
Normal file
@@ -0,0 +1,114 @@
|
||||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# path to migration scripts
|
||||
script_location = alembic
|
||||
|
||||
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
|
||||
# Uncomment the line below if you want the files to be prepended with date and time
|
||||
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
||||
|
||||
# sys.path path, will be prepended to sys.path if present.
|
||||
# defaults to the current working directory.
|
||||
prepend_sys_path = .
|
||||
|
||||
# timezone to use when rendering the date within the migration file
|
||||
# as well as the filename.
|
||||
# If specified, requires the python-dateutil library that can be
|
||||
# installed by adding `alembic[tz]` to the pip requirements
|
||||
# string value is passed to dateutil.tz.gettz()
|
||||
# leave blank for localtime
|
||||
# timezone =
|
||||
|
||||
# max length of characters to apply to the
|
||||
# "slug" field
|
||||
# truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
# sourceless = false
|
||||
|
||||
# version location specification; This defaults
|
||||
# to alembic/versions. When using multiple version
|
||||
# directories, initial revisions must be specified with --version-path.
|
||||
# The path separator used here should be the separator specified by "version_path_separator" below.
|
||||
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
|
||||
|
||||
# version path separator; As mentioned above, this is the character used to split
|
||||
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
|
||||
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
|
||||
# Valid values for version_path_separator are:
|
||||
#
|
||||
# version_path_separator = :
|
||||
# version_path_separator = ;
|
||||
# version_path_separator = space
|
||||
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
|
||||
|
||||
# set to 'true' to search source files recursively
|
||||
# in each "version_locations" directory
|
||||
# new in Alembic version 1.10
|
||||
# recursive_version_locations = false
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
# output_encoding = utf-8
|
||||
|
||||
file_template = %%(year)d-%%(month).2d-%%(day).2d_%%(rev)s_%%(slug)s
|
||||
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts. See the documentation for further
|
||||
# detail and examples
|
||||
|
||||
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
||||
# hooks = black
|
||||
# black.type = console_scripts
|
||||
# black.entrypoint = black
|
||||
# black.options = -l 79 REVISION_SCRIPT_FILENAME
|
||||
|
||||
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
|
||||
# hooks = ruff
|
||||
# ruff.type = exec
|
||||
# ruff.executable = %(here)s/.venv/bin/ruff
|
||||
# ruff.options = --fix REVISION_SCRIPT_FILENAME
|
||||
|
||||
# Logging configuration
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
1
src/assistant/alembic/README
Normal file
1
src/assistant/alembic/README
Normal file
@@ -0,0 +1 @@
|
||||
Generic single-database configuration with an async dbapi.
|
||||
83
src/assistant/alembic/env.py
Normal file
83
src/assistant/alembic/env.py
Normal file
@@ -0,0 +1,83 @@
|
||||
import asyncio
|
||||
from logging.config import fileConfig
|
||||
|
||||
from sqlalchemy import pool
|
||||
from sqlalchemy.engine import Connection
|
||||
from sqlalchemy.ext.asyncio import async_engine_from_config
|
||||
|
||||
import lib.app.settings as app_settings
|
||||
import lib.models as models
|
||||
from alembic import context
|
||||
|
||||
# this is the Alembic Config object, which provides
|
||||
# access to the values within the .ini file in use.
|
||||
config = context.config
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
config.set_main_option("sqlalchemy.url", app_settings.Settings().postgres.dsn)
|
||||
|
||||
target_metadata = models.Base.metadata
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
|
||||
"""Run migrations in 'offline' mode.
|
||||
|
||||
This configures the context with just a URL
|
||||
and not an Engine, though an Engine is acceptable
|
||||
here as well. By skipping the Engine creation
|
||||
we don't even need a DBAPI to be available.
|
||||
|
||||
Calls to context.execute() here emit the given string to the
|
||||
script output.
|
||||
|
||||
"""
|
||||
url = config.get_main_option("sqlalchemy.url")
|
||||
context.configure(
|
||||
url=url,
|
||||
target_metadata=target_metadata,
|
||||
literal_binds=True,
|
||||
dialect_opts={"paramstyle": "named"},
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
def do_run_migrations(connection: Connection) -> None:
|
||||
context.configure(connection=connection, target_metadata=target_metadata)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
async def run_async_migrations() -> None:
|
||||
"""In this scenario we need to create an Engine
|
||||
and associate a connection with the context.
|
||||
|
||||
"""
|
||||
|
||||
connectable = async_engine_from_config(
|
||||
config.get_section(config.config_ini_section, {}),
|
||||
prefix="sqlalchemy.",
|
||||
poolclass=pool.NullPool,
|
||||
)
|
||||
|
||||
async with connectable.connect() as connection:
|
||||
await connection.run_sync(do_run_migrations)
|
||||
|
||||
await connectable.dispose()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
|
||||
"""Run migrations in 'online' mode."""
|
||||
|
||||
asyncio.run(run_async_migrations())
|
||||
|
||||
|
||||
if context.is_offline_mode():
|
||||
run_migrations_offline()
|
||||
else:
|
||||
run_migrations_online()
|
||||
26
src/assistant/alembic/script.py.mako
Normal file
26
src/assistant/alembic/script.py.mako
Normal file
@@ -0,0 +1,26 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
${downgrades if downgrades else "pass"}
|
||||
@@ -0,0 +1,37 @@
|
||||
"""Added initial table
|
||||
|
||||
Revision ID: 9749b063b095
|
||||
Revises:
|
||||
Create Date: 2023-10-02 19:46:05.078494
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "9749b063b095"
|
||||
down_revision: Union[str, None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table(
|
||||
"joke",
|
||||
sa.Column("type", sa.String(), nullable=False),
|
||||
sa.Column("setup", sa.String(), nullable=False),
|
||||
sa.Column("punchline", sa.String(), nullable=False),
|
||||
sa.Column("id", sa.Uuid(), nullable=False),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_table("joke")
|
||||
# ### end Alembic commands ###
|
||||
0
src/assistant/bin/__init__.py
Normal file
0
src/assistant/bin/__init__.py
Normal file
37
src/assistant/bin/__main__.py
Normal file
37
src/assistant/bin/__main__.py
Normal file
@@ -0,0 +1,37 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
|
||||
import lib.app as app
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def run() -> None:
|
||||
settings = app.Settings()
|
||||
application = app.Application.from_settings(settings)
|
||||
|
||||
try:
|
||||
await application.start()
|
||||
finally:
|
||||
await application.dispose()
|
||||
|
||||
|
||||
def main() -> None:
|
||||
try:
|
||||
asyncio.run(run())
|
||||
exit(os.EX_OK)
|
||||
except SystemExit:
|
||||
exit(os.EX_OK)
|
||||
except app.ApplicationError:
|
||||
exit(os.EX_SOFTWARE)
|
||||
except KeyboardInterrupt:
|
||||
logger.info("Exited with keyboard interruption")
|
||||
exit(os.EX_OK)
|
||||
except BaseException:
|
||||
logger.exception("Unexpected error occurred")
|
||||
exit(os.EX_SOFTWARE)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
58
src/assistant/docker-compose.dev.yml
Normal file
58
src/assistant/docker-compose.dev.yml
Normal file
@@ -0,0 +1,58 @@
|
||||
version: "3"
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:15.2
|
||||
restart: always
|
||||
environment:
|
||||
POSTGRES_USER: ${POSTGRES_USER}
|
||||
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
|
||||
POSTGRES_DB: ${POSTGRES_NAME}
|
||||
env_file:
|
||||
- .env
|
||||
ports:
|
||||
- "${POSTGRES_PORT}:${POSTGRES_PORT}"
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data/
|
||||
networks:
|
||||
- backend_network
|
||||
|
||||
api:
|
||||
build:
|
||||
context: .
|
||||
container_name: fastapi_app
|
||||
image: fastapi_app
|
||||
restart: always
|
||||
entrypoint: ["/opt/app/entrypoint.sh"]
|
||||
env_file:
|
||||
- .env
|
||||
ports:
|
||||
- "${API_PORT}:${API_PORT}"
|
||||
depends_on:
|
||||
- postgres
|
||||
networks:
|
||||
- backend_network
|
||||
- api_network
|
||||
|
||||
nginx:
|
||||
image: nginx:1.25.1
|
||||
env_file:
|
||||
- .env
|
||||
ports:
|
||||
- "${NGINX_PORT}:${NGINX_PORT}"
|
||||
volumes:
|
||||
- ./nginx/nginx.conf:/etc/nginx/nginx.conf:ro
|
||||
- ./nginx/templates:/etc/nginx/templates
|
||||
depends_on:
|
||||
- api
|
||||
networks:
|
||||
- api_network
|
||||
|
||||
volumes:
|
||||
postgres_data:
|
||||
|
||||
networks:
|
||||
api_network:
|
||||
driver: bridge
|
||||
backend_network:
|
||||
driver: bridge
|
||||
58
src/assistant/docker-compose.yml
Normal file
58
src/assistant/docker-compose.yml
Normal file
@@ -0,0 +1,58 @@
|
||||
version: "3"
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:15.2
|
||||
restart: always
|
||||
environment:
|
||||
POSTGRES_USER: ${POSTGRES_USER}
|
||||
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
|
||||
POSTGRES_DB: ${POSTGRES_NAME}
|
||||
env_file:
|
||||
- .env
|
||||
expose:
|
||||
- "${POSTGRES_PORT}"
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data/
|
||||
networks:
|
||||
- backend_network
|
||||
|
||||
api:
|
||||
build:
|
||||
context: .
|
||||
container_name: api
|
||||
image: fastapi_app
|
||||
restart: always
|
||||
entrypoint: ["/opt/app/entrypoint.sh"]
|
||||
env_file:
|
||||
- .env
|
||||
expose:
|
||||
- "${API_PORT}"
|
||||
depends_on:
|
||||
- postgres
|
||||
networks:
|
||||
- backend_network
|
||||
- api_network
|
||||
|
||||
nginx:
|
||||
image: nginx:1.25.1
|
||||
env_file:
|
||||
- .env
|
||||
ports:
|
||||
- "${NGINX_PORT}:${NGINX_PORT}"
|
||||
volumes:
|
||||
- ./nginx/nginx.conf:/etc/nginx/nginx.conf:ro
|
||||
- ./nginx/templates:/etc/nginx/templates
|
||||
depends_on:
|
||||
- api
|
||||
networks:
|
||||
- api_network
|
||||
|
||||
volumes:
|
||||
postgres_data:
|
||||
|
||||
networks:
|
||||
api_network:
|
||||
driver: bridge
|
||||
backend_network:
|
||||
driver: bridge
|
||||
5
src/assistant/entrypoint.sh
Normal file
5
src/assistant/entrypoint.sh
Normal file
@@ -0,0 +1,5 @@
|
||||
#!/bin/bash
|
||||
|
||||
while ! nc -z postgres 5432; do sleep 1; done;
|
||||
|
||||
exec .venv/bin/python -m bin
|
||||
0
src/assistant/lib/__init__.py
Normal file
0
src/assistant/lib/__init__.py
Normal file
0
src/assistant/lib/api/__init__.py
Normal file
0
src/assistant/lib/api/__init__.py
Normal file
0
src/assistant/lib/api/v1/__init__.py
Normal file
0
src/assistant/lib/api/v1/__init__.py
Normal file
3
src/assistant/lib/api/v1/handlers/__init__.py
Normal file
3
src/assistant/lib/api/v1/handlers/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from .health import basic_router
|
||||
|
||||
__all__ = ["basic_router"]
|
||||
5
src/assistant/lib/api/v1/handlers/health/__init__.py
Normal file
5
src/assistant/lib/api/v1/handlers/health/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
||||
from .liveness_probe import basic_router
|
||||
|
||||
__all__ = [
|
||||
"basic_router",
|
||||
]
|
||||
15
src/assistant/lib/api/v1/handlers/health/liveness_probe.py
Normal file
15
src/assistant/lib/api/v1/handlers/health/liveness_probe.py
Normal file
@@ -0,0 +1,15 @@
|
||||
import fastapi
|
||||
|
||||
import lib.api.v1.schemas as api_shemas
|
||||
|
||||
basic_router = fastapi.APIRouter()
|
||||
|
||||
|
||||
@basic_router.get(
|
||||
"/",
|
||||
response_model=api_shemas.HealthResponse,
|
||||
summary="Статус работоспособности",
|
||||
description="Проверяет доступность сервиса FastAPI.",
|
||||
)
|
||||
async def health():
|
||||
return api_shemas.HealthResponse(status="healthy")
|
||||
3
src/assistant/lib/api/v1/schemas/__init__.py
Normal file
3
src/assistant/lib/api/v1/schemas/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from .base import HealthResponse
|
||||
|
||||
__all__ = ["HealthResponse"]
|
||||
5
src/assistant/lib/api/v1/schemas/base.py
Normal file
5
src/assistant/lib/api/v1/schemas/base.py
Normal file
@@ -0,0 +1,5 @@
|
||||
import pydantic
|
||||
|
||||
|
||||
class HealthResponse(pydantic.BaseModel):
|
||||
status: str = pydantic.Field(default=..., examples=["healthy"], description="Схема доступности сервиса")
|
||||
11
src/assistant/lib/app/__init__.py
Normal file
11
src/assistant/lib/app/__init__.py
Normal file
@@ -0,0 +1,11 @@
|
||||
from .app import Application
|
||||
from .errors import *
|
||||
from .settings import Settings
|
||||
|
||||
__all__ = [
|
||||
"Application",
|
||||
"ApplicationError",
|
||||
"DisposeError",
|
||||
"Settings",
|
||||
"StartServerError",
|
||||
]
|
||||
131
src/assistant/lib/app/app.py
Normal file
131
src/assistant/lib/app/app.py
Normal file
@@ -0,0 +1,131 @@
|
||||
import dataclasses
|
||||
import logging
|
||||
import logging.config as logging_config
|
||||
import typing
|
||||
|
||||
import fastapi
|
||||
import uvicorn
|
||||
|
||||
import lib.api.v1.handlers as api_v1_handlers
|
||||
import lib.app.errors as app_errors
|
||||
import lib.app.settings as app_settings
|
||||
import lib.app.split_settings as app_split_settings
|
||||
import lib.clients as clients
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class DisposableResource:
|
||||
name: str
|
||||
dispose_callback: typing.Awaitable[typing.Any]
|
||||
|
||||
|
||||
class Application:
|
||||
def __init__(
|
||||
self,
|
||||
settings: app_settings.Settings,
|
||||
fastapi_app: fastapi.FastAPI,
|
||||
disposable_resources: list[DisposableResource],
|
||||
) -> None:
|
||||
self._settings = settings
|
||||
self._fastapi_app = fastapi_app
|
||||
self._disposable_resources = disposable_resources
|
||||
|
||||
@classmethod
|
||||
def from_settings(cls, settings: app_settings.Settings) -> typing.Self:
|
||||
# Logging
|
||||
|
||||
logging_config.dictConfig(app_split_settings.get_logging_config(**settings.logger.model_dump()))
|
||||
|
||||
logger.info("Initializing application")
|
||||
disposable_resources = []
|
||||
|
||||
# Global clients
|
||||
|
||||
logger.info("Initializing global clients")
|
||||
postgres_client = clients.AsyncPostgresClient(settings=settings)
|
||||
|
||||
disposable_resources.append(
|
||||
DisposableResource(
|
||||
name="postgres_client",
|
||||
dispose_callback=postgres_client.dispose_callback(),
|
||||
)
|
||||
)
|
||||
|
||||
# Clients
|
||||
|
||||
logger.info("Initializing clients")
|
||||
|
||||
# Repositories
|
||||
|
||||
logger.info("Initializing repositories")
|
||||
|
||||
# Caches
|
||||
|
||||
logger.info("Initializing caches")
|
||||
|
||||
# Services
|
||||
|
||||
logger.info("Initializing services")
|
||||
|
||||
# Handlers
|
||||
|
||||
logger.info("Initializing handlers")
|
||||
liveness_probe_handler = api_v1_handlers.basic_router
|
||||
|
||||
logger.info("Creating application")
|
||||
|
||||
fastapi_app = fastapi.FastAPI(
|
||||
title=settings.app.title,
|
||||
version=settings.app.version,
|
||||
docs_url=settings.app.docs_url,
|
||||
openapi_url=settings.app.openapi_url,
|
||||
default_response_class=fastapi.responses.ORJSONResponse,
|
||||
)
|
||||
|
||||
# Routes
|
||||
fastapi_app.include_router(liveness_probe_handler, prefix="/api/v1/health", tags=["health"])
|
||||
|
||||
application = Application(
|
||||
settings=settings,
|
||||
fastapi_app=fastapi_app,
|
||||
disposable_resources=disposable_resources,
|
||||
)
|
||||
|
||||
logger.info("Initializing application finished")
|
||||
|
||||
return application
|
||||
|
||||
async def start(self) -> None:
|
||||
try:
|
||||
config = uvicorn.Config(
|
||||
app=self._fastapi_app,
|
||||
host=self._settings.api.host,
|
||||
port=self._settings.api.port,
|
||||
)
|
||||
server = uvicorn.Server(config)
|
||||
await server.serve()
|
||||
except BaseException as unexpected_error:
|
||||
logger.exception("FastAPI failed to start")
|
||||
raise app_errors.StartServerError("FastAPI failed to start") from unexpected_error
|
||||
|
||||
async def dispose(self) -> None:
|
||||
logger.info("Application is shutting down...")
|
||||
dispose_errors = []
|
||||
|
||||
for resource in self._disposable_resources:
|
||||
logger.info("Disposing %s...", resource.name)
|
||||
try:
|
||||
await resource.dispose_callback
|
||||
except Exception as unexpected_error:
|
||||
dispose_errors.append(unexpected_error)
|
||||
logger.exception("Failed to dispose %s", resource.name)
|
||||
else:
|
||||
logger.info("%s has been disposed", resource.name)
|
||||
|
||||
if len(dispose_errors) != 0:
|
||||
logger.error("Application has shut down with errors")
|
||||
raise app_errors.DisposeError("Application has shut down with errors, see logs above")
|
||||
|
||||
logger.info("Application has successfully shut down")
|
||||
22
src/assistant/lib/app/errors.py
Normal file
22
src/assistant/lib/app/errors.py
Normal file
@@ -0,0 +1,22 @@
|
||||
import typing
|
||||
|
||||
|
||||
class ApplicationError(Exception):
|
||||
def __init__(self, message: str, *args: typing.Any) -> None:
|
||||
super().__init__(*args)
|
||||
self.message = message
|
||||
|
||||
|
||||
class DisposeError(ApplicationError):
|
||||
pass
|
||||
|
||||
|
||||
class StartServerError(ApplicationError):
|
||||
pass
|
||||
|
||||
|
||||
__all__ = [
|
||||
"ApplicationError",
|
||||
"DisposeError",
|
||||
"StartServerError",
|
||||
]
|
||||
69
src/assistant/lib/app/logger.py
Normal file
69
src/assistant/lib/app/logger.py
Normal file
@@ -0,0 +1,69 @@
|
||||
import pydantic_settings
|
||||
|
||||
|
||||
class LoggingSettings(pydantic_settings.BaseSettings):
|
||||
log_format: str = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
|
||||
log_default_handlers: list[str] = [
|
||||
"console",
|
||||
]
|
||||
|
||||
log_level_handlers: str = "DEBUG"
|
||||
log_level_loggers: str = "INFO"
|
||||
log_level_root: str = "INFO"
|
||||
|
||||
|
||||
log_settings = LoggingSettings()
|
||||
|
||||
|
||||
LOGGING = {
|
||||
"version": 1,
|
||||
"disable_existing_loggers": False,
|
||||
"formatters": {
|
||||
"verbose": {"format": log_settings.log_format},
|
||||
"default": {
|
||||
"()": "uvicorn.logging.DefaultFormatter",
|
||||
"fmt": "%(levelprefix)s %(message)s",
|
||||
"use_colors": None,
|
||||
},
|
||||
"access": {
|
||||
"()": "uvicorn.logging.AccessFormatter",
|
||||
"fmt": "%(levelprefix)s %(client_addr)s - '%(request_line)s' %(status_code)s",
|
||||
},
|
||||
},
|
||||
"handlers": {
|
||||
"console": {
|
||||
"level": log_settings.log_level_handlers,
|
||||
"class": "logging.StreamHandler",
|
||||
"formatter": "verbose",
|
||||
},
|
||||
"default": {
|
||||
"formatter": "default",
|
||||
"class": "logging.StreamHandler",
|
||||
"stream": "ext://sys.stdout",
|
||||
},
|
||||
"access": {
|
||||
"formatter": "access",
|
||||
"class": "logging.StreamHandler",
|
||||
"stream": "ext://sys.stdout",
|
||||
},
|
||||
},
|
||||
"loggers": {
|
||||
"": {
|
||||
"handlers": log_settings.log_default_handlers,
|
||||
"level": log_settings.log_level_loggers,
|
||||
},
|
||||
"uvicorn.error": {
|
||||
"level": log_settings.log_level_loggers,
|
||||
},
|
||||
"uvicorn.access": {
|
||||
"handlers": ["access"],
|
||||
"level": log_settings.log_level_loggers,
|
||||
"propagate": False,
|
||||
},
|
||||
},
|
||||
"root": {
|
||||
"level": log_settings.log_level_root,
|
||||
"formatter": "verbose",
|
||||
"handlers": log_settings.log_default_handlers,
|
||||
},
|
||||
}
|
||||
18
src/assistant/lib/app/settings.py
Normal file
18
src/assistant/lib/app/settings.py
Normal file
@@ -0,0 +1,18 @@
|
||||
import pydantic
|
||||
import pydantic_settings
|
||||
|
||||
import lib.app.split_settings as app_split_settings
|
||||
|
||||
|
||||
class Settings(pydantic_settings.BaseSettings):
|
||||
api: app_split_settings.ApiSettings = pydantic.Field(default_factory=lambda: app_split_settings.ApiSettings())
|
||||
app: app_split_settings.AppSettings = pydantic.Field(default_factory=lambda: app_split_settings.AppSettings())
|
||||
postgres: app_split_settings.PostgresSettings = pydantic.Field(
|
||||
default_factory=lambda: app_split_settings.PostgresSettings()
|
||||
)
|
||||
logger: app_split_settings.LoggingSettings = pydantic.Field(
|
||||
default_factory=lambda: app_split_settings.LoggingSettings()
|
||||
)
|
||||
project: app_split_settings.ProjectSettings = pydantic.Field(
|
||||
default_factory=lambda: app_split_settings.ProjectSettings()
|
||||
)
|
||||
14
src/assistant/lib/app/split_settings/__init__.py
Normal file
14
src/assistant/lib/app/split_settings/__init__.py
Normal file
@@ -0,0 +1,14 @@
|
||||
from .api import *
|
||||
from .app import *
|
||||
from .logger import *
|
||||
from .postgres import *
|
||||
from .project import *
|
||||
|
||||
__all__ = [
|
||||
"ApiSettings",
|
||||
"AppSettings",
|
||||
"LoggingSettings",
|
||||
"PostgresSettings",
|
||||
"ProjectSettings",
|
||||
"get_logging_config",
|
||||
]
|
||||
15
src/assistant/lib/app/split_settings/api.py
Normal file
15
src/assistant/lib/app/split_settings/api.py
Normal file
@@ -0,0 +1,15 @@
|
||||
import pydantic_settings
|
||||
|
||||
import lib.app.split_settings.utils as app_split_settings_utils
|
||||
|
||||
|
||||
class ApiSettings(pydantic_settings.BaseSettings):
|
||||
model_config = pydantic_settings.SettingsConfigDict(
|
||||
env_file=app_split_settings_utils.ENV_PATH,
|
||||
env_prefix="API_",
|
||||
env_file_encoding="utf-8",
|
||||
extra="ignore",
|
||||
)
|
||||
|
||||
host: str = "0.0.0.0"
|
||||
port: int = 8000
|
||||
25
src/assistant/lib/app/split_settings/app.py
Normal file
25
src/assistant/lib/app/split_settings/app.py
Normal file
@@ -0,0 +1,25 @@
|
||||
import pydantic
|
||||
import pydantic_settings
|
||||
|
||||
import lib.app.split_settings.utils as app_split_settings_utils
|
||||
|
||||
|
||||
class AppSettings(pydantic_settings.BaseSettings):
|
||||
model_config = pydantic_settings.SettingsConfigDict(
|
||||
env_file=app_split_settings_utils.ENV_PATH,
|
||||
env_prefix="APP_",
|
||||
env_file_encoding="utf-8",
|
||||
extra="ignore",
|
||||
)
|
||||
|
||||
title: str = "FastAPI"
|
||||
version: str = "0.1.0"
|
||||
docs_url: str = "/api/openapi"
|
||||
openapi_url: str = "/api/openapi.json"
|
||||
reload: bool = False
|
||||
|
||||
@pydantic.field_validator("reload")
|
||||
def validate_debug(cls, v: str | bool) -> bool:
|
||||
if isinstance(v, bool):
|
||||
return v
|
||||
return v.lower() == "true"
|
||||
79
src/assistant/lib/app/split_settings/logger.py
Normal file
79
src/assistant/lib/app/split_settings/logger.py
Normal file
@@ -0,0 +1,79 @@
|
||||
import pydantic_settings
|
||||
|
||||
import lib.app.split_settings.utils as app_split_settings_utils
|
||||
|
||||
|
||||
class LoggingSettings(pydantic_settings.BaseSettings):
|
||||
model_config = pydantic_settings.SettingsConfigDict(
|
||||
env_file=app_split_settings_utils.ENV_PATH, env_file_encoding="utf-8", extra="ignore"
|
||||
)
|
||||
|
||||
log_format: str = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
|
||||
log_default_handlers: list[str] = [
|
||||
"console",
|
||||
]
|
||||
|
||||
log_level_handlers: str = "INFO"
|
||||
log_level_loggers: str = "INFO"
|
||||
log_level_root: str = "INFO"
|
||||
|
||||
|
||||
def get_logging_config(
|
||||
log_format: str,
|
||||
log_default_handlers: list[str],
|
||||
log_level_handlers: str,
|
||||
log_level_loggers: str,
|
||||
log_level_root: str,
|
||||
):
|
||||
return {
|
||||
"version": 1,
|
||||
"disable_existing_loggers": False,
|
||||
"formatters": {
|
||||
"verbose": {"format": log_format},
|
||||
"default": {
|
||||
"()": "uvicorn.logging.DefaultFormatter",
|
||||
"fmt": "%(levelprefix)s %(message)s",
|
||||
"use_colors": None,
|
||||
},
|
||||
"access": {
|
||||
"()": "uvicorn.logging.AccessFormatter",
|
||||
"fmt": "%(levelprefix)s %(client_addr)s - '%(request_line)s' %(status_code)s",
|
||||
},
|
||||
},
|
||||
"handlers": {
|
||||
"console": {
|
||||
"level": log_level_handlers,
|
||||
"class": "logging.StreamHandler",
|
||||
"formatter": "verbose",
|
||||
},
|
||||
"default": {
|
||||
"formatter": "default",
|
||||
"class": "logging.StreamHandler",
|
||||
"stream": "ext://sys.stdout",
|
||||
},
|
||||
"access": {
|
||||
"formatter": "access",
|
||||
"class": "logging.StreamHandler",
|
||||
"stream": "ext://sys.stdout",
|
||||
},
|
||||
},
|
||||
"loggers": {
|
||||
"": {
|
||||
"handlers": log_default_handlers,
|
||||
"level": log_level_loggers,
|
||||
},
|
||||
"uvicorn.error": {
|
||||
"level": log_level_loggers,
|
||||
},
|
||||
"uvicorn.access": {
|
||||
"handlers": ["access"],
|
||||
"level": log_level_loggers,
|
||||
"propagate": False,
|
||||
},
|
||||
},
|
||||
"root": {
|
||||
"level": log_level_root,
|
||||
"formatter": "verbose",
|
||||
"handlers": log_default_handlers,
|
||||
},
|
||||
}
|
||||
44
src/assistant/lib/app/split_settings/postgres.py
Normal file
44
src/assistant/lib/app/split_settings/postgres.py
Normal file
@@ -0,0 +1,44 @@
|
||||
import pydantic
|
||||
import pydantic_settings
|
||||
|
||||
import lib.app.split_settings.utils as app_split_settings_utils
|
||||
|
||||
|
||||
class PostgresSettings(pydantic_settings.BaseSettings):
|
||||
"""Postgres settings."""
|
||||
|
||||
model_config = pydantic_settings.SettingsConfigDict(
|
||||
env_file=app_split_settings_utils.ENV_PATH,
|
||||
env_prefix="POSTGRES_",
|
||||
env_file_encoding="utf-8",
|
||||
extra="ignore",
|
||||
)
|
||||
|
||||
# Connection settings
|
||||
driver: str = "postgresql+asyncpg"
|
||||
db_name: str = "database_name"
|
||||
host: str = "localhost"
|
||||
port: int = 5432
|
||||
user: str = "app"
|
||||
password: pydantic.SecretStr = pydantic.Field(
|
||||
default=..., validation_alias=pydantic.AliasChoices("password", "postgres_password")
|
||||
)
|
||||
|
||||
# Engine settings
|
||||
pool_size: int = 50
|
||||
pool_pre_ping: bool = True
|
||||
echo: bool = False
|
||||
|
||||
# Session settings
|
||||
auto_commit: bool = False
|
||||
auto_flush: bool = False
|
||||
expire_on_commit: bool = False
|
||||
|
||||
@property
|
||||
def dsn(self) -> str:
|
||||
password = self.password.get_secret_value()
|
||||
return f"{self.driver}://{self.user}:{password}@{self.host}:{self.port}"
|
||||
|
||||
@property
|
||||
def dsn_as_safe_url(self) -> str:
|
||||
return f"{self.driver}://{self.user}:***@{self.host}:{self.port}"
|
||||
22
src/assistant/lib/app/split_settings/project.py
Normal file
22
src/assistant/lib/app/split_settings/project.py
Normal file
@@ -0,0 +1,22 @@
|
||||
import pydantic
|
||||
import pydantic_settings
|
||||
|
||||
import lib.app.split_settings.utils as app_split_settings_utils
|
||||
|
||||
|
||||
class ProjectSettings(pydantic_settings.BaseSettings):
    """Project-wide settings: debug flag and JWT signing configuration."""

    model_config = pydantic_settings.SettingsConfigDict(
        env_file=app_split_settings_utils.ENV_PATH,
        env_file_encoding="utf-8",
        extra="ignore",
    )

    # BUG FIX: this field was annotated ``str`` while its validator returned a
    # bool, so the declared type never matched the runtime value.
    debug: bool = False
    # Required; read from JWT_SECRET_KEY in the environment / .env file.
    jwt_secret_key: str = pydantic.Field(default=..., validation_alias="jwt_secret_key")
    jwt_algorithm: str = "HS256"

    @pydantic.field_validator("debug", mode="before")
    @classmethod
    def validate_debug(cls, v: str | bool) -> bool:
        # Preserve the original semantics: only the case-insensitive string
        # "true" enables debug; any other string disables it.
        if isinstance(v, str):
            return v.lower() == "true"
        return bool(v)
|
||||
4
src/assistant/lib/app/split_settings/utils.py
Normal file
4
src/assistant/lib/app/split_settings/utils.py
Normal file
@@ -0,0 +1,4 @@
|
||||
import pathlib

# Project root (src/assistant), four levels above this module
# (lib/app/split_settings/utils.py).
BASE_PATH = pathlib.Path(__file__).parent.parent.parent.parent.resolve()
# Runtime .env file consumed by the pydantic-settings classes.
ENV_PATH = BASE_PATH / ".env"
|
||||
3
src/assistant/lib/clients/__init__.py
Normal file
3
src/assistant/lib/clients/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
# Public exports of the clients package.
from .postgres import AsyncPostgresClient

__all__ = ["AsyncPostgresClient"]
|
||||
30
src/assistant/lib/clients/postgres.py
Normal file
30
src/assistant/lib/clients/postgres.py
Normal file
@@ -0,0 +1,30 @@
|
||||
import sqlalchemy.ext.asyncio as sa_asyncio
|
||||
|
||||
import lib.app.settings as app_settings
|
||||
|
||||
|
||||
class AsyncPostgresClient:
    """Async Postgres client owning the engine and producing sessionmakers."""

    def __init__(self, settings: app_settings.Settings) -> None:
        self.settings = settings.postgres
        self.async_engine = sa_asyncio.create_async_engine(
            url=self.settings.dsn,
            pool_size=self.settings.pool_size,
            pool_pre_ping=self.settings.pool_pre_ping,
            echo=self.settings.echo,
            future=True,
        )
        # BUG FIX: the attribute was originally misspelled "async_enging".
        # The alias is kept so any existing callers do not break.
        self.async_enging = self.async_engine

    def get_async_session(self) -> sa_asyncio.async_sessionmaker[sa_asyncio.AsyncSession]:
        """Build an ``async_sessionmaker`` bound to the shared engine."""
        # NOTE(review): Session "autocommit" mode was removed in SQLAlchemy
        # 2.0 — verify the installed version still accepts this keyword.
        return sa_asyncio.async_sessionmaker(
            bind=self.async_engine,
            autocommit=self.settings.auto_commit,
            autoflush=self.settings.auto_flush,
            expire_on_commit=self.settings.expire_on_commit,
        )

    async def dispose_callback(self) -> None:
        """Dispose the engine's connection pool; call on application shutdown."""
        await self.async_engine.dispose()
|
||||
4
src/assistant/lib/models/__init__.py
Normal file
4
src/assistant/lib/models/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
||||
# Public exports of the models package: ORM base classes and API schemas.
from .orm import Base, IdCreatedUpdatedBaseMixin
from .token import Token

__all__ = ["Base", "IdCreatedUpdatedBaseMixin", "Token"]
|
||||
3
src/assistant/lib/models/orm/__init__.py
Normal file
3
src/assistant/lib/models/orm/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
# Public exports of the ORM sub-package.
from .base import Base, IdCreatedUpdatedBaseMixin

__all__ = ["Base", "IdCreatedUpdatedBaseMixin"]
|
||||
35
src/assistant/lib/models/orm/base.py
Normal file
35
src/assistant/lib/models/orm/base.py
Normal file
@@ -0,0 +1,35 @@
|
||||
import datetime
|
||||
import uuid
|
||||
|
||||
import sqlalchemy
|
||||
import sqlalchemy.dialects.postgresql
|
||||
import sqlalchemy.ext.declarative
|
||||
import sqlalchemy.orm as sa_orm
|
||||
import sqlalchemy.sql as sa_sql
|
||||
|
||||
|
||||
class Base(sa_orm.DeclarativeBase):
    """Declarative base: table name derived from the class name, UUID PK."""

    @sqlalchemy.ext.declarative.declared_attr.directive
    def __tablename__(cls):
        # Table name is the lowercased model class name.
        return cls.__name__.lower()

    # Fetch server-generated column defaults eagerly after INSERT/UPDATE.
    __mapper_args__ = {"eager_defaults": True}

    # Client-side generated UUID primary key shared by all models.
    id: sa_orm.Mapped[uuid.UUID] = sa_orm.mapped_column(primary_key=True, default=uuid.uuid4)
|
||||
|
||||
|
||||
class IdCreatedUpdatedBaseMixin:
    """Mixin adding ``created``/``updated`` timestamp columns.

    Both timestamps default to the database's ``now()``; ``updated`` is also
    refreshed on every UPDATE issued through SQLAlchemy.
    """

    # NOTE(review): alternative primary-key declarations kept for reference;
    # the active UUID primary key lives on ``Base``.
    # id: sa_orm.Mapped[int] = sa_orm.mapped_column(primary_key=True)
    # id_field: sa_orm.Mapped[uuid.UUID] = sa_orm.mapped_column(name="uuid", primary_key=True, unique=True, default=uuid.uuid4, nullable=False)

    # Set by the database at INSERT time.
    created: sa_orm.Mapped[datetime.datetime] = sa_orm.mapped_column(server_default=sa_sql.func.now())
    # Set at INSERT; ``onupdate`` re-evaluates now() on each UPDATE statement.
    updated: sa_orm.Mapped[datetime.datetime] = sa_orm.mapped_column(
        server_default=sa_sql.func.now(), onupdate=sa_sql.func.now()
    )

    # __mapper_args__ = {"eager_defaults": True}

    # @sqlalchemy.ext.declarative.declared_attr.directive
    # def __tablename__(cls) -> str:
    #     return cls.__name__.lower()
|
||||
9
src/assistant/lib/models/token.py
Normal file
9
src/assistant/lib/models/token.py
Normal file
@@ -0,0 +1,9 @@
|
||||
import uuid
|
||||
|
||||
import pydantic
|
||||
|
||||
|
||||
# TODO: TBU
class Token(pydantic.BaseModel):
    """Decoded JWT payload as validated by ``get_token_data``."""

    # "sub" claim: the authenticated user's UUID.
    sub: uuid.UUID
    # "exp" claim (unix timestamp); None when the token carries no expiry.
    exp: int | None = None
|
||||
0
src/assistant/lib/utils/__init__.py
Normal file
0
src/assistant/lib/utils/__init__.py
Normal file
25
src/assistant/lib/utils/token.py
Normal file
25
src/assistant/lib/utils/token.py
Normal file
@@ -0,0 +1,25 @@
|
||||
import fastapi
|
||||
import fastapi.security
|
||||
import jose
|
||||
import jose.jwt
|
||||
import pydantic
|
||||
|
||||
import lib.app.settings as app_settings
|
||||
import lib.models as models
|
||||
|
||||
|
||||
def get_token_data(
    authorization: fastapi.security.HTTPAuthorizationCredentials = fastapi.Security(fastapi.security.HTTPBearer()),
) -> models.Token:
    """FastAPI dependency: decode the Bearer JWT and return its payload.

    Raises:
        fastapi.HTTPException: 401 when the token cannot be decoded or its
            payload does not validate against the ``Token`` model.
    """
    # NOTE(review): settings are re-read from the environment on every
    # request; consider caching if this dependency becomes hot.
    project = app_settings.Settings().project

    try:
        claims = jose.jwt.decode(
            authorization.credentials,
            project.jwt_secret_key,
            algorithms=[project.jwt_algorithm],
        )
        return models.Token(**claims)
    except (jose.JWTError, pydantic.ValidationError) as error:
        raise fastapi.HTTPException(
            status_code=fastapi.status.HTTP_401_UNAUTHORIZED,
            detail="Could not validate credentials",
        ) from error
|
||||
98
src/assistant/nginx/mime.types
Normal file
98
src/assistant/nginx/mime.types
Normal file
@@ -0,0 +1,98 @@
|
||||
# MIME type map; included by nginx.conf ("include mime.types;").
types {
    text/html                             html htm shtml;
    text/css                              css;
    text/xml                              xml;
    image/gif                             gif;
    image/jpeg                            jpeg jpg;
    application/javascript                js;
    application/atom+xml                  atom;
    application/rss+xml                   rss;

    text/mathml                           mml;
    text/plain                            txt;
    text/vnd.sun.j2me.app-descriptor      jad;
    text/vnd.wap.wml                      wml;
    text/x-component                      htc;

    image/avif                            avif;
    image/png                             png;
    image/svg+xml                         svg svgz;
    image/tiff                            tif tiff;
    image/vnd.wap.wbmp                    wbmp;
    image/webp                            webp;
    image/x-icon                          ico;
    image/x-jng                           jng;
    image/x-ms-bmp                        bmp;

    font/woff                             woff;
    font/woff2                            woff2;

    application/java-archive              jar war ear;
    application/json                      json;
    application/mac-binhex40              hqx;
    application/msword                    doc;
    application/pdf                       pdf;
    application/postscript                ps eps ai;
    application/rtf                       rtf;
    application/vnd.apple.mpegurl         m3u8;
    application/vnd.google-earth.kml+xml  kml;
    application/vnd.google-earth.kmz      kmz;
    application/vnd.ms-excel              xls;
    application/vnd.ms-fontobject         eot;
    application/vnd.ms-powerpoint         ppt;
    application/vnd.oasis.opendocument.graphics      odg;
    application/vnd.oasis.opendocument.presentation  odp;
    application/vnd.oasis.opendocument.spreadsheet   ods;
    application/vnd.oasis.opendocument.text          odt;
    application/vnd.openxmlformats-officedocument.presentationml.presentation
                                          pptx;
    application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
                                          xlsx;
    application/vnd.openxmlformats-officedocument.wordprocessingml.document
                                          docx;
    application/vnd.wap.wmlc              wmlc;
    application/wasm                      wasm;
    application/x-7z-compressed           7z;
    application/x-cocoa                   cco;
    application/x-java-archive-diff       jardiff;
    application/x-java-jnlp-file          jnlp;
    application/x-makeself                run;
    application/x-perl                    pl pm;
    application/x-pilot                   prc pdb;
    application/x-rar-compressed          rar;
    application/x-redhat-package-manager  rpm;
    application/x-sea                     sea;
    application/x-shockwave-flash         swf;
    application/x-stuffit                 sit;
    application/x-tcl                     tcl tk;
    application/x-x509-ca-cert            der pem crt;
    application/x-xpinstall               xpi;
    application/xhtml+xml                 xhtml;
    application/xspf+xml                  xspf;
    application/zip                       zip;

    application/octet-stream              bin exe dll;
    application/octet-stream              deb;
    application/octet-stream              dmg;
    application/octet-stream              iso img;
    application/octet-stream              msi msp msm;

    audio/midi                            mid midi kar;
    audio/mpeg                            mp3;
    audio/ogg                             ogg;
    audio/x-m4a                           m4a;
    audio/x-realaudio                     ra;

    video/3gpp                            3gpp 3gp;
    video/mp2t                            ts;
    video/mp4                             mp4;
    video/mpeg                            mpeg mpg;
    video/quicktime                       mov;
    video/webm                            webm;
    video/x-flv                           flv;
    video/x-m4v                           m4v;
    video/x-mng                           mng;
    video/x-ms-asf                        asx asf;
    video/x-ms-wmv                        wmv;
    video/x-msvideo                       avi;
}
|
||||
39
src/assistant/nginx/nginx.conf
Normal file
39
src/assistant/nginx/nginx.conf
Normal file
@@ -0,0 +1,39 @@
|
||||
# Main nginx configuration for the assistant API reverse proxy.
worker_processes 1;

events {
    worker_connections 1024;
}

http {
    # Hide the nginx version in responses and error pages.
    server_tokens off;
    include mime.types;
    # Access-log format; $request_id correlates entries with the upstream API.
    log_format main '$remote_addr - $remote_user [$time_local] "$request" '
                    '$status $body_bytes_sent "$http_referer" '
                    '"$http_user_agent" "$http_x_forwarded_for" "$request_id"';

    access_log /dev/stdout main;

    sendfile on;
    tcp_nodelay on;
    tcp_nopush on;
    client_max_body_size 200m;

    gzip on;
    gzip_comp_level 3;
    gzip_min_length 1000;
    gzip_types
        text/plain
        text/css
        application/json
        application/x-javascript
        text/xml
        text/javascript;

    # Expose the generated request id to clients for tracing.
    add_header X-Request-Id $request_id;
    proxy_redirect off;
    proxy_set_header Host $host;
    proxy_set_header X-Real-IP $remote_addr;
    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;

    # Per-server config rendered from templates/api.conf.template.
    include conf.d/api.conf;
}
|
||||
11
src/assistant/nginx/templates/api.conf.template
Normal file
11
src/assistant/nginx/templates/api.conf.template
Normal file
@@ -0,0 +1,11 @@
|
||||
# Rendered by the nginx image's envsubst; ${NGINX_PORT} and ${API_PORT}
# are substituted from the environment (see .env.example).
server {
    listen ${NGINX_PORT} default_server;
    listen [::]:${NGINX_PORT} default_server;
    server_name _;

    # Proxy API traffic to the application container ("api" service).
    location /api {
        proxy_pass http://api:${API_PORT}/api;
        proxy_set_header X-Request-Id $request_id;
    }

}
|
||||
1415
src/assistant/poetry.lock
generated
Normal file
1415
src/assistant/poetry.lock
generated
Normal file
File diff suppressed because it is too large
Load Diff
3
src/assistant/poetry.toml
Normal file
3
src/assistant/poetry.toml
Normal file
@@ -0,0 +1,3 @@
|
||||
# Poetry local configuration: keep the virtualenv inside the project (.venv),
# matching .dockerignore and the pyright venv settings.
[virtualenvs]
create = true
in-project = true
|
||||
150
src/assistant/pyproject.toml
Normal file
150
src/assistant/pyproject.toml
Normal file
@@ -0,0 +1,150 @@
|
||||
[build-system]
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core"]

[tool.black]
line-length = 120
target-version = ['py311']

[tool.isort]
# NOTE(review): "backend" does not match this project's folders
# (bin/lib/tests) — confirm whether it should be "lib".
known_first_party = ["backend", "tests"]
line_length = 120
profile = "black"
py_version = "311"

[tool.poetry]
authors = ["ijaric@gmail.com", "jsdio@jsdio.ru"]
description = ""
name = "fastapi_project"
readme = "README.md"
version = "0.1.0"

[tool.poetry.dependencies]
alembic = "^1.12.0"
asyncpg = "^0.28.0"
fastapi = "0.103.1"
greenlet = "^2.0.2"
httpx = "^0.25.0"
orjson = "^3.9.7"
psycopg2 = "^2.9.7"
pydantic = {extras = ["email"], version = "^2.3.0"}
pydantic-settings = "^2.0.3"
python = "^3.11"
python-jose = "^3.3.0"
sqlalchemy = "^2.0.20"
uvicorn = "^0.23.2"

# NOTE(review): [tool.poetry.dev-dependencies] is the legacy table name;
# current Poetry prefers [tool.poetry.group.dev.dependencies].
[tool.poetry.dev-dependencies]
black = "^23.7.0"
isort = "^5.12.0"
pylint = "^2.17.5"
pylint-pydantic = "^0.2.4"
pylint-pytest = "^1.1.2"
pyright = "^1.1.318"
pyupgrade = "^3.10.1"
ruff = "^0.0.282"
sort-all = "^1.2.0"
toml-sort = "^0.23.1"

[tool.pylint]
disable = [
  "broad-except",
  "cannot-enumerate-pytest-fixtures",
  "consider-using-from-import",
  "consider-using-sys-exit",
  "duplicate-code",
  "fixme",
  "missing-docstring",
  "no-member",
  "protected-access",
  "too-few-public-methods",
  "too-many-instance-attributes",
  "too-many-locals",
  "too-many-statements",
  "unnecessary-ellipsis"
]
extension-pkg-allow-list = [
  "orjson",
  "pydantic"
]
ignore-path = [
  "^.*venv/.*$"
]
load-plugins = [
  "pylint_pydantic",
  "pylint_pytest"
]
max-args = 15
max-line-length = 120
recursive = true

[tool.pylint.basic]
# Allow up to two leading underscores on private names.
argument-rgx = "^_{0,2}[a-z][a-z0-9]*$"
attr-rgx = "^_{0,2}[a-z][a-z0-9]*$"
class-attribute-rgx = "^_{0,2}[a-zA-Z][a-zA-Z0-9_]*$"
variable-rgx = "^_{0,2}[a-z][a-z0-9_]*$"
|
||||
|
||||
[tool.pyright]
exclude = [
  ".venv"
]
pythonPlatform = "All"
pythonVersion = "3.11"
# BUG FIX: was misspelled "reportConstantRedefenition"; pyright ignores
# unknown settings, so the intended suppression never took effect.
reportConstantRedefinition = "none"
reportMissingTypeStubs = "none"
reportPrivateUsage = "information"
reportPropertyTypeMismatch = "warning"
reportUninitializedInstanceVariable = "warning"
reportUnknownMemberType = "none"
reportUnnecessaryTypeIgnoreComment = "warning"
reportUntypedFunctionDecorator = "warning"
typeCheckingMode = "strict"
useLibraryCodeForTypes = true
venv = ".venv"
venvPath = "."
|
||||
|
||||
# Ruff 0.0.x-era config: "select"/"ignore" live at the top level
# (newer ruff moves them under [tool.ruff.lint]).
[tool.ruff]
ignore = [
  # Pyright automatically infers the type of `self`
  "ANN101",
  # Pyright automatically infers the type of `cls`
  "ANN102",
  # In some cases actively detrimental; somewhat conflicts with black
  "COM",
  # Ignore missing docstrings
  "D102",
  # In combination with D213, this results in noisy diffs and inconsistencies
  # See also <https://github.com/charliermarsh/ruff/issues/4174>.
  "D200",
  # This results inconsistencies between function and class docstrings
  # See also <https://github.com/charliermarsh/ruff/issues/4175>.
  "D202",
  # D211 is preferred since the extra blank line isn't visually useful
  "D203",
  # D213 is preferred since it's more readable and allows more characters
  "D212",
  # Ignore missing docstrings
  "D414",
  # Covered by D401, which is more restrictive
  "D415",
  # Type-checkers interpret redundant `as` as exporting an item
  "PLC0414",
  # Permit using alias for 'import'
  "PLR0402",
  # Causes churn and awful looking import blocks for little gain
  "TCH"
]
select = ["ALL"]

[tool.ruff.per-file-ignores]
# Tests may omit docstrings and use bare asserts.
"tests/*" = [
  "D100",
  "D103",
  "D104",
  "S101"
]

[tool.tomlsort]
all = true
ignore_case = true
in_place = true
|
||||
0
src/assistant/tests/__init__.py
Normal file
0
src/assistant/tests/__init__.py
Normal file
Reference in New Issue
Block a user