
Merge pull request #35 from ijaric/feature/templates

Assistant Project
Artem Litvinov 2023-10-03 21:36:36 +01:00 committed by GitHub
commit 005dd3ce0e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
110 changed files with 2785 additions and 7 deletions

src/assistant/README.md (new file, 6 lines)

@@ -0,0 +1,6 @@
# Voice Assistant

## Install

1. `make init`
2. `make all-init`

@@ -0,0 +1,3 @@
from .health import basic_router
__all__ = ["basic_router"]

@@ -7,9 +7,9 @@ basic_router = fastapi.APIRouter()
 @basic_router.get(
     "/",
-    response_model=api_shemas.HealthResponseModel,
+    response_model=api_shemas.HealthResponse,
     summary="Статус работоспособности",
     description="Проверяет доступность сервиса FastAPI.",
 )
 async def health():
-    return api_shemas.HealthResponseModel(status="healthy")
+    return api_shemas.HealthResponse(status="healthy")

@@ -0,0 +1,3 @@
from .base import HealthResponse
__all__ = ["HealthResponse"]

@@ -1,5 +1,5 @@
 import pydantic
 
-class HealthResponseModel(pydantic.BaseModel):
+class HealthResponse(pydantic.BaseModel):
     status: str = pydantic.Field(default=..., examples=["healthy"], description="Схема доступности сервиса")

@@ -0,0 +1,131 @@
import dataclasses
import logging
import logging.config as logging_config
import typing

import fastapi
import uvicorn

import lib.api.v1.handlers as api_v1_handlers
import lib.app.errors as app_errors
import lib.app.settings as app_settings
import lib.app.split_settings as app_split_settings
import lib.clients as clients

logger = logging.getLogger(__name__)


@dataclasses.dataclass
class DisposableResource:
    name: str
    dispose_callback: typing.Awaitable[typing.Any]


class Application:
    def __init__(
        self,
        settings: app_settings.Settings,
        fastapi_app: fastapi.FastAPI,
        disposable_resources: list[DisposableResource],
    ) -> None:
        self._settings = settings
        self._fastapi_app = fastapi_app
        self._disposable_resources = disposable_resources

    @classmethod
    def from_settings(cls, settings: app_settings.Settings) -> typing.Self:
        # Logging
        logging_config.dictConfig(app_split_settings.get_logging_config(**settings.logger.model_dump()))

        logger.info("Initializing application")
        disposable_resources = []

        # Global clients
        logger.info("Initializing global clients")
        postgres_client = clients.AsyncPostgresClient(settings=settings)
        disposable_resources.append(
            DisposableResource(
                name="postgres_client",
                dispose_callback=postgres_client.dispose_callback(),
            )
        )

        # Clients
        logger.info("Initializing clients")

        # Repositories
        logger.info("Initializing repositories")

        # Caches
        logger.info("Initializing caches")

        # Services
        logger.info("Initializing services")

        # Handlers
        logger.info("Initializing handlers")
        liveness_probe_handler = api_v1_handlers.basic_router

        logger.info("Creating application")
        fastapi_app = fastapi.FastAPI(
            title=settings.app.title,
            version=settings.app.version,
            docs_url=settings.app.docs_url,
            openapi_url=settings.app.openapi_url,
            default_response_class=fastapi.responses.ORJSONResponse,
        )

        # Routes
        fastapi_app.include_router(liveness_probe_handler, prefix="/api/v1/health", tags=["health"])

        application = Application(
            settings=settings,
            fastapi_app=fastapi_app,
            disposable_resources=disposable_resources,
        )

        logger.info("Initializing application finished")

        return application

    async def start(self) -> None:
        try:
            config = uvicorn.Config(
                app=self._fastapi_app,
                host=self._settings.api.host,
                port=self._settings.api.port,
            )
            server = uvicorn.Server(config)
            await server.serve()
        except BaseException as unexpected_error:
            logger.exception("FastAPI failed to start")
            raise app_errors.StartServerError("FastAPI failed to start") from unexpected_error

    async def dispose(self) -> None:
        logger.info("Application is shutting down...")
        dispose_errors = []

        for resource in self._disposable_resources:
            logger.info("Disposing %s...", resource.name)
            try:
                await resource.dispose_callback
            except Exception as unexpected_error:
                dispose_errors.append(unexpected_error)
                logger.exception("Failed to dispose %s", resource.name)
            else:
                logger.info("%s has been disposed", resource.name)

        if len(dispose_errors) != 0:
            logger.error("Application has shut down with errors")
            raise app_errors.DisposeError("Application has shut down with errors, see logs above")

        logger.info("Application has successfully shut down")

@@ -0,0 +1,4 @@
from .orm import Base, IdCreatedUpdatedBaseMixin
from .token import Token
__all__ = ["Base", "IdCreatedUpdatedBaseMixin", "Token"]

@@ -0,0 +1,3 @@
from .base import Base, IdCreatedUpdatedBaseMixin
__all__ = ["Base", "IdCreatedUpdatedBaseMixin"]

src/assistant/poetry.lock (generated, 1415 lines): diff suppressed because it is too large

@@ -0,0 +1,150 @@
[build-system]
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core"]
[tool.black]
line-length = 120
target-version = ['py311']
[tool.isort]
known_first_party = ["backend", "tests"]
line_length = 120
profile = "black"
py_version = "311"
[tool.poetry]
authors = ["ijaric@gmail.com", "jsdio@jsdio.ru"]
description = ""
name = "fastapi_project"
readme = "README.md"
version = "0.1.0"
[tool.poetry.dependencies]
alembic = "^1.12.0"
asyncpg = "^0.28.0"
fastapi = "0.103.1"
greenlet = "^2.0.2"
httpx = "^0.25.0"
orjson = "^3.9.7"
psycopg2 = "^2.9.7"
pydantic = {extras = ["email"], version = "^2.3.0"}
pydantic-settings = "^2.0.3"
python = "^3.11"
python-jose = "^3.3.0"
sqlalchemy = "^2.0.20"
uvicorn = "^0.23.2"
[tool.poetry.dev-dependencies]
black = "^23.7.0"
isort = "^5.12.0"
pylint = "^2.17.5"
pylint-pydantic = "^0.2.4"
pylint-pytest = "^1.1.2"
pyright = "^1.1.318"
pyupgrade = "^3.10.1"
ruff = "^0.0.282"
sort-all = "^1.2.0"
toml-sort = "^0.23.1"
[tool.pylint]
disable = [
"broad-except",
"cannot-enumerate-pytest-fixtures",
"consider-using-from-import",
"consider-using-sys-exit",
"duplicate-code",
"fixme",
"missing-docstring",
"no-member",
"protected-access",
"too-few-public-methods",
"too-many-instance-attributes",
"too-many-locals",
"too-many-statements",
"unnecessary-ellipsis"
]
extension-pkg-allow-list = [
"orjson",
"pydantic"
]
ignore-path = [
"^.*venv/.*$"
]
load-plugins = [
"pylint_pydantic",
"pylint_pytest"
]
max-args = 15
max-line-length = 120
recursive = true
[tool.pylint.basic]
argument-rgx = "^_{0,2}[a-z][a-z0-9]*$"
attr-rgx = "^_{0,2}[a-z][a-z0-9]*$"
class-attribute-rgx = "^_{0,2}[a-zA-Z][a-zA-Z0-9_]*$"
variable-rgx = "^_{0,2}[a-z][a-z0-9_]*$"
[tool.pyright]
exclude = [
".venv"
]
pythonPlatform = "All"
pythonVersion = "3.11"
reportConstantRedefinition = "none"
reportMissingTypeStubs = "none"
reportPrivateUsage = "information"
reportPropertyTypeMismatch = "warning"
reportUninitializedInstanceVariable = "warning"
reportUnknownMemberType = "none"
reportUnnecessaryTypeIgnoreComment = "warning"
reportUntypedFunctionDecorator = "warning"
typeCheckingMode = "strict"
useLibraryCodeForTypes = true
venv = ".venv"
venvPath = "."
[tool.ruff]
ignore = [
# Pyright automatically infers the type of `self`
"ANN101",
# Pyright automatically infers the type of `cls`
"ANN102",
# In some cases actively detrimental; somewhat conflicts with black
"COM",
# Ignore missing docstrings
"D102",
# In combination with D213, this results in noisy diffs and inconsistencies
# See also <https://github.com/charliermarsh/ruff/issues/4174>.
"D200",
# This results in inconsistencies between function and class docstrings
# See also <https://github.com/charliermarsh/ruff/issues/4175>.
"D202",
# D211 is preferred since the extra blank line isn't visually useful
"D203",
# D213 is preferred since it's more readable and allows more characters
"D212",
# Ignore missing docstrings
"D414",
# Covered by D401, which is more restrictive
"D415",
# Type-checkers interpret redundant `as` as exporting an item
"PLC0414",
# Permit using alias for 'import'
"PLR0402",
# Causes churn and awful looking import blocks for little gain
"TCH"
]
select = ["ALL"]
[tool.ruff.per-file-ignores]
"tests/*" = [
"D100",
"D103",
"D104",
"S101"
]
[tool.tomlsort]
all = true
ignore_case = true
in_place = true

@@ -1,4 +0,0 @@
-from .base import HealthResponseModel
-from .joke import JokeResponse
-__all__ = ["HealthResponseModel", "JokeResponse"]

@@ -0,0 +1,2 @@
.venv
.env

src/template/.env.example (new file, 15 lines)

@@ -0,0 +1,15 @@
POSTGRES_DRIVER=postgresql+asyncpg
POSTGRES_HOST=db
POSTGRES_PORT=5432
POSTGRES_USER=user
POSTGRES_PASSWORD=Qwe123
POSTGRES_DB_NAME=api_db
NGINX_PORT=80
API_HOST=0.0.0.0
API_PORT=8000
JWT_SECRET_KEY=v9LctjUWwol4XbvczPiLFMDtZ8aal7mm
JWT_ALGORITHM=HS256
APP_RELOAD=True

src/template/Dockerfile (new file, 23 lines)

@@ -0,0 +1,23 @@
FROM python:3.11
RUN apt-get update \
&& DEBIAN_FRONTEND=noninteractive \
&& apt-get install -y net-tools netcat-traditional curl \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
RUN mkdir --parents /opt/app
COPY pyproject.toml /opt/app/pyproject.toml
COPY poetry.lock /opt/app/poetry.lock
COPY poetry.toml /opt/app/poetry.toml
WORKDIR /opt/app
RUN pip install poetry \
&& poetry install --no-dev
COPY bin /opt/app/bin
COPY lib /opt/app/lib
COPY entrypoint.sh /opt/app/entrypoint.sh
RUN chmod +x /opt/app/entrypoint.sh

src/template/Makefile (new file, 3 lines)

@@ -0,0 +1,3 @@
include ../../common_makefile.mk
PROJECT_FOLDERS = bin lib tests

src/template/__init__.py (new, empty file)

src/template/alembic.ini (new file, 114 lines)

@@ -0,0 +1,114 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
file_template = %%(year)d-%%(month).2d-%%(day).2d_%%(rev)s_%%(slug)s
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

@@ -0,0 +1 @@
Generic single-database configuration with an async dbapi.

@@ -0,0 +1,83 @@
import asyncio
from logging.config import fileConfig

from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config

import lib.app.settings as app_settings
import lib.models as models
from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

config.set_main_option("sqlalchemy.url", app_settings.Settings().postgres.dsn)

target_metadata = models.Base.metadata


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    context.configure(connection=connection, target_metadata=target_metadata)

    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

@@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

@@ -0,0 +1,37 @@
"""Added initial table
Revision ID: 9749b063b095
Revises:
Create Date: 2023-10-02 19:46:05.078494
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "9749b063b095"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"joke",
sa.Column("type", sa.String(), nullable=False),
sa.Column("setup", sa.String(), nullable=False),
sa.Column("punchline", sa.String(), nullable=False),
sa.Column("id", sa.Uuid(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("joke")
# ### end Alembic commands ###

@@ -0,0 +1,37 @@
import asyncio
import logging
import os

import lib.app as app

logger = logging.getLogger(__name__)


async def run() -> None:
    settings = app.Settings()
    application = app.Application.from_settings(settings)

    try:
        await application.start()
    finally:
        await application.dispose()


def main() -> None:
    try:
        asyncio.run(run())
        exit(os.EX_OK)
    except SystemExit:
        exit(os.EX_OK)
    except app.ApplicationError:
        exit(os.EX_SOFTWARE)
    except KeyboardInterrupt:
        logger.info("Exited with keyboard interruption")
        exit(os.EX_OK)
    except BaseException:
        logger.exception("Unexpected error occurred")
        exit(os.EX_SOFTWARE)


if __name__ == "__main__":
    main()

@@ -0,0 +1,58 @@
version: "3"
services:
postgres:
image: postgres:15.2
restart: always
environment:
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
POSTGRES_DB: ${POSTGRES_DB_NAME}
env_file:
- .env
ports:
- "${POSTGRES_PORT}:${POSTGRES_PORT}"
volumes:
- postgres_data:/var/lib/postgresql/data/
networks:
- backend_network
api:
build:
context: .
container_name: fastapi_app
image: fastapi_app
restart: always
entrypoint: ["/opt/app/entrypoint.sh"]
env_file:
- .env
ports:
- "${API_PORT}:${API_PORT}"
depends_on:
- postgres
networks:
- backend_network
- api_network
nginx:
image: nginx:1.25.1
env_file:
- .env
ports:
- "${NGINX_PORT}:${NGINX_PORT}"
volumes:
- ./nginx/nginx.conf:/etc/nginx/nginx.conf:ro
- ./nginx/templates:/etc/nginx/templates
depends_on:
- api
networks:
- api_network
volumes:
postgres_data:
networks:
api_network:
driver: bridge
backend_network:
driver: bridge

@@ -0,0 +1,58 @@
version: "3"
services:
postgres:
image: postgres:15.2
restart: always
environment:
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
POSTGRES_DB: ${POSTGRES_DB_NAME}
env_file:
- .env
expose:
- "${POSTGRES_PORT}"
volumes:
- postgres_data:/var/lib/postgresql/data/
networks:
- backend_network
api:
build:
context: .
container_name: api
image: fastapi_app
restart: always
entrypoint: ["/opt/app/entrypoint.sh"]
env_file:
- .env
expose:
- "${API_PORT}"
depends_on:
- postgres
networks:
- backend_network
- api_network
nginx:
image: nginx:1.25.1
env_file:
- .env
ports:
- "${NGINX_PORT}:${NGINX_PORT}"
volumes:
- ./nginx/nginx.conf:/etc/nginx/nginx.conf:ro
- ./nginx/templates:/etc/nginx/templates
depends_on:
- api
networks:
- api_network
volumes:
postgres_data:
networks:
api_network:
driver: bridge
backend_network:
driver: bridge

@@ -0,0 +1,5 @@
#!/bin/bash
while ! nc -z postgres 5432; do sleep 1; done;
exec .venv/bin/python -m bin

@@ -0,0 +1,5 @@
from .liveness_probe import basic_router
__all__ = [
"basic_router",
]

@@ -0,0 +1,15 @@
import fastapi

import lib.api.v1.schemas as api_shemas

basic_router = fastapi.APIRouter()


@basic_router.get(
    "/",
    response_model=api_shemas.HealthResponse,
    summary="Статус работоспособности",
    description="Проверяет доступность сервиса FastAPI.",
)
async def health():
    return api_shemas.HealthResponse(status="healthy")
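A minimal smoke-test sketch for this endpoint (hypothetical, not part of the commit): mount the router on a bare app and call it with FastAPI's test client, bypassing Application and Settings entirely:

import fastapi
import fastapi.testclient

import lib.api.v1.handlers as api_v1_handlers

app = fastapi.FastAPI()
app.include_router(api_v1_handlers.basic_router, prefix="/api/v1/health")

client = fastapi.testclient.TestClient(app)
response = client.get("/api/v1/health/")
assert response.status_code == 200
assert response.json() == {"status": "healthy"}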

@@ -0,0 +1,4 @@
from .base import HealthResponse
from .joke import JokeResponse
__all__ = ["HealthResponse", "JokeResponse"]

@@ -0,0 +1,5 @@
import pydantic


class HealthResponse(pydantic.BaseModel):
    status: str = pydantic.Field(default=..., examples=["healthy"], description="Схема доступности сервиса")

@@ -0,0 +1,11 @@
from .app import Application
from .errors import *
from .settings import Settings
__all__ = [
"Application",
"ApplicationError",
"DisposeError",
"Settings",
"StartServerError",
]

@@ -0,0 +1,22 @@
import typing


class ApplicationError(Exception):
    def __init__(self, message: str, *args: typing.Any) -> None:
        super().__init__(*args)
        self.message = message


class DisposeError(ApplicationError):
    pass


class StartServerError(ApplicationError):
    pass


__all__ = [
    "ApplicationError",
    "DisposeError",
    "StartServerError",
]

@@ -0,0 +1,69 @@
import pydantic_settings


class LoggingSettings(pydantic_settings.BaseSettings):
log_format: str = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
log_default_handlers: list[str] = [
"console",
]
log_level_handlers: str = "DEBUG"
log_level_loggers: str = "INFO"
    log_level_root: str = "INFO"


log_settings = LoggingSettings()

LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"verbose": {"format": log_settings.log_format},
"default": {
"()": "uvicorn.logging.DefaultFormatter",
"fmt": "%(levelprefix)s %(message)s",
"use_colors": None,
},
"access": {
"()": "uvicorn.logging.AccessFormatter",
"fmt": "%(levelprefix)s %(client_addr)s - '%(request_line)s' %(status_code)s",
},
},
"handlers": {
"console": {
"level": log_settings.log_level_handlers,
"class": "logging.StreamHandler",
"formatter": "verbose",
},
"default": {
"formatter": "default",
"class": "logging.StreamHandler",
"stream": "ext://sys.stdout",
},
"access": {
"formatter": "access",
"class": "logging.StreamHandler",
"stream": "ext://sys.stdout",
},
},
"loggers": {
"": {
"handlers": log_settings.log_default_handlers,
"level": log_settings.log_level_loggers,
},
"uvicorn.error": {
"level": log_settings.log_level_loggers,
},
"uvicorn.access": {
"handlers": ["access"],
"level": log_settings.log_level_loggers,
"propagate": False,
},
},
"root": {
"level": log_settings.log_level_root,
"formatter": "verbose",
"handlers": log_settings.log_default_handlers,
},
}

@@ -0,0 +1,18 @@
import pydantic
import pydantic_settings

import lib.app.split_settings as app_split_settings


class Settings(pydantic_settings.BaseSettings):
    api: app_split_settings.ApiSettings = pydantic.Field(default_factory=lambda: app_split_settings.ApiSettings())
    app: app_split_settings.AppSettings = pydantic.Field(default_factory=lambda: app_split_settings.AppSettings())
    postgres: app_split_settings.PostgresSettings = pydantic.Field(
        default_factory=lambda: app_split_settings.PostgresSettings()
    )
    logger: app_split_settings.LoggingSettings = pydantic.Field(
        default_factory=lambda: app_split_settings.LoggingSettings()
    )
    project: app_split_settings.ProjectSettings = pydantic.Field(
        default_factory=lambda: app_split_settings.ProjectSettings()
    )
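Each sub-model is built through default_factory, so every group reads its own prefix from the shared .env. A usage sketch (assumes the .env supplies POSTGRES_PASSWORD and JWT_SECRET_KEY, the only fields without defaults):

import lib.app.settings as app_settings

settings = app_settings.Settings()
print(settings.api.port)                  # 8000 unless API_PORT overrides it
print(settings.postgres.dsn_as_safe_url)  # DSN with the password masked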

@@ -0,0 +1,14 @@
from .api import *
from .app import *
from .logger import *
from .postgres import *
from .project import *
__all__ = [
"ApiSettings",
"AppSettings",
"LoggingSettings",
"PostgresSettings",
"ProjectSettings",
"get_logging_config",
]

@@ -0,0 +1,15 @@
import pydantic_settings

import lib.app.split_settings.utils as app_split_settings_utils


class ApiSettings(pydantic_settings.BaseSettings):
    model_config = pydantic_settings.SettingsConfigDict(
        env_file=app_split_settings_utils.ENV_PATH,
        env_prefix="API_",
        env_file_encoding="utf-8",
        extra="ignore",
    )

    host: str = "0.0.0.0"
    port: int = 8000

@@ -0,0 +1,25 @@
import pydantic
import pydantic_settings

import lib.app.split_settings.utils as app_split_settings_utils


class AppSettings(pydantic_settings.BaseSettings):
    model_config = pydantic_settings.SettingsConfigDict(
        env_file=app_split_settings_utils.ENV_PATH,
        env_prefix="APP_",
        env_file_encoding="utf-8",
        extra="ignore",
    )

    title: str = "FastAPI"
    version: str = "0.1.0"
    docs_url: str = "/api/openapi"
    openapi_url: str = "/api/openapi.json"
    reload: bool = False

    @pydantic.field_validator("reload")
    def validate_debug(cls, v: str | bool) -> bool:
        if isinstance(v, bool):
            return v
        return v.lower() == "true"

@@ -0,0 +1,79 @@
import pydantic_settings

import lib.app.split_settings.utils as app_split_settings_utils


class LoggingSettings(pydantic_settings.BaseSettings):
    model_config = pydantic_settings.SettingsConfigDict(
        env_file=app_split_settings_utils.ENV_PATH, env_file_encoding="utf-8", extra="ignore"
    )

    log_format: str = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    log_default_handlers: list[str] = [
        "console",
    ]
    log_level_handlers: str = "INFO"
    log_level_loggers: str = "INFO"
    log_level_root: str = "INFO"


def get_logging_config(
log_format: str,
log_default_handlers: list[str],
log_level_handlers: str,
log_level_loggers: str,
log_level_root: str,
):
return {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"verbose": {"format": log_format},
"default": {
"()": "uvicorn.logging.DefaultFormatter",
"fmt": "%(levelprefix)s %(message)s",
"use_colors": None,
},
"access": {
"()": "uvicorn.logging.AccessFormatter",
"fmt": "%(levelprefix)s %(client_addr)s - '%(request_line)s' %(status_code)s",
},
},
"handlers": {
"console": {
"level": log_level_handlers,
"class": "logging.StreamHandler",
"formatter": "verbose",
},
"default": {
"formatter": "default",
"class": "logging.StreamHandler",
"stream": "ext://sys.stdout",
},
"access": {
"formatter": "access",
"class": "logging.StreamHandler",
"stream": "ext://sys.stdout",
},
},
"loggers": {
"": {
"handlers": log_default_handlers,
"level": log_level_loggers,
},
"uvicorn.error": {
"level": log_level_loggers,
},
"uvicorn.access": {
"handlers": ["access"],
"level": log_level_loggers,
"propagate": False,
},
},
"root": {
"level": log_level_root,
"formatter": "verbose",
"handlers": log_default_handlers,
},
}
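As Application.from_settings does, the returned dict plugs straight into the stdlib logging machinery; a standalone sketch of the same wiring:

import logging
import logging.config

import lib.app.split_settings as app_split_settings

log_settings = app_split_settings.LoggingSettings()
logging.config.dictConfig(app_split_settings.get_logging_config(**log_settings.model_dump()))
logging.getLogger(__name__).info("logging configured")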

@@ -0,0 +1,44 @@
import pydantic
import pydantic_settings

import lib.app.split_settings.utils as app_split_settings_utils


class PostgresSettings(pydantic_settings.BaseSettings):
    """Postgres settings."""

    model_config = pydantic_settings.SettingsConfigDict(
        env_file=app_split_settings_utils.ENV_PATH,
        env_prefix="POSTGRES_",
        env_file_encoding="utf-8",
        extra="ignore",
    )

    # Connection settings
    driver: str = "postgresql+asyncpg"
    db_name: str = "database_name"
    host: str = "localhost"
    port: int = 5432
    user: str = "app"
    password: pydantic.SecretStr = pydantic.Field(
        default=..., validation_alias=pydantic.AliasChoices("password", "postgres_password")
    )

    # Engine settings
    pool_size: int = 50
    pool_pre_ping: bool = True
    echo: bool = False

    # Session settings
    auto_commit: bool = False
    auto_flush: bool = False
    expire_on_commit: bool = False

    @property
    def dsn(self) -> str:
        # Target the configured database explicitly; without the trailing path
        # component the driver would fall back to the server's default database.
        password = self.password.get_secret_value()
        return f"{self.driver}://{self.user}:{password}@{self.host}:{self.port}/{self.db_name}"

    @property
    def dsn_as_safe_url(self) -> str:
        return f"{self.driver}://{self.user}:***@{self.host}:{self.port}/{self.db_name}"

@@ -0,0 +1,22 @@
import pydantic
import pydantic_settings

import lib.app.split_settings.utils as app_split_settings_utils


class ProjectSettings(pydantic_settings.BaseSettings):
    """Project settings."""

    model_config = pydantic_settings.SettingsConfigDict(
        env_file=app_split_settings_utils.ENV_PATH,
        env_file_encoding="utf-8",
        extra="ignore",
    )

    debug: bool = False
    jwt_secret_key: str = pydantic.Field(default=..., validation_alias="jwt_secret_key")
    jwt_algorithm: str = "HS256"

    @pydantic.field_validator("debug")
    def validate_debug(cls, v: str | bool) -> bool:
        if isinstance(v, bool):
            return v
        return v.lower() == "true"

@@ -0,0 +1,4 @@
import pathlib
BASE_PATH = pathlib.Path(__file__).parent.parent.parent.parent.resolve()
ENV_PATH = BASE_PATH / ".env"

@@ -0,0 +1,3 @@
from .postgres import AsyncPostgresClient
__all__ = ["AsyncPostgresClient"]

@@ -0,0 +1,30 @@
import sqlalchemy.ext.asyncio as sa_asyncio

import lib.app.settings as app_settings


class AsyncPostgresClient:
    """Async Postgres client that returns an async sessionmaker."""

    def __init__(self, settings: app_settings.Settings) -> None:
        self.settings = settings.postgres
        self.async_engine = sa_asyncio.create_async_engine(
            url=self.settings.dsn,
            pool_size=self.settings.pool_size,
            pool_pre_ping=self.settings.pool_pre_ping,
            echo=self.settings.echo,
            future=True,
        )

    def get_async_session(self) -> sa_asyncio.async_sessionmaker[sa_asyncio.AsyncSession]:
        async_session = sa_asyncio.async_sessionmaker(
            bind=self.async_engine,
            autocommit=self.settings.auto_commit,
            autoflush=self.settings.auto_flush,
            expire_on_commit=self.settings.expire_on_commit,
        )
        return async_session  # noqa: RET504

    async def dispose_callback(self) -> None:
        await self.async_engine.dispose()
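A sketch of typical use (assumes a populated Settings): build the client once, open short-lived sessions from the factory per unit of work, and hand dispose_callback() to the application for shutdown:

import sqlalchemy

import lib.app.settings as app_settings
import lib.clients as clients

async def example() -> None:
    client = clients.AsyncPostgresClient(settings=app_settings.Settings())
    session_factory = client.get_async_session()
    async with session_factory() as session:
        result = await session.execute(sqlalchemy.text("SELECT 1"))
        print(result.scalar_one())
    await client.dispose_callback()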

@@ -0,0 +1,35 @@
import datetime
import uuid

import sqlalchemy
import sqlalchemy.dialects.postgresql
import sqlalchemy.ext.declarative
import sqlalchemy.orm as sa_orm
import sqlalchemy.sql as sa_sql


class Base(sa_orm.DeclarativeBase):
    """Base class for all models."""

    @sqlalchemy.ext.declarative.declared_attr.directive
    def __tablename__(cls):
        return cls.__name__.lower()

    __mapper_args__ = {"eager_defaults": True}

    id: sa_orm.Mapped[uuid.UUID] = sa_orm.mapped_column(primary_key=True, default=uuid.uuid4)


class IdCreatedUpdatedBaseMixin:
    # id: sa_orm.Mapped[int] = sa_orm.mapped_column(primary_key=True)
    # id_field: sa_orm.Mapped[uuid.UUID] = sa_orm.mapped_column(name="uuid", primary_key=True, unique=True, default=uuid.uuid4, nullable=False)

    created: sa_orm.Mapped[datetime.datetime] = sa_orm.mapped_column(server_default=sa_sql.func.now())
    updated: sa_orm.Mapped[datetime.datetime] = sa_orm.mapped_column(
        server_default=sa_sql.func.now(), onupdate=sa_sql.func.now()
    )

    # __mapper_args__ = {"eager_defaults": True}

    # @sqlalchemy.ext.declarative.declared_attr.directive
    # def __tablename__(cls) -> str:
    #     return cls.__name__.lower()
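A hypothetical model sketch consistent with the initial "joke" migration above: subclassing Base yields the lowercase table name and the UUID primary key, while adding IdCreatedUpdatedBaseMixin would additionally contribute created/updated timestamp columns:

class Joke(Base):
    """Maps to the "joke" table created by revision 9749b063b095."""

    type: sa_orm.Mapped[str]
    setup: sa_orm.Mapped[str]
    punchline: sa_orm.Mapped[str]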

Some files were not shown because too many files have changed in this diff.