Initial commit

This is a FastAPI backend microservice template for use with the `copier` utility.

Features of the applied template:
- Configuration file processing logic
- Metrics and tracing configuration (both optional)
- Debug endpoints
- Database migration commands and a prepared Alembic environment
- Database usage example in the ping_db endpoint (a hedged sketch follows this list)
- Gitea sanity-check pipeline
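
As a hedged sketch of that last example, a ping_db handler built on the connection manager from this commit might look like the following (the route path, response shape, and app.state wiring are assumptions for illustration; only PostgresConnectionManager itself is defined below):

from fastapi import APIRouter, Request
from sqlalchemy import text

router = APIRouter()


@router.get("/ping_db")
async def ping_db(request: Request) -> dict[str, str]:
    # Assumption: a PostgresConnectionManager is stored on app.state at startup.
    manager = request.app.state.connection_manager
    # A read-only connection may be served by a replica, falling back to the master.
    async with manager.get_ro_connection() as conn:
        await conn.execute(text("SELECT 1"))
    return {"status": "ok"}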
commit 685ea5e5f4 (2025-11-29 21:42:27 +03:00)
52 changed files with 4563 additions and 0 deletions


@@ -0,0 +1,21 @@
"""SQL naming convention for Alembic is defined here."""
from sqlalchemy import MetaData
from sqlalchemy.orm import declarative_base
convention = {
"all_column_names": lambda constraint, _: "_".join([str(column.name) for column in constraint.columns.values()]),
"ix": "ix_%(table_name)s_%(all_column_names)s",
"uq": "%(table_name)s_%(all_column_names)s_key",
"ck": "ck_%(table_name)s_%(column_0_name)s",
"fk": "%(table_name)s_fk_%(all_column_names)s__%(referred_table_name)s",
"pk": "%(table_name)s_pk",
}
metadata = MetaData(naming_convention=convention)
DeclarativeBase = declarative_base(metadata=metadata)
__all__ = [
"DeclarativeBase",
"metadata",
]
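
For instance, with this convention a unique constraint's name is derived from the table and column names. A quick illustration (the users table here is made up; metadata is imported the same way env.py imports DeclarativeBase later in this commit):

from sqlalchemy import Column, Integer, String, Table, UniqueConstraint
from sqlalchemy.schema import CreateTable

from {{project_slug}}.db import metadata  # the MetaData defined above

# Made-up table: the "uq" convention yields "users_email_key"; an index on the
# same column would be named "ix_users_email" by the "ix" convention.
users = Table(
    "users",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("email", String, nullable=False),
    UniqueConstraint("email"),
)

print(CreateTable(users))  # DDL includes: CONSTRAINT users_email_key UNIQUE (email)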


@@ -0,0 +1,114 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = migrator/
# template used to generate migration files
file_template = %%(year)d-%%(month).2d-%%(day).2d_%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
path_separator = os
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires python>=3.9 or the backports.zoneinfo library.
# Any required dependencies can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to migrator/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:migrator/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = postgresql+asyncpg://%(POSTGRES_USER)s:%(POSTGRES_PASSWORD)s@%(POSTGRES_HOST)s:%(POSTGRES_PORT)s/%(POSTGRES_DB)s?target_session_attrs=read-write
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
hooks = black
black.type = console_scripts
black.entrypoint = black
black.options = -l 120 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
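
Note that sqlalchemy.url above is not a literal DSN: the %(POSTGRES_*)s placeholders are ConfigParser-style interpolations that migrator/env.py fills in at runtime via config.set_section_option (shown later in this commit). A minimal sketch of the mechanism using the stdlib ConfigParser (values are made up):

from configparser import ConfigParser

ini = ConfigParser()
ini.read_string(
    "[alembic]\n"
    "sqlalchemy.url = postgresql+asyncpg://%(POSTGRES_USER)s:%(POSTGRES_PASSWORD)s"
    "@%(POSTGRES_HOST)s:%(POSTGRES_PORT)s/%(POSTGRES_DB)s\n"
)
for key, value in {
    "POSTGRES_USER": "app",
    "POSTGRES_PASSWORD": "secret",
    "POSTGRES_HOST": "localhost",
    "POSTGRES_PORT": "5432",
    "POSTGRES_DB": "app_db",
}.items():
    ini.set("alembic", key, value)

print(ini.get("alembic", "sqlalchemy.url"))
# -> postgresql+asyncpg://app:secret@localhost:5432/app_db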


@@ -0,0 +1,43 @@
"""Database configuration class is defined here."""
from dataclasses import dataclass
from typing import Any
from {{project_slug}}.utils.secrets import SecretStr
@dataclass
class DBConfig:
host: str
port: int
database: str
user: str
password: SecretStr
pool_size: int
def __post_init__(self):
self.password = SecretStr(self.password)
@dataclass
class MultipleDBsConfig:
master: DBConfig
replicas: list[DBConfig] | None = None
def __post_init__(self):
_dict_to_dataclass(self, "master", DBConfig)
if self.replicas is not None:
_list_dict_to_dataclasses(self, "replicas", DBConfig)
def _list_dict_to_dataclasses(config_entry: Any, field_name: str, need_type: type) -> None:
list_dict = getattr(config_entry, field_name)
for i in range(len(list_dict)): # pylint: disable=consider-using-enumerate
if isinstance(list_dict[i], dict):
list_dict[i] = need_type(**list_dict[i])
def _dict_to_dataclass(config_entry: Any, field_name: str, need_type: type) -> None:
value = getattr(config_entry, field_name)
if isinstance(value, dict):
setattr(config_entry, field_name, need_type(**value))
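
Because of the __post_init__ hooks, this config can be built directly from parsed YAML/JSON where nested mappings are plain dicts; they are promoted to dataclasses on construction. A small sketch (values are made up):

from {{project_slug}}.db.config import DBConfig, MultipleDBsConfig

raw = {
    "master": {
        "host": "localhost",
        "port": 5432,
        "database": "app_db",
        "user": "app",
        "password": "secret",
        "pool_size": 10,
    },
    "replicas": [
        {
            "host": "replica-1",
            "port": 5432,
            "database": "app_db",
            "user": "app",
            "password": "secret",
            "pool_size": 5,
        },
    ],
}

db_config = MultipleDBsConfig(**raw)
assert isinstance(db_config.master, DBConfig)
assert isinstance(db_config.replicas[0], DBConfig)
# Passwords are wrapped into SecretStr automatically by __post_init__.
assert db_config.master.password.get_secret_value() == "secret"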


@@ -0,0 +1,7 @@
"""Module responsible for managing database connections."""
from .manager import PostgresConnectionManager
__all__ = [
"PostgresConnectionManager",
]


@@ -0,0 +1,195 @@
"""Connection manager class and get_connection function are defined here."""
from asyncio import Lock
from contextlib import asynccontextmanager
from itertools import cycle
from typing import Any, AsyncIterator
import structlog
from sqlalchemy import select, text
from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine, create_async_engine
from {{project_slug}}.db.config import DBConfig
class PostgresConnectionManager: # pylint: disable=too-many-instance-attributes
"""Connection manager for PostgreSQL database"""
def __init__( # pylint: disable=too-many-arguments
self,
master: DBConfig,
replicas: list[DBConfig] | None,
logger: structlog.stdlib.BoundLogger,
*,
engine_options: dict[str, Any] | None = None,
application_name: str | None = None,
) -> None:
"""Initialize connection manager entity."""
self._master_engine: AsyncEngine | None = None
self._replica_engines: list[AsyncEngine] = []
self._master = master
self._replicas = replicas or []
self._lock = Lock()
self._logger = logger
self._engine_options = engine_options or {}
self._application_name = application_name
# Iterator for round-robin through replicas
self._replica_cycle = None
async def update( # pylint: disable=too-many-arguments
self,
*,
master: DBConfig | None = None,
replicas: list[DBConfig] | None = None,
logger: structlog.stdlib.BoundLogger | None = None,
application_name: str | None = None,
engine_options: dict[str, Any] | None = None,
) -> None:
"""Update connection manager parameters and refresh connection."""
self._master = master or self._master
self._replicas = replicas or self._replicas
self._logger = logger or self._logger
self._application_name = application_name or self._application_name
self._engine_options = engine_options or self._engine_options
if self.initialized:
await self.refresh()
@property
def initialized(self) -> bool:
return self._master_engine is not None
async def refresh(self, no_force_refresh: bool = False) -> None:
"""Initialize or reinitialize connection engine.
Params:
no_force_refresh (bool): if set to True and ConnectionManager is already initialized,
no refresh is performed
"""
async with self._lock:
if no_force_refresh and self.initialized:
return
await self.shutdown(use_lock=False)
await self._logger.ainfo(
"creating postgres master connection pool",
max_size=self._master.pool_size,
user=self._master.user,
host=self._master.host,
port=self._master.port,
database=self._master.database,
)
self._master_engine = create_async_engine(
f"postgresql+asyncpg://{self._master.user}:{self._master.password.get_secret_value()}"
f"@{self._master.host}:{self._master.port}/{self._master.database}",
future=True,
pool_size=max(1, self._master.pool_size - 5),
max_overflow=min(self._master.pool_size - 1, 5),
**self._engine_options,
)
try:
async with self._master_engine.connect() as conn:
cur = await conn.execute(select(text("1")))
assert cur.fetchone()[0] == 1
except Exception as exc:
self._master_engine = None
raise RuntimeError("something wrong with database connection, aborting") from exc
if len(self._replicas) > 0:
for replica in self._replicas:
await self._logger.ainfo(
"creating postgres readonly connection pool",
max_size=replica.pool_size,
user=replica.user,
host=replica.host,
port=replica.port,
database=replica.database,
)
replica_engine = create_async_engine(
f"postgresql+asyncpg://{replica.user}:{replica.password.get_secret_value()}@"
f"{replica.host}:{replica.port}/{replica.database}",
future=True,
pool_size=max(1, self._master.pool_size - 5),
max_overflow=min(self._master.pool_size - 1, 5),
**self._engine_options,
)
try:
async with replica_engine.connect() as conn:
cur = await conn.execute(select(1))
assert cur.fetchone()[0] == 1
self._replica_engines.append(replica_engine)
except Exception as exc: # pylint: disable=broad-except
await replica_engine.dispose()
await self._logger.aexception("error connecting to replica", host=replica.host, error=repr(exc))
if self._replica_engines:
self._replica_cycle = cycle(self._replica_engines)
else:
self._replica_cycle = None
await self._logger.awarning("no available replicas, read queries will go to the master")
async def shutdown(self, use_lock: bool = True) -> None:
"""Dispose connection pool and deinitialize. Can be called multiple times."""
if use_lock:
async with self._lock:
await self.shutdown(use_lock=False)
return
if self.initialized:
self._logger.info("shutting down postgres connection engine")
await self._master_engine.dispose()
self._master_engine = None
for engine in self._replica_engines:
await engine.dispose()
self._replica_engines.clear()
@asynccontextmanager
async def get_connection(self) -> AsyncIterator[AsyncConnection]:
"""Get an async connection to the database with read-write ability."""
if not self.initialized:
await self.refresh(no_force_refresh=True)
async with self._master_engine.connect() as conn:
if self._application_name is not None:
await conn.execute(text(f'SET application_name TO "{self._application_name}"'))
await conn.commit()
yield conn
@asynccontextmanager
async def get_ro_connection(self) -> AsyncIterator[AsyncConnection]:
"""Get an async connection to the database which can be read-only and will attempt to use replica instances
of the database."""
if not self.initialized:
await self.refresh(no_force_refresh=True)
# If there are no replicas, use master
if self._replica_cycle is None:
async with self.get_connection() as conn:
yield conn
return
# Select the next replica (round-robin), `self._replica_cycle` is guaranteed to have values here
engine = next(self._replica_cycle) # pylint: disable=stop-iteration-return
conn = None
try:
conn = await engine.connect()
if self._application_name is not None:
await conn.execute(text(f'SET application_name TO "{self._application_name}"'))
await conn.commit()
except Exception as exc: # pylint: disable=broad-except
if conn is not None:
try:
conn.close()
except Exception: # pylint: disable=broad-except
pass
await self._logger.awarning(
"error connecting to replica, falling back to master", error=repr(exc), error_type=type(exc).__name__
)
# On exception from replica fallback to master connection
async with self.get_connection() as conn:
yield conn
return
try:
yield conn
finally:
await conn.close()
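
A hedged usage sketch of the manager (configuration values are made up; in the template the manager would presumably be created on application startup and disposed on shutdown):

import asyncio

import structlog
from sqlalchemy import text

from {{project_slug}}.db.config import DBConfig
from {{project_slug}}.db.connection import PostgresConnectionManager


async def main() -> None:
    manager = PostgresConnectionManager(
        master=DBConfig(
            host="localhost", port=5432, database="app_db",
            user="app", password="secret", pool_size=10,
        ),
        replicas=None,
        logger=structlog.get_logger(),  # assumes structlog is configured with async-capable BoundLogger
        application_name="my-service",  # made-up name
    )
    await manager.refresh()
    try:
        # With no replicas configured, the read-only path falls back to the master.
        async with manager.get_ro_connection() as conn:
            result = await conn.execute(text("SELECT 1"))
            print(result.scalar_one())
    finally:
        await manager.shutdown()


asyncio.run(main())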


@@ -0,0 +1,96 @@
# pylint: disable=wrong-import-position
"""Environment preparation for Alembic is performed here."""
import asyncio
import os
from logging.config import fileConfig

from alembic import context
from dotenv import load_dotenv
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config

from {{project_slug}}.config import {{ProjectName}}Config
from {{project_slug}}.db import DeclarativeBase
from {{project_slug}}.db.entities import *  # pylint: disable=wildcard-import,unused-wildcard-import

envfile_path = os.environ.get("ENVFILE", ".env")
if os.path.isfile(envfile_path):
    try:
        load_dotenv(envfile_path)
    except Exception as exc:
        print(f"Got an error while loading envfile '{envfile_path}': {exc!r}")

config = context.config
section = config.config_ini_section

app_settings = {{ProjectName}}Config.from_file(os.getenv("CONFIG_PATH"))
config.set_section_option(section, "POSTGRES_DB", app_settings.db.master.database)
config.set_section_option(section, "POSTGRES_HOST", app_settings.db.master.host)
config.set_section_option(section, "POSTGRES_USER", app_settings.db.master.user)
config.set_section_option(section, "POSTGRES_PASSWORD", app_settings.db.master.password.get_secret_value())
config.set_section_option(section, "POSTGRES_PORT", str(app_settings.db.master.port))

fileConfig(config.config_file_name, disable_existing_loggers=False)

target_metadata = DeclarativeBase.metadata


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL and not an Engine,
    though an Engine is acceptable here as well. By skipping the Engine
    creation we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    context.configure(connection=connection, target_metadata=target_metadata)
    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """In this scenario we need to create an Engine and associate a connection with the context."""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)
    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
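
With CONFIG_PATH pointing at the service configuration file, migrations can be run via the Alembic CLI (alembic upgrade head) or programmatically through Alembic's command API; a minimal sketch (file names are assumptions):

import os

from alembic import command
from alembic.config import Config

os.environ.setdefault("CONFIG_PATH", "config.yaml")  # assumed config file name
alembic_cfg = Config("alembic.ini")
command.upgrade(alembic_cfg, "head")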


@@ -0,0 +1,27 @@
# pylint: disable=no-member,invalid-name,missing-function-docstring,too-many-statements
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
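
Rendered through this template, a freshly generated revision would look roughly like the following (revision id, date, and table are made up for illustration; with the file_template from alembic.ini the file would be named something like 2025-11-29_0f3c2a1b4d5e_add_users_table.py):

# pylint: disable=no-member,invalid-name,missing-function-docstring,too-many-statements
"""add users table

Revision ID: 0f3c2a1b4d5e
Revises:
Create Date: 2025-11-29 21:42:27

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision: str = "0f3c2a1b4d5e"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.create_table(
        "users",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("email", sa.String(), nullable=False),
    )


def downgrade() -> None:
    op.drop_table("users")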