Initial Template for FastAPI

master
Ernest Litvinenko 2023-12-10 13:04:10 +03:00
commit b7b432e414
20 changed files with 516 additions and 0 deletions

9
.env.sample Normal file

@ -0,0 +1,9 @@
HOST=0.0.0.0
PORT=8000
POSTGRES_URL=postgresql+asyncpg://login:pwd@0.0.0.0/db
SECRET=<YOUR SECRET>
TOKEN_LIFETIME=15
SYSTEM_USERNAME=admin
SYSTEM_PWD=admin
SYSTEM_EMAIL=ernest@elitvinenko.tech
DOMAIN=https://your-site.com

30
.gitignore vendored Normal file

@ -0,0 +1,30 @@
.idea
.ipynb_checkpoints
.mypy_cache
.vscode
__pycache__
.pytest_cache
htmlcov
dist
site
.coverage
coverage.xml
.netlify
test.db
log.txt
Pipfile.lock
env3.*
env
docs_build
site_build
venv
docs.zip
archive.zip
# vim temporary files
*~
.*.sw?
.cache
# macOS
.DS_Store

114
alembic.ini Normal file

@ -0,0 +1,114 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = ./alembic/migrations
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to ./alembic/migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:./alembic/migrations/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
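
Not part of the commit: with this alembic.ini, migrations are normally driven from the CLI (alembic revision --autogenerate -m "...", then alembic upgrade head). A minimal sketch of the equivalent programmatic calls, assuming it is run from the project root where alembic.ini lives:

# migrate.py (hypothetical helper, not part of the commit)
from alembic import command
from alembic.config import Config as AlembicConfig

# script_location in alembic.ini points at ./alembic/migrations
alembic_cfg = AlembicConfig('alembic.ini')

# generate an autogenerate revision, then apply everything up to head
command.revision(alembic_cfg, message='init', autogenerate=True)
command.upgrade(alembic_cfg, 'head')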

1
alembic/migrations/README Normal file

@ -0,0 +1 @@
Generic single-database configuration with an async dbapi.

91
alembic/migrations/env.py Normal file

@ -0,0 +1,91 @@
import asyncio
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context
from config import Config
from core.models.db_migrations import Base
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
config.set_main_option('sqlalchemy.url', str(Config.postgres_url))
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    context.configure(connection=connection, target_metadata=target_metadata)

    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

26
alembic/migrations/script.py.mako Normal file

@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}

21
config.py Normal file

@ -0,0 +1,21 @@
from pydantic import IPvAnyAddress, PostgresDsn, EmailStr
from pydantic_settings import BaseSettings, SettingsConfigDict


class Settings(BaseSettings):
    model_config = SettingsConfigDict(env_file='.env')

    host: IPvAnyAddress = '0.0.0.0'
    port: int = 8000
    secret: str  # openssl rand -hex 32
    postgres_url: PostgresDsn
    token_lifetime: int = 15
    system_username: str = 'admin'
    system_pwd: str = 'admin'
    system_email: EmailStr = 'ernest@elitvinenko.tech'
    domain: str
    static_url: str


Config = Settings()
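
Not part of the commit: a short usage sketch. Settings() reads .env via pydantic-settings; secret, postgres_url, domain and static_url have no defaults, so they must be present in the environment or .env. Note that the .env.sample above has no STATIC_URL entry, so one must be added before Settings() validates.

# hypothetical usage sketch
from config import Config  # importing this module instantiates Settings() and reads .env

print(Config.port)               # 8000 unless PORT overrides it
print(str(Config.postgres_url))  # validated PostgresDsn built from POSTGRES_URL
print(Config.system_email)       # validated EmailStr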

0
core/__init__.py Normal file

3
core/api/__init__.py Normal file

@ -0,0 +1,3 @@
from fastapi import APIRouter
router = APIRouter(prefix='/api/v1')
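
Not part of the commit: feature routers are meant to be mounted on this /api/v1 router. A hypothetical health-check module to illustrate the wiring:

# core/api/health.py (hypothetical, not part of the commit)
from fastapi import APIRouter

health_router = APIRouter(prefix='/health', tags=['health'])


@health_router.get('/')
async def healthcheck() -> dict:
    return {'status': 'ok'}


# core/api/__init__.py would then include it after creating `router`:
#     from core.api.health import health_router
#     router.include_router(health_router)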

0
core/helpers/__init__.py Normal file

0
core/models/__init__.py Normal file

50
core/models/base.py Normal file

@ -0,0 +1,50 @@
import typing
from typing import TypedDict, Any

from typing_extensions import Unpack
from pydantic import Field, AliasChoices, AliasPath
from pydantic.fields import FieldInfo

import camelcaser as cc


class FieldParams(TypedDict, total=False):
    default: Any | None
    default_factory: typing.Callable[[], Any] | None
    alias: str | None
    alias_priority: int | None
    validation_alias: str | AliasPath | AliasChoices | None
    serialization_alias: str | None
    title: str | None
    description: str | None
    examples: list[Any] | None
    exclude: bool | None
    include: bool | None
    discriminator: str | None
    json_schema_extra: dict[str, Any] | None
    frozen: bool | None
    validate_default: bool | None
    repr: bool
    init_var: bool | None
    kw_only: bool | None
    pattern: str | None
    strict: bool | None
    gt: float | None
    ge: float | None
    lt: float | None
    le: float | None
    multiple_of: float | None
    allow_inf_nan: bool | None
    max_digits: int | None
    decimal_places: int | None
    min_length: int | None
    max_length: int | None
    additional_validation_aliases: list[str] | None


def build_pydantic_field(name: str, **kwargs: Unpack[FieldParams]) -> FieldInfo:
    """Build a Field that serializes to a camel-cased alias and validates from several aliases."""
    additional_validation_aliases = kwargs.pop('additional_validation_aliases', [])
    if not isinstance(additional_validation_aliases, list):
        raise TypeError('Argument additional_validation_aliases must be a list of strings')
    return Field(
        **kwargs,
        alias=cc.make_camel_case(name),
        validation_alias=AliasChoices(name, cc.make_lower_camel_case(name), *additional_validation_aliases),
    )
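
Not part of the commit: a sketch of how build_pydantic_field would presumably be used in a model; the exact alias strings depend on python-camelcaser's output, so they are not asserted here.

# hypothetical model using the helper above
from pydantic import BaseModel

from core.models.base import build_pydantic_field


class UserOut(BaseModel):
    # serialization alias is the camel-cased name; validation accepts the
    # snake_case original plus any extra aliases passed in
    first_name: str = build_pydantic_field(
        'first_name',
        description='Given name',
        additional_validation_aliases=['firstname'],
    )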

11
core/models/db_migrations.py Normal file

@ -0,0 +1,11 @@
import importlib
from pathlib import Path

from database import Base  # re-exported so Alembic's env.py can read Base.metadata

def get_db_files() -> list[str]:
    p = Path('core/models')
    return [str(file) for file in p.rglob('**/db.py')]

# Import each core/models/**/db.py for its side effect: registering models on Base.metadata.
for filename in get_db_files():
    importlib.import_module(filename.replace('.py', '').replace('/', '.'))

__all__ = ['Base']
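
Not part of the commit: the module above imports every core/models/**/db.py so its tables register on Base.metadata before Alembic autogenerates. A hypothetical model file that would be picked up:

# core/models/user/db.py (hypothetical, not part of the commit)
from sqlalchemy.orm import Mapped, mapped_column

from database import Base


class User(Base):
    __tablename__ = 'users'

    id: Mapped[int] = mapped_column(primary_key=True)
    username: Mapped[str] = mapped_column(unique=True)
    email: Mapped[str]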

5
core/storage/__init__.py Normal file

@ -0,0 +1,5 @@
"""
Register our storages
"""

17
core/storage/base.py Normal file

@ -0,0 +1,17 @@
from contextlib import asynccontextmanager
from typing import AsyncIterator

from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker


class BaseStorage:
    def __init__(self, session_maker: async_sessionmaker):
        self.__session_maker: async_sessionmaker = session_maker

    @asynccontextmanager
    async def get_session(self) -> AsyncIterator[AsyncSession]:
        session = self.__session_maker()
        try:
            yield session
        finally:
            await session.aclose()
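
Not part of the commit: a sketch of a concrete storage built on BaseStorage, reusing the hypothetical User model above and the Session factory from database.py:

# core/storage/users.py (hypothetical, not part of the commit)
from sqlalchemy import select

from core.models.user.db import User  # hypothetical model from the sketch above
from core.storage.base import BaseStorage
from database import Session


class UserStorage(BaseStorage):
    async def get_by_username(self, username: str) -> User | None:
        async with self.get_session() as session:
            result = await session.execute(select(User).where(User.username == username))
            return result.scalar_one_or_none()


user_storage = UserStorage(Session)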

17
database.py Normal file

@ -0,0 +1,17 @@
from sqlalchemy.orm import DeclarativeBase
from sqlalchemy.ext.asyncio import AsyncAttrs, create_async_engine, async_sessionmaker

from config import Config


class Base(AsyncAttrs, DeclarativeBase):
    pass


engine = create_async_engine(str(Config.postgres_url), pool_size=20, max_overflow=0)
Session = async_sessionmaker(engine, expire_on_commit=False)
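
Not part of the commit: Alembic is the intended way to create the schema, but for quick local experiments the metadata can be created directly once the model modules are imported. A sketch:

# hypothetical one-off script
import asyncio

import core.models.db_migrations  # noqa: F401  (imports every core/models/**/db.py)
from database import Base, engine


async def create_schema() -> None:
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)


asyncio.run(create_schema())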

37
main.py Normal file

@ -0,0 +1,37 @@
from datetime import datetime, timezone
import uvicorn
from fastapi import FastAPI
from loguru import logger
from starlette.middleware.cors import CORSMiddleware
from config import Config
from core.api import router as api_router
app = FastAPI()
origins = [
"*"
]
app.add_middleware(
CORSMiddleware,
allow_origins=origins,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Including routers
app.include_router(api_router)
@app.on_event('startup')
async def on_startup():
    logger.success('Application startup complete at {time}', time=datetime.now(tz=timezone.utc))


if __name__ == '__main__':
    uvicorn.run('main:app', host=str(Config.host), port=Config.port, reload=True)
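
Not part of the commit: @app.on_event still works on the pinned FastAPI 0.104 but is deprecated in favor of lifespan handlers; an equivalent sketch, reusing the imports already present in main.py:

# lifespan-based alternative to the on_event hook above (sketch)
from contextlib import asynccontextmanager


@asynccontextmanager
async def lifespan(app: FastAPI):
    logger.success('Application startup complete at {time}', time=datetime.now(tz=timezone.utc))
    yield


app = FastAPI(lifespan=lifespan)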

9
requirements.in Normal file

@ -0,0 +1,9 @@
fastapi
uvicorn
sqlalchemy[asyncio]
asyncpg
python-camelcaser
loguru
pydantic-settings
pydantic[email]
alembic

75
requirements.txt Normal file

@ -0,0 +1,75 @@
#
# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
# pip-compile
#
alembic==1.13.0
# via -r requirements.in
annotated-types==0.6.0
# via pydantic
anyio==3.7.1
# via
# fastapi
# starlette
async-timeout==4.0.3
# via asyncpg
asyncpg==0.29.0
# via -r requirements.in
click==8.1.7
# via uvicorn
dnspython==2.4.2
# via email-validator
email-validator==2.1.0.post1
# via pydantic
exceptiongroup==1.2.0
# via anyio
fastapi==0.104.1
# via -r requirements.in
greenlet==3.0.2
# via sqlalchemy
h11==0.14.0
# via uvicorn
idna==3.6
# via
# anyio
# email-validator
loguru==0.7.2
# via -r requirements.in
mako==1.3.0
# via alembic
markupsafe==2.1.3
# via mako
pydantic[email]==2.5.2
# via
# -r requirements.in
# fastapi
# pydantic-settings
pydantic-core==2.14.5
# via pydantic
pydantic-settings==2.1.0
# via -r requirements.in
pyenchant==3.2.2
# via python-camelcaser
python-camelcaser==1.0.2
# via -r requirements.in
python-dotenv==1.0.0
# via pydantic-settings
sniffio==1.3.0
# via anyio
sqlalchemy[asyncio]==2.0.23
# via
# -r requirements.in
# alembic
starlette==0.27.0
# via fastapi
typing-extensions==4.9.0
# via
# alembic
# fastapi
# pydantic
# pydantic-core
# sqlalchemy
# uvicorn
uvicorn==0.24.0.post1
# via -r requirements.in