add sqlmodel and alembic

Michael Herman
2021-09-10 05:42:17 -05:00
parent 21e4a80ee9
commit 448d4723cf
11 changed files with 352 additions and 2 deletions

docker-compose.yml
View File

@ -10,7 +10,7 @@ services:
    ports:
      - 8004:8000
    environment:
-      - DATABASE_URL=postgresql://postgres:postgres@db:5432/foo
+      - DATABASE_URL=postgresql+asyncpg://postgres:postgres@db:5432/foo
    depends_on:
      - db

100
project/alembic.ini Normal file
View File

@ -0,0 +1,100 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator"
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. Valid values are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # default: use os.pathsep

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = postgresql+asyncpg://postgres:postgres@db:5432/foo


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

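One thing worth noting about this file: sqlalchemy.url duplicates the DATABASE_URL that docker-compose already passes to the app. A minimal sketch of a common alternative (not used in this commit) is to let migrations/env.py override the value from the environment so Alembic and the app always point at the same database:

# hypothetical addition to migrations/env.py (not in this commit): prefer the
# DATABASE_URL environment variable over the URL hard-coded in alembic.ini
import os

from alembic import context

config = context.config  # already defined in the generated env.py

if os.environ.get("DATABASE_URL"):
    config.set_main_option("sqlalchemy.url", os.environ["DATABASE_URL"])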
25
project/app/db.py Normal file
View File

@ -0,0 +1,25 @@
import os

from sqlmodel import SQLModel

from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker


DATABASE_URL = os.environ.get("DATABASE_URL")

engine = create_async_engine(DATABASE_URL, echo=True, future=True)


async def init_db():
    async with engine.begin() as conn:
        # await conn.run_sync(SQLModel.metadata.drop_all)
        await conn.run_sync(SQLModel.metadata.create_all)


async def get_session() -> AsyncSession:
    async_session = sessionmaker(
        engine, class_=AsyncSession, expire_on_commit=False
    )
    async with async_session() as session:
        yield session

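get_session above is an async generator meant to be injected by FastAPI (as main.py below shows), but the same engine and session can be used outside a request handler. A hypothetical standalone helper, not part of this commit, assuming DATABASE_URL is set and the Postgres container is reachable:

# seed.py -- hypothetical helper (not in this commit) that reuses app/db.py directly
import asyncio

from app.db import get_session, init_db
from app.models import Song


async def seed():
    await init_db()  # create tables directly; with Alembic in place this is optional

    # get_session is an async generator, so it is consumed manually with `async for`
    async for session in get_session():
        session.add(Song(name="Valerie", artist="Amy Winehouse", year=2007))
        await session.commit()


asyncio.run(seed())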
project/app/main.py
View File

@ -1,4 +1,9 @@
-from fastapi import FastAPI
+from fastapi import Depends, FastAPI
+from sqlalchemy.future import select
+from sqlalchemy.ext.asyncio import AsyncSession
+from app.db import get_session, init_db
+from app.models import Song, SongCreate
app = FastAPI()
@ -6,3 +11,19 @@ app = FastAPI()
@app.get("/ping")
async def pong():
    return {"ping": "pong!"}


@app.get("/songs", response_model=list[Song])
async def get_songs(session: AsyncSession = Depends(get_session)):
    result = await session.execute(select(Song))
    songs = result.scalars().all()
    return [Song(name=song.name, artist=song.artist, year=song.year, id=song.id) for song in songs]


@app.post("/songs")
async def add_song(song: SongCreate, session: AsyncSession = Depends(get_session)):
    song = Song(name=song.name, artist=song.artist, year=song.year)
    session.add(song)
    await session.commit()
    await session.refresh(song)
    return song

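A hypothetical smoke test for the two new endpoints (not part of this commit), assuming the stack is running via docker-compose with the API published on host port 8004 and that the httpx client is installed separately (it is not in requirements.txt):

import httpx

new_song = {"name": "Midnight City", "artist": "M83", "year": 2011}

created = httpx.post("http://localhost:8004/songs", json=new_song).json()
print(created)  # the Song row echoed back, now including its generated "id"

songs = httpx.get("http://localhost:8004/songs").json()
print(songs)    # all songs, shaped by the list[Song] response_model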
17
project/app/models.py Normal file
View File

@ -0,0 +1,17 @@
from typing import Optional

from sqlmodel import SQLModel, Field


class SongBase(SQLModel):
    name: str
    artist: str
    year: Optional[int] = None


class Song(SongBase, table=True):
    id: int = Field(default=None, primary_key=True)


class SongCreate(SongBase):
    pass

1
project/migrations/README Normal file
View File

@ -0,0 +1 @@
Generic single-database configuration with an async dbapi.
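The migration scripts below are ordinarily generated and applied with the Alembic CLI from the project/ directory (alembic revision --autogenerate -m "init", then alembic upgrade head). As a rough sketch, the same steps can also be driven from Python through Alembic's command API, assuming alembic.ini is in the working directory:

# sketch of driving Alembic from Python instead of the CLI (not part of this commit)
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")

# generate an autogenerated revision (equivalent to: alembic revision --autogenerate -m "init")
command.revision(cfg, message="init", autogenerate=True)

# apply all pending migrations (equivalent to: alembic upgrade head)
command.upgrade(cfg, "head")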

87
project/migrations/env.py Normal file
View File

@ -0,0 +1,87 @@
import asyncio
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from sqlalchemy.ext.asyncio import AsyncEngine
from sqlmodel import SQLModel
from alembic import context
from app.models import Song
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = SQLModel.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection):
context.configure(connection=connection, target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()
async def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = AsyncEngine(
engine_from_config(
config.get_section(config.config_ini_section),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
future=True,
)
)
async with connectable.connect() as connection:
await connection.run_sync(do_run_migrations)
if context.is_offline_mode():
run_migrations_offline()
else:
asyncio.run(run_migrations_online())

25
project/migrations/script.py.mako Normal file
View File

@ -0,0 +1,25 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

31
project/migrations/versions/53754b2c08a4_add_year.py Normal file
View File

@ -0,0 +1,31 @@
"""add year
Revision ID: 53754b2c08a4
Revises: f9c634db477d
Create Date: 2021-09-10 00:52:38.668620
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = '53754b2c08a4'
down_revision = 'f9c634db477d'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('song', sa.Column('year', sa.Integer(), nullable=True))
op.create_index(op.f('ix_song_year'), 'song', ['year'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_song_year'), table_name='song')
op.drop_column('song', 'year')
# ### end Alembic commands ###

40
project/migrations/versions/f9c634db477d_init.py Normal file
View File

@ -0,0 +1,40 @@
"""init
Revision ID: f9c634db477d
Revises:
Create Date: 2021-09-10 00:24:32.718895
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = 'f9c634db477d'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('song',
sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('artist', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('id', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_song_artist'), 'song', ['artist'], unique=False)
op.create_index(op.f('ix_song_id'), 'song', ['id'], unique=False)
op.create_index(op.f('ix_song_name'), 'song', ['name'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_song_name'), table_name='song')
op.drop_index(op.f('ix_song_id'), table_name='song')
op.drop_index(op.f('ix_song_artist'), table_name='song')
op.drop_table('song')
# ### end Alembic commands ###

project/requirements.txt
View File

@ -1,2 +1,5 @@
+alembic==1.7.1
+asyncpg==0.24.0
fastapi==0.68.1
+sqlmodel==0.0.4
uvicorn==0.15.0