Skip to content

Commit

Permalink
How PR #3174 compares to initial Alembic config
Browse files Browse the repository at this point in the history
  • Loading branch information
portante committed Jan 17, 2023
1 parent 717b3ea commit f0ebd38
Show file tree
Hide file tree
Showing 4 changed files with 211 additions and 33 deletions.
2 changes: 1 addition & 1 deletion lib/pbench/server/database/alembic.ini
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ version_path_separator = os # Use os.pathsep. Default configuration used for ne
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = driver://user:pass@localhost/dbname
sqlalchemy.url = postgresql://pbench:pbench@localhost:5432/pbench


[post_write_hooks]
Expand Down
21 changes: 20 additions & 1 deletion lib/pbench/server/database/alembic/README
Original file line number Diff line number Diff line change
@@ -1 +1,20 @@
Generic single-database configuration.
Generic single-database configuration.

Some useful commands to run migrations:

Migration commit files are stored in alembic/versions folder.

To create a db migration file
# alembic revision --autogenerate -m "First commit"

Using the above command, Alembic generates our first migration commit file in the versions folder.
File names are usually stored as revision_id_<commit_message>.py

Once this file is generated we are ready for database migration.
# alembic upgrade head

To upgrade to a specific revision
# alembic upgrade <revision_id>

To downgrade to a specific revision
# alembic downgrade <revision_id>
52 changes: 21 additions & 31 deletions lib/pbench/server/database/alembic/env.py
Original file line number Diff line number Diff line change
@@ -1,42 +1,35 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context
from sqlalchemy import engine_from_config, pool

from pbench.server.database.database import Database

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
# This is the Alembic Config object, which provides access to the values within
# the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
# Interpret the config file for Python logging and setup the loggers.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None
# Add your model's MetaData object here for 'autogenerate' support:
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = Database.Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
# Other values from the config, defined by the needs of env.py, can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
This configures the context with just a URL and not an Engine, though an
Engine is acceptable here as well. By skipping the Engine creation we don't
even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
Expand All @@ -53,9 +46,8 @@ def run_migrations_offline() -> None:
def run_migrations_online() -> None:
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
In this scenario we need to create an Engine and associate a connection with
the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section),
Expand All @@ -64,9 +56,7 @@ def run_migrations_online() -> None:
)

with connectable.connect() as connection:
context.configure(
connection=connection, target_metadata=target_metadata
)
context.configure(connection=connection, target_metadata=target_metadata)

with context.begin_transaction():
context.run_migrations()
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,169 @@
"""Initial database schema
Revision ID: fa12f45a2a5a
Revises:
Create Date: 2023-01-16 18:33:29.144835
Since we are adding Alembic migrations after we have already been using our
database in various contexts, this "Initial database schema" migration describes
how to bring an empty database up to the state of the database as of commit
6a764f154. That commit was the latest working version of the Pbench Server
deployed in Red Hat's staging environment.
"""
from alembic import op
import sqlalchemy as sa

from pbench.server.database.models import TZDateTime

# revision identifiers, used by Alembic.
revision = "fa12f45a2a5a"
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    """Create the initial Pbench Server database schema.

    Creates the seven tables in use as of the baseline commit noted in the
    module docstring (audit, datasets, serverconfig, templates, users,
    active_tokens, dataset_metadata) plus their unique indexes.

    Ordering note: `users` is created before `active_tokens`, and `datasets`
    before `dataset_metadata`, because the latter tables declare foreign keys
    into the former.
    """
    # ### commands auto generated by Alembic - please adjust! ###

    # Audit log of server operations; enum columns define new PostgreSQL
    # enum types (operationcode, audittype, auditstatus, auditreason).
    op.create_table(
        "audit",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        # root_id/name identify the root audit record of a multi-step action.
        sa.Column("root_id", sa.Integer(), nullable=True),
        sa.Column("name", sa.String(length=128), nullable=True),
        sa.Column(
            "operation",
            sa.Enum("CREATE", "READ", "UPDATE", "DELETE", name="operationcode"),
            nullable=False,
        ),
        sa.Column(
            "object_type",
            sa.Enum("DATASET", "CONFIG", "NONE", "TEMPLATE", "TOKEN", name="audittype"),
            nullable=True,
        ),
        sa.Column("object_id", sa.String(length=128), nullable=True),
        sa.Column("object_name", sa.String(length=256), nullable=True),
        sa.Column("user_id", sa.String(length=128), nullable=True),
        sa.Column("user_name", sa.String(length=256), nullable=True),
        sa.Column(
            "status",
            sa.Enum("BEGIN", "SUCCESS", "FAILURE", "WARNING", name="auditstatus"),
            nullable=False,
        ),
        sa.Column(
            "reason",
            sa.Enum("PERMISSION", "INTERNAL", "CONSISTENCY", name="auditreason"),
            nullable=True,
        ),
        sa.Column("attributes", sa.JSON(), nullable=True),
        # TZDateTime is the project's timezone-aware datetime decorator type.
        sa.Column("timestamp", TZDateTime(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )

    # One row per dataset; resource_id (the dataset's hash) must be unique.
    op.create_table(
        "datasets",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("name", sa.String(length=1024), nullable=False),
        sa.Column("owner_id", sa.String(length=255), nullable=False),
        sa.Column("access", sa.String(length=255), nullable=False),
        sa.Column("resource_id", sa.String(length=255), nullable=False),
        sa.Column("uploaded", TZDateTime(), nullable=False),
        # created is nullable: not known until the tarball metadata is read.
        sa.Column("created", TZDateTime(), nullable=True),
        sa.Column(
            "state",
            sa.Enum(
                "UPLOADING",
                "UPLOADED",
                "INDEXING",
                "INDEXED",
                "DELETING",
                "DELETED",
                name="states",
            ),
            nullable=False,
        ),
        # Timestamp of the most recent state transition.
        sa.Column("transition", TZDateTime(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("resource_id"),
    )

    # Key/value server configuration settings, indexed uniquely by key.
    op.create_table(
        "serverconfig",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("key", sa.String(length=255), nullable=False),
        sa.Column("value", sa.JSON(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_serverconfig_key"), "serverconfig", ["key"], unique=True)

    # Elasticsearch index template definitions.
    op.create_table(
        "templates",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("idxname", sa.String(length=255), nullable=False),
        sa.Column("template_name", sa.String(length=255), nullable=False),
        sa.Column("file", sa.String(length=255), nullable=False),
        sa.Column("mtime", sa.DateTime(), nullable=False),
        sa.Column("template_pattern", sa.String(length=255), nullable=False),
        # NOTE(review): length=225 (not 255) looks like a typo carried over
        # from the model definition; preserved here to match the deployed
        # schema this migration reproduces.
        sa.Column("index_template", sa.String(length=225), nullable=False),
        sa.Column("settings", sa.JSON(), nullable=False),
        sa.Column("mappings", sa.JSON(), nullable=False),
        sa.Column("version", sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("idxname"),
        sa.UniqueConstraint("name"),
        sa.UniqueConstraint("template_name"),
    )

    # Registered users; username and email are each unique.
    op.create_table(
        "users",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("username", sa.String(length=255), nullable=False),
        sa.Column("first_name", sa.String(length=255), nullable=False),
        sa.Column("last_name", sa.String(length=255), nullable=False),
        sa.Column("password", sa.LargeBinary(length=128), nullable=False),
        sa.Column("registered_on", sa.DateTime(), nullable=False),
        sa.Column("email", sa.String(length=255), nullable=False),
        sa.Column("role", sa.Enum("ADMIN", name="roles"), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("email"),
        sa.UniqueConstraint("username"),
    )

    # Active auth tokens; rows are removed automatically when the owning
    # user is deleted (ON DELETE CASCADE).
    op.create_table(
        "active_tokens",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("token", sa.String(length=500), nullable=False),
        sa.Column("created", sa.DateTime(), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        op.f("ix_active_tokens_token"), "active_tokens", ["token"], unique=True
    )

    # Per-dataset key/value metadata; key index is non-unique because a key
    # may appear for many datasets.
    op.create_table(
        "dataset_metadata",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("key", sa.String(length=255), nullable=False),
        sa.Column("value", sa.JSON(), nullable=True),
        sa.Column("dataset_ref", sa.Integer(), nullable=False),
        sa.Column("user_id", sa.String(length=255), nullable=True),
        sa.ForeignKeyConstraint(
            ["dataset_ref"],
            ["datasets.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        op.f("ix_dataset_metadata_key"), "dataset_metadata", ["key"], unique=False
    )
    # ### end Alembic commands ###


def downgrade():
    """Drop the entire initial schema, returning to an empty database.

    Tables are dropped in reverse dependency order: the FK-bearing tables
    (dataset_metadata, active_tokens) and their indexes go first, then the
    tables they reference (datasets, users) and the independent tables.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f("ix_dataset_metadata_key"), table_name="dataset_metadata")
    op.drop_table("dataset_metadata")
    op.drop_index(op.f("ix_active_tokens_token"), table_name="active_tokens")
    op.drop_table("active_tokens")
    op.drop_table("users")
    op.drop_table("templates")
    op.drop_index(op.f("ix_serverconfig_key"), table_name="serverconfig")
    op.drop_table("serverconfig")
    op.drop_table("datasets")
    op.drop_table("audit")
    # ### end Alembic commands ###

0 comments on commit f0ebd38

Please sign in to comment.