How PR distributed-system-analysis#3174 compares to initial Alembic config
portante committed Jan 17, 2023
1 parent 717b3ea commit 495ac72
Showing 5 changed files with 220 additions and 47 deletions.
17 changes: 6 additions & 11 deletions lib/pbench/server/database/alembic.ini
@@ -4,11 +4,8 @@
# path to migration scripts
script_location = alembic

-# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
-# Uncomment the line below if you want the files to be prepended with date and time
-# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
-# for all available tokens
-# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
@@ -38,24 +35,22 @@ prepend_sys_path = .
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
-# The path separator used here should be the separator specified by "version_path_separator" below.
+# The path separator used here should be the separator specified by "version_path_separator"
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
-# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
-# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
-# Valid values for version_path_separator are:
+# version_locations. Valid values are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
-version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
+version_path_separator = os # default: use os.pathsep

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

-sqlalchemy.url = driver://user:pass@localhost/dbname
+sqlalchemy.url = postgresql://pbench:pbench@localhost:5432/pbench


[post_write_hooks]
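A hard-coded URL like the one above is often overridden at runtime from env.py rather than edited in alembic.ini. A minimal sketch of that pattern, assuming a hypothetical PBENCH_DB_URL environment variable (Config.set_main_option() is standard Alembic API):

import os

from alembic import context

# Prefer an environment-supplied URL; otherwise keep the alembic.ini value.
url = os.environ.get("PBENCH_DB_URL")
if url:
    context.config.set_main_option("sqlalchemy.url", url)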
21 changes: 20 additions & 1 deletion lib/pbench/server/database/alembic/README
@@ -1 +1,20 @@
-Generic single-database configuration.
+Generic single-database configuration.
+
+Some useful commands to run migrations:
+
+Migration commit files are stored in the alembic/versions folder.
+
+To create a DB migration file:
+# alembic revision --autogenerate -m "First commit"
+
+Using the above command, Alembic generates our first migration commit file in
+the versions folder. File names usually take the form <revision_id>_<slug>.py,
+where the slug is derived from the commit message.
+
+Once this file is generated, we are ready for database migration:
+# alembic upgrade head
+
+To upgrade to a specific revision:
+# alembic upgrade <revision_id>
+
+To downgrade to a specific revision:
+# alembic downgrade <revision_id>
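A few more standard Alembic commands are often useful alongside the ones above; these are general CLI commands, not specific to this repository:

To show the revision currently applied to the database
# alembic current

To list the migration history
# alembic history

To step back exactly one revision
# alembic downgrade -1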
56 changes: 23 additions & 33 deletions lib/pbench/server/database/alembic/env.py
@@ -1,42 +1,35 @@
from logging.config import fileConfig

-from sqlalchemy import engine_from_config
-from sqlalchemy import pool
-
from alembic import context
+from sqlalchemy import engine_from_config, pool

+from pbench.server.database.database import Database

-# this is the Alembic Config object, which provides
-# access to the values within the .ini file in use.
+# This is the Alembic Config object, which provides access to the values within
+# the .ini file in use.
config = context.config

-# Interpret the config file for Python logging.
-# This line sets up loggers basically.
-if config.config_file_name is not None:
-    fileConfig(config.config_file_name)
+# Interpret the config file for Python logging and set up the loggers.
+fileConfig(config.config_file_name)

-# add your model's MetaData object here
-# for 'autogenerate' support
-# from myapp import mymodel
-# target_metadata = mymodel.Base.metadata
-target_metadata = None
+# Add your model's MetaData object here for 'autogenerate' support:
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+target_metadata = Database.Base.metadata

-# other values from the config, defined by the needs of env.py,
-# can be acquired:
-# my_important_option = config.get_main_option("my_important_option")
-# ... etc.
+# Other values from the config, defined by the needs of env.py, can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.


-def run_migrations_offline() -> None:
+def run_migrations_offline():
    """Run migrations in 'offline' mode.
-    This configures the context with just a URL
-    and not an Engine, though an Engine is acceptable
-    here as well. By skipping the Engine creation
-    we don't even need a DBAPI to be available.
-    Calls to context.execute() here emit the given string to the
-    script output.
+    This configures the context with just a URL and not an Engine, though an
+    Engine is acceptable here as well. By skipping the Engine creation we don't
+    even need a DBAPI to be available.
+    Calls to context.execute() here emit the given string to the script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
@@ -50,12 +43,11 @@ def run_migrations_offline() -> None:
        context.run_migrations()


-def run_migrations_online() -> None:
+def run_migrations_online():
    """Run migrations in 'online' mode.
-    In this scenario we need to create an Engine
-    and associate a connection with the context.
+    In this scenario we need to create an Engine and associate a connection with
+    the context.
    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
@@ -64,9 +56,7 @@ def run_migrations_online() -> None:
    )

    with connectable.connect() as connection:
-        context.configure(
-            connection=connection, target_metadata=target_metadata
-        )
+        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()
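For orientation, a stock env.py ends with a small dispatch that selects between these two functions; it lies beyond the lines shown in this diff:

if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()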
4 changes: 2 additions & 2 deletions lib/pbench/server/database/alembic/script.py.mako
@@ -16,9 +16,9 @@ branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


-def upgrade() -> None:
+def upgrade():
    ${upgrades if upgrades else "pass"}


-def downgrade() -> None:
+def downgrade():
    ${downgrades if downgrades else "pass"}
@@ -0,0 +1,169 @@
"""Initial database schema
Revision ID: fa12f45a2a5a
Revises:
Create Date: 2023-01-16 18:33:29.144835
Since we are adding Alembic migrations after we have already been using our
database in various contexts, this "Initial database schema" migration describes
how to bring an empty database up to the state of the database as of commit
6a764f154. That commit was the latest working version of the Pbench Server
deployed in Red Hat's staging environment.
"""
from alembic import op
import sqlalchemy as sa

from pbench.server.database.models import TZDateTime

# revision identifiers, used by Alembic.
revision = "fa12f45a2a5a"
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "audit",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("root_id", sa.Integer(), nullable=True),
        sa.Column("name", sa.String(length=128), nullable=True),
        sa.Column(
            "operation",
            sa.Enum("CREATE", "READ", "UPDATE", "DELETE", name="operationcode"),
            nullable=False,
        ),
        sa.Column(
            "object_type",
            sa.Enum("DATASET", "CONFIG", "NONE", "TEMPLATE", "TOKEN", name="audittype"),
            nullable=True,
        ),
        sa.Column("object_id", sa.String(length=128), nullable=True),
        sa.Column("object_name", sa.String(length=256), nullable=True),
        sa.Column("user_id", sa.String(length=128), nullable=True),
        sa.Column("user_name", sa.String(length=256), nullable=True),
        sa.Column(
            "status",
            sa.Enum("BEGIN", "SUCCESS", "FAILURE", "WARNING", name="auditstatus"),
            nullable=False,
        ),
        sa.Column(
            "reason",
            sa.Enum("PERMISSION", "INTERNAL", "CONSISTENCY", name="auditreason"),
            nullable=True,
        ),
        sa.Column("attributes", sa.JSON(), nullable=True),
        sa.Column("timestamp", TZDateTime(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "datasets",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("name", sa.String(length=1024), nullable=False),
        sa.Column("owner_id", sa.String(length=255), nullable=False),
        sa.Column("access", sa.String(length=255), nullable=False),
        sa.Column("resource_id", sa.String(length=255), nullable=False),
        sa.Column("uploaded", TZDateTime(), nullable=False),
        sa.Column("created", TZDateTime(), nullable=True),
        sa.Column(
            "state",
            sa.Enum(
                "UPLOADING",
                "UPLOADED",
                "INDEXING",
                "INDEXED",
                "DELETING",
                "DELETED",
                name="states",
            ),
            nullable=False,
        ),
        sa.Column("transition", TZDateTime(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("resource_id"),
    )
    op.create_table(
        "serverconfig",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("key", sa.String(length=255), nullable=False),
        sa.Column("value", sa.JSON(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_serverconfig_key"), "serverconfig", ["key"], unique=True)
    op.create_table(
        "templates",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("idxname", sa.String(length=255), nullable=False),
        sa.Column("template_name", sa.String(length=255), nullable=False),
        sa.Column("file", sa.String(length=255), nullable=False),
        sa.Column("mtime", sa.DateTime(), nullable=False),
        sa.Column("template_pattern", sa.String(length=255), nullable=False),
        sa.Column("index_template", sa.String(length=225), nullable=False),
        sa.Column("settings", sa.JSON(), nullable=False),
        sa.Column("mappings", sa.JSON(), nullable=False),
        sa.Column("version", sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("idxname"),
        sa.UniqueConstraint("name"),
        sa.UniqueConstraint("template_name"),
    )
    op.create_table(
        "users",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("username", sa.String(length=255), nullable=False),
        sa.Column("first_name", sa.String(length=255), nullable=False),
        sa.Column("last_name", sa.String(length=255), nullable=False),
        sa.Column("password", sa.LargeBinary(length=128), nullable=False),
        sa.Column("registered_on", sa.DateTime(), nullable=False),
        sa.Column("email", sa.String(length=255), nullable=False),
        sa.Column("role", sa.Enum("ADMIN", name="roles"), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("email"),
        sa.UniqueConstraint("username"),
    )
    op.create_table(
        "active_tokens",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("token", sa.String(length=500), nullable=False),
        sa.Column("created", sa.DateTime(), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        op.f("ix_active_tokens_token"), "active_tokens", ["token"], unique=True
    )
    op.create_table(
        "dataset_metadata",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("key", sa.String(length=255), nullable=False),
        sa.Column("value", sa.JSON(), nullable=True),
        sa.Column("dataset_ref", sa.Integer(), nullable=False),
        sa.Column("user_id", sa.String(length=255), nullable=True),
        sa.ForeignKeyConstraint(
            ["dataset_ref"],
            ["datasets.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        op.f("ix_dataset_metadata_key"), "dataset_metadata", ["key"], unique=False
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f("ix_dataset_metadata_key"), table_name="dataset_metadata")
    op.drop_table("dataset_metadata")
    op.drop_index(op.f("ix_active_tokens_token"), table_name="active_tokens")
    op.drop_table("active_tokens")
    op.drop_table("users")
    op.drop_table("templates")
    op.drop_index(op.f("ix_serverconfig_key"), table_name="serverconfig")
    op.drop_table("serverconfig")
    op.drop_table("datasets")
    op.drop_table("audit")
    # ### end Alembic commands ###
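As the docstring above notes, existing Pbench Server databases already match this schema, so they are typically marked as current rather than re-migrated, while an empty database runs the migration itself. With standard Alembic commands:

To record that an existing, already-populated database is at this revision, without running the DDL
# alembic stamp fa12f45a2a5a

To build the schema from scratch on an empty database
# alembic upgrade head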
