Set up Alembic database migrations
- Initialize Alembic in bloxserver/ - Configure for async SQLAlchemy (converts async URLs to sync for migrations) - Generate initial migration with all existing tables: - users (Clerk sync, Stripe billing) - flows (workflow definitions) - triggers (webhook, schedule, manual, event) - executions (run history) - user_api_keys (BYOK encrypted storage) - usage_records (billing metrics) - stripe_events (webhook idempotency) Usage: cd bloxserver alembic upgrade head # Apply migrations alembic revision --autogenerate -m 'description' # New migration Co-authored-by: Dan
This commit is contained in:
parent
cf49154ddd
commit
a6f44b1fc9
5 changed files with 449 additions and 0 deletions
151
bloxserver/alembic.ini
Normal file
151
bloxserver/alembic.ini
Normal file
|
|
@ -0,0 +1,151 @@
|
|||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# path to migration scripts.
|
||||
# this is typically a path given in POSIX (e.g. forward slashes)
|
||||
# format, relative to the token %(here)s which refers to the location of this
|
||||
# ini file
|
||||
script_location = %(here)s/alembic
|
||||
|
||||
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
|
||||
# Uncomment the line below if you want the files to be prepended with date and time
|
||||
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
|
||||
# for all available tokens
|
||||
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
||||
# Or organize into date-based subdirectories (requires recursive_version_locations = true)
|
||||
# file_template = %%(year)d/%%(month).2d/%%(day).2d_%%(hour).2d%%(minute).2d_%%(second).2d_%%(rev)s_%%(slug)s
|
||||
|
||||
# sys.path path, will be prepended to sys.path if present.
|
||||
# defaults to the current working directory. for multiple paths, the path separator
|
||||
# is defined by "path_separator" below.
|
||||
prepend_sys_path = .
|
||||
|
||||
|
||||
# timezone to use when rendering the date within the migration file
|
||||
# as well as the filename.
|
||||
# If specified, requires the tzdata library which can be installed by adding
|
||||
# `alembic[tz]` to the pip requirements.
|
||||
# string value is passed to ZoneInfo()
|
||||
# leave blank for localtime
|
||||
# timezone =
|
||||
|
||||
# max length of characters to apply to the "slug" field
|
||||
# truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
# sourceless = false
|
||||
|
||||
# version location specification; This defaults
|
||||
# to <script_location>/versions. When using multiple version
|
||||
# directories, initial revisions must be specified with --version-path.
|
||||
# The path separator used here should be the separator specified by "path_separator"
|
||||
# below.
|
||||
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions
|
||||
|
||||
# path_separator; This indicates what character is used to split lists of file
|
||||
# paths, including version_locations and prepend_sys_path within configparser
|
||||
# files such as alembic.ini.
|
||||
# The default rendered in new alembic.ini files is "os", which uses os.pathsep
|
||||
# to provide os-dependent path splitting.
|
||||
#
|
||||
# Note that in order to support legacy alembic.ini files, this default does NOT
|
||||
# take place if path_separator is not present in alembic.ini. If this
|
||||
# option is omitted entirely, fallback logic is as follows:
|
||||
#
|
||||
# 1. Parsing of the version_locations option falls back to using the legacy
|
||||
# "version_path_separator" key, which if absent then falls back to the legacy
|
||||
# behavior of splitting on spaces and/or commas.
|
||||
# 2. Parsing of the prepend_sys_path option falls back to the legacy
|
||||
# behavior of splitting on spaces, commas, or colons.
|
||||
#
|
||||
# Valid values for path_separator are:
|
||||
#
|
||||
# path_separator = :
|
||||
# path_separator = ;
|
||||
# path_separator = space
|
||||
# path_separator = newline
|
||||
#
|
||||
# Use os.pathsep. Default configuration used for new projects.
|
||||
path_separator = os
|
||||
|
||||
# set to 'true' to search source files recursively
|
||||
# in each "version_locations" directory
|
||||
# new in Alembic version 1.10
|
||||
# recursive_version_locations = false
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
# output_encoding = utf-8
|
||||
|
||||
# database URL. This is consumed by the user-maintained env.py script only.
|
||||
# other means of configuring database URLs may be customized within the env.py
|
||||
# file.
|
||||
# Database URL is loaded from DATABASE_URL environment variable in env.py
|
||||
# The value below is a placeholder only; env.py always overrides it with get_url()
|
||||
sqlalchemy.url = postgresql://localhost/bloxserver
|
||||
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts. See the documentation for further
|
||||
# detail and examples
|
||||
|
||||
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
||||
# hooks = black
|
||||
# black.type = console_scripts
|
||||
# black.entrypoint = black
|
||||
# black.options = -l 79 REVISION_SCRIPT_FILENAME
|
||||
|
||||
# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
|
||||
# hooks = ruff
|
||||
# ruff.type = module
|
||||
# ruff.module = ruff
|
||||
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
|
||||
|
||||
# Alternatively, use the exec runner to execute a binary found on your PATH
|
||||
# hooks = ruff
|
||||
# ruff.type = exec
|
||||
# ruff.executable = ruff
|
||||
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
|
||||
|
||||
# Logging configuration. This is also consumed by the user-maintained
|
||||
# env.py script only.
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARNING
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARNING
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
1
bloxserver/alembic/README
Normal file
1
bloxserver/alembic/README
Normal file
|
|
@ -0,0 +1 @@
|
|||
Generic single-database configuration.
|
||||
108
bloxserver/alembic/env.py
Normal file
108
bloxserver/alembic/env.py
Normal file
|
|
@ -0,0 +1,108 @@
|
|||
"""
|
||||
Alembic migration environment.
|
||||
|
||||
Configured for async SQLAlchemy with PostgreSQL/SQLite.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import sys
|
||||
from logging.config import fileConfig
|
||||
|
||||
from sqlalchemy import engine_from_config, pool
|
||||
|
||||
from alembic import context
|
||||
|
||||
# Add parent directory to path so 'bloxserver' package is importable
|
||||
# bloxserver/ is at xml-pipeline/bloxserver/, so we need xml-pipeline/ on the path
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
|
||||
|
||||
# Import models to register them with Base.metadata
|
||||
from bloxserver.api.models.database import Base
|
||||
from bloxserver.api.models import tables # noqa: F401 - imports register models
|
||||
|
||||
# Alembic Config object
|
||||
config = context.config
|
||||
|
||||
# Setup logging from alembic.ini
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
# Target metadata for autogenerate
|
||||
target_metadata = Base.metadata
|
||||
|
||||
|
||||
def get_url() -> str:
    """
    Get the database URL from the environment.

    Converts async driver URLs (postgresql+asyncpg://, sqlite+aiosqlite://)
    to their sync equivalents because Alembic runs migrations synchronously.
    Falls back to a local SQLite file when DATABASE_URL is unset.
    """
    url = os.getenv("DATABASE_URL", "sqlite:///./bloxserver.db")

    # Map each known async scheme prefix onto its synchronous counterpart.
    _ASYNC_TO_SYNC = (
        ("postgresql+asyncpg://", "postgresql://"),
        ("sqlite+aiosqlite://", "sqlite://"),
    )
    for async_prefix, sync_prefix in _ASYNC_TO_SYNC:
        if url.startswith(async_prefix):
            return sync_prefix + url[len(async_prefix):]

    return url
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
    """
    Run migrations in 'offline' mode.

    Emits the migration SQL as a script instead of connecting to a
    database — useful for reviewing DDL before applying it.
    """
    context.configure(
        url=get_url(),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        # Detect column type and server-default changes during autogenerate.
        compare_type=True,
        compare_server_default=True,
    )

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
    """
    Run migrations in 'online' mode.

    Builds a throwaway engine, connects to the database, and applies
    migrations directly.
    """
    ini_section = config.get_section(config.config_ini_section, {})
    # Override whatever alembic.ini carries with the real URL from env.
    ini_section["sqlalchemy.url"] = get_url()

    engine = engine_from_config(
        ini_section,
        prefix="sqlalchemy.",
        # No pooling needed for a one-shot migration run.
        poolclass=pool.NullPool,
    )

    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            # Detect column type and server-default changes during autogenerate.
            compare_type=True,
            compare_server_default=True,
        )

        with context.begin_transaction():
            context.run_migrations()
|
||||
|
||||
|
||||
# Entry point: Alembic sets offline mode for `alembic upgrade --sql`
# (and similar --sql invocations); otherwise connect and apply directly.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
||||
28
bloxserver/alembic/script.py.mako
Normal file
28
bloxserver/alembic/script.py.mako
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
"""Upgrade schema."""
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
"""Downgrade schema."""
|
||||
${downgrades if downgrades else "pass"}
|
||||
161
bloxserver/alembic/versions/7136cc209524_initial_schema.py
Normal file
161
bloxserver/alembic/versions/7136cc209524_initial_schema.py
Normal file
|
|
@ -0,0 +1,161 @@
|
|||
"""initial schema
|
||||
|
||||
Revision ID: 7136cc209524
|
||||
Revises:
|
||||
Create Date: 2026-01-26 07:21:39.594527
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '7136cc209524'
|
||||
down_revision: Union[str, Sequence[str], None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Upgrade schema: create all initial tables and their indexes.

    Creation order respects FK dependencies: users -> flows ->
    (usage_records, user_api_keys, triggers) -> executions.
    stripe_events has no FKs and can be created first.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): sa.Enum with name='triggertype' appears in both the
    # triggers and executions tables below. On PostgreSQL, autogenerated
    # code like this may attempt to CREATE TYPE twice — confirm this
    # migration applies cleanly against PostgreSQL, not just SQLite.
    # NOTE(review): server_default '(CURRENT_TIMESTAMP)' was rendered for the
    # autogenerate dialect; verify it is valid on every target database.

    # stripe_events: webhook idempotency log keyed by Stripe's event id.
    op.create_table('stripe_events',
    sa.Column('event_id', sa.String(length=255), nullable=False),
    sa.Column('event_type', sa.String(length=100), nullable=False),
    sa.Column('processed_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
    sa.Column('payload', sa.JSON(), nullable=True),
    sa.PrimaryKeyConstraint('event_id')
    )
    op.create_index('idx_stripe_events_processed', 'stripe_events', ['processed_at'], unique=False)

    # users: Clerk-synced accounts plus Stripe billing state.
    op.create_table('users',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('clerk_id', sa.String(length=255), nullable=False),
    sa.Column('email', sa.String(length=255), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=True),
    sa.Column('avatar_url', sa.Text(), nullable=True),
    sa.Column('stripe_customer_id', sa.String(length=255), nullable=True),
    sa.Column('stripe_subscription_id', sa.String(length=255), nullable=True),
    sa.Column('stripe_subscription_item_id', sa.String(length=255), nullable=True),
    sa.Column('tier', sa.Enum('FREE', 'PRO', 'ENTERPRISE', 'HIGH_FREQUENCY', name='tier'), nullable=False),
    sa.Column('billing_status', sa.Enum('ACTIVE', 'TRIALING', 'PAST_DUE', 'CANCELED', 'CANCELING', name='billingstatus'), nullable=False),
    sa.Column('trial_ends_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('current_period_start', sa.DateTime(timezone=True), nullable=True),
    sa.Column('current_period_end', sa.DateTime(timezone=True), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('clerk_id'),
    sa.UniqueConstraint('stripe_customer_id')
    )
    op.create_index('idx_users_clerk_id', 'users', ['clerk_id'], unique=False)
    op.create_index('idx_users_stripe_customer', 'users', ['stripe_customer_id'], unique=False)

    # flows: workflow definitions owned by a user; cascade-deleted with the user.
    op.create_table('flows',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('name', sa.String(length=100), nullable=False),
    sa.Column('description', sa.String(length=500), nullable=True),
    sa.Column('organism_yaml', sa.Text(), nullable=False),
    sa.Column('canvas_state', sa.JSON(), nullable=True),
    sa.Column('status', sa.Enum('STOPPED', 'STARTING', 'RUNNING', 'STOPPING', 'ERROR', name='flowstatus'), nullable=False),
    sa.Column('container_id', sa.String(length=255), nullable=True),
    sa.Column('error_message', sa.Text(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_flows_status', 'flows', ['status'], unique=False)
    op.create_index('idx_flows_user_id', 'flows', ['user_id'], unique=False)

    # usage_records: billing metrics, one row per user per billing period
    # (enforced by the unique index on user_id + period_start below).
    op.create_table('usage_records',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('period_start', sa.DateTime(timezone=True), nullable=False),
    sa.Column('workflow_runs', sa.Integer(), nullable=False),
    sa.Column('llm_tokens_in', sa.Integer(), nullable=False),
    sa.Column('llm_tokens_out', sa.Integer(), nullable=False),
    sa.Column('wasm_cpu_seconds', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('storage_gb_hours', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('last_synced_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('last_synced_runs', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_usage_user_period', 'usage_records', ['user_id', 'period_start'], unique=True)

    # user_api_keys: BYOK storage; the key itself is encrypted bytes,
    # one key per (user, provider) pair via the unique index below.
    op.create_table('user_api_keys',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('user_id', sa.UUID(), nullable=False),
    sa.Column('provider', sa.String(length=50), nullable=False),
    sa.Column('encrypted_key', sa.LargeBinary(), nullable=False),
    sa.Column('key_hint', sa.String(length=20), nullable=True),
    sa.Column('is_valid', sa.Boolean(), nullable=False),
    sa.Column('last_error', sa.String(length=255), nullable=True),
    sa.Column('last_used_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_user_api_keys_user_provider', 'user_api_keys', ['user_id', 'provider'], unique=True)

    # triggers: how a flow gets started (webhook/schedule/manual);
    # webhook_token is globally unique when present.
    op.create_table('triggers',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('flow_id', sa.UUID(), nullable=False),
    sa.Column('type', sa.Enum('WEBHOOK', 'SCHEDULE', 'MANUAL', name='triggertype'), nullable=False),
    sa.Column('name', sa.String(length=100), nullable=False),
    sa.Column('config', sa.JSON(), nullable=False),
    sa.Column('webhook_token', sa.String(length=64), nullable=True),
    sa.Column('webhook_url', sa.Text(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
    sa.ForeignKeyConstraint(['flow_id'], ['flows.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('webhook_token')
    )
    op.create_index('idx_triggers_flow_id', 'triggers', ['flow_id'], unique=False)
    op.create_index('idx_triggers_webhook_token', 'triggers', ['webhook_token'], unique=False)

    # executions: run history; trigger_id is SET NULL on trigger deletion
    # so history survives trigger removal.
    op.create_table('executions',
    sa.Column('id', sa.UUID(), nullable=False),
    sa.Column('flow_id', sa.UUID(), nullable=False),
    sa.Column('trigger_id', sa.UUID(), nullable=True),
    sa.Column('trigger_type', sa.Enum('WEBHOOK', 'SCHEDULE', 'MANUAL', name='triggertype'), nullable=False),
    sa.Column('status', sa.Enum('RUNNING', 'SUCCESS', 'ERROR', 'TIMEOUT', name='executionstatus'), nullable=False),
    sa.Column('error_message', sa.Text(), nullable=True),
    sa.Column('input_payload', sa.Text(), nullable=True),
    sa.Column('output_payload', sa.Text(), nullable=True),
    sa.Column('started_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
    sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
    sa.Column('duration_ms', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['flow_id'], ['flows.id'], ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['trigger_id'], ['triggers.id'], ondelete='SET NULL'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('idx_executions_flow_id', 'executions', ['flow_id'], unique=False)
    op.create_index('idx_executions_started_at', 'executions', ['started_at'], unique=False)
    op.create_index('idx_executions_status', 'executions', ['status'], unique=False)
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Downgrade schema: drop all tables in reverse dependency order."""
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): on PostgreSQL, drop_table does not drop the ENUM types
    # created in upgrade() (tier, billingstatus, flowstatus, triggertype,
    # executionstatus) — confirm whether explicit DROP TYPE statements are
    # needed for a clean downgrade on that backend.
    op.drop_index('idx_executions_status', table_name='executions')
    op.drop_index('idx_executions_started_at', table_name='executions')
    op.drop_index('idx_executions_flow_id', table_name='executions')
    op.drop_table('executions')
    op.drop_index('idx_triggers_webhook_token', table_name='triggers')
    op.drop_index('idx_triggers_flow_id', table_name='triggers')
    op.drop_table('triggers')
    op.drop_index('idx_user_api_keys_user_provider', table_name='user_api_keys')
    op.drop_table('user_api_keys')
    op.drop_index('idx_usage_user_period', table_name='usage_records')
    op.drop_table('usage_records')
    op.drop_index('idx_flows_user_id', table_name='flows')
    op.drop_index('idx_flows_status', table_name='flows')
    op.drop_table('flows')
    op.drop_index('idx_users_stripe_customer', table_name='users')
    op.drop_index('idx_users_clerk_id', table_name='users')
    op.drop_table('users')
    op.drop_index('idx_stripe_events_processed', table_name='stripe_events')
    op.drop_table('stripe_events')
    # ### end Alembic commands ###
|
||||
Loading…
Reference in a new issue