author     morpheus65535 <[email protected]>    2023-07-26 19:34:49 -0400
committer  GitHub <[email protected]>  2023-07-26 19:34:49 -0400
commit     bccded275c3cb09dc001d66858f3200c78723935 (patch)
tree       14f4409119ff7ca8af5f4827dca249bc0b25f768 /migrations
parent     486d2f9481982fef0ff0a30c314f74e9268cc7fd (diff)
Replaced peewee with sqlalchemy as ORM. This is a major change, please report related issues on Discord. (tag: v1.2.5-beta.3)
Diffstat (limited to 'migrations')
-rw-r--r--   migrations/alembic.ini                  9
-rw-r--r--   migrations/env.py                     104
-rw-r--r--   migrations/script.py.mako              24
-rw-r--r--   migrations/versions/95cd4cf40d7a_.py   42
-rw-r--r--   migrations/versions/dc09994b7e65_.py  271
5 files changed, 450 insertions, 0 deletions
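
Note: the two revisions below form a linear chain (dc09994b7e65 is the baseline; 95cd4cf40d7a revises it). A minimal sketch of how an application would apply them with Flask-Migrate; the app wiring and database URI here are assumptions for illustration, not code from this commit:

    from flask import Flask
    from flask_sqlalchemy import SQLAlchemy
    from flask_migrate import Migrate, upgrade

    app = Flask(__name__)
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///bazarr.db'  # placeholder URI
    db = SQLAlchemy(app)
    Migrate(app, db, directory='migrations')

    with app.app_context():
        upgrade()  # applies dc09994b7e65, then 95cd4cf40d7a, per the down_revision chain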
diff --git a/migrations/alembic.ini b/migrations/alembic.ini
new file mode 100644
index 000000000..5f8ee17e0
--- /dev/null
+++ b/migrations/alembic.ini
@@ -0,0 +1,9 @@
+# A generic, single database configuration.
+
+[alembic]
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
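
Note: both options above stay commented out, so Alembic's defaults apply; the doubled %% is configparser escaping for a literal %. The commented file_template matches the default %(rev)s_%(slug)s, and because both revisions in this commit were generated with an empty message, the slug is empty and the files come out as 95cd4cf40d7a_.py and dc09994b7e65_.py. A rough Python sketch of the slug derivation (an approximation of Alembic's behavior, not its exact code):

    import re

    def slugify(message, truncate=40):
        # approximate Alembic's slug: runs of non-word chars become '_',
        # lowercased, truncated to the configured slug length
        slug = re.sub(r'\W+', '_', message or '').strip('_').lower()
        return slug[:truncate]

    print(slugify('add matched columns'))  # 'add_matched_columns'
    print(slugify(''))                     # '' -> file named like 95cd4cf40d7a_.py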
diff --git a/migrations/env.py b/migrations/env.py
new file mode 100644
index 000000000..d706218a2
--- /dev/null
+++ b/migrations/env.py
@@ -0,0 +1,104 @@
+from flask import current_app
+
+from alembic import context
+
+import logging
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+
+def get_engine():
+    try:
+        # this works with Flask-SQLAlchemy<3 and Alchemical
+        return current_app.extensions['migrate'].db.get_engine()
+    except TypeError:
+        # this works with Flask-SQLAlchemy>=3
+        return current_app.extensions['migrate'].db.engine
+
+
+def get_engine_url():
+    try:
+        return get_engine().url.render_as_string(hide_password=False).replace(
+            '%', '%%')
+    except AttributeError:
+        return str(get_engine().url).replace('%', '%%')
+
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+config.set_main_option('sqlalchemy.url', get_engine_url())
+target_db = current_app.extensions['migrate'].db
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def get_metadata():
+    if hasattr(target_db, 'metadatas'):
+        return target_db.metadatas[None]
+    return target_db.metadata
+
+
+def run_migrations_offline():
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well. By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(
+        url=url, target_metadata=get_metadata(), literal_binds=True
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online():
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+
+    # this callback is used to prevent an auto-migration from being generated
+    # when there are no changes to the schema
+    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
+    def process_revision_directives(context, revision, directives):
+        if getattr(config.cmd_opts, 'autogenerate', False):
+            script = directives[0]
+            if script.upgrade_ops.is_empty():
+                directives[:] = []
+                logging.info('No changes in schema detected.')
+
+    connectable = get_engine()
+
+    with connectable.connect() as connection:
+        context.configure(
+            connection=connection,
+            target_metadata=get_metadata(),
+            process_revision_directives=process_revision_directives,
+            **current_app.extensions['migrate'].configure_args
+        )
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
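
Note: the '%'-doubling in get_engine_url() above matters because the URL is fed back through config.set_main_option(), and alembic.ini values pass through configparser interpolation, where a bare % is an escape character. A small sketch with a hypothetical URL:

    # Hypothetical database URL whose password contains a literal '%':
    url = 'postgresql://bazarr:p%40ss@localhost/bazarr'
    escaped = url.replace('%', '%%')
    assert escaped == 'postgresql://bazarr:p%%40ss@localhost/bazarr'
    # configparser interpolation later collapses '%%' back to '%',
    # so Alembic connects with the original URL.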
diff --git a/migrations/script.py.mako b/migrations/script.py.mako
new file mode 100644
index 000000000..2c0156303
--- /dev/null
+++ b/migrations/script.py.mako
@@ -0,0 +1,24 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+
+def upgrade():
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade():
+    ${downgrades if downgrades else "pass"}
diff --git a/migrations/versions/95cd4cf40d7a_.py b/migrations/versions/95cd4cf40d7a_.py
new file mode 100644
index 000000000..734bbc950
--- /dev/null
+++ b/migrations/versions/95cd4cf40d7a_.py
@@ -0,0 +1,42 @@
+"""empty message
+
+Revision ID: 95cd4cf40d7a
+Revises: dc09994b7e65
+Create Date: 2023-05-30 08:44:11.636511
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '95cd4cf40d7a'
+down_revision = 'dc09994b7e65'
+branch_labels = None
+depends_on = None
+
+bind = op.get_context().bind
+insp = sa.inspect(bind)
+
+
+def column_exists(table_name, column_name):
+    columns = insp.get_columns(table_name)
+    return any(c["name"] == column_name for c in columns)
+
+
+def upgrade():
+    with op.batch_alter_table('table_history') as batch_op:
+        if not column_exists('table_history', 'matched'):
+            batch_op.add_column(sa.Column('matched', sa.Text))
+        if not column_exists('table_history', 'not_matched'):
+            batch_op.add_column(sa.Column('not_matched', sa.Text))
+
+    with op.batch_alter_table('table_history_movie') as batch_op:
+        if not column_exists('table_history_movie', 'matched'):
+            batch_op.add_column(sa.Column('matched', sa.Text))
+        if not column_exists('table_history_movie', 'not_matched'):
+            batch_op.add_column(sa.Column('not_matched', sa.Text))
+
+
+def downgrade():
+    pass
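
Note: downgrade() is deliberately a no-op in this revision. A hedged sketch of what a symmetric downgrade could look like, reusing the column_exists() helper above (illustration only, not part of this commit):

    def downgrade():
        # drop the columns upgrade() added, guarding each drop the same way
        for table in ('table_history', 'table_history_movie'):
            with op.batch_alter_table(table) as batch_op:
                for column in ('matched', 'not_matched'):
                    if column_exists(table, column):
                        batch_op.drop_column(column)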
diff --git a/migrations/versions/dc09994b7e65_.py b/migrations/versions/dc09994b7e65_.py
new file mode 100644
index 000000000..d91f9e38c
--- /dev/null
+++ b/migrations/versions/dc09994b7e65_.py
@@ -0,0 +1,271 @@
+"""empty message
+
+Revision ID: dc09994b7e65
+Revises:
+Create Date: 2023-04-12 14:50:25.281288
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+try:
+    from psycopg2.errors import UndefinedObject
+except ImportError:
+    pass
+
+from app.database import TableHistory, TableHistoryMovie, TableBlacklist, TableBlacklistMovie, TableEpisodes, \
+    TableShows, TableMovies, TableLanguagesProfiles
+
+# revision identifiers, used by Alembic.
+revision = 'dc09994b7e65'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+bind = op.get_context().bind
+insp = sa.inspect(bind)
+
+should_recreate = 'always' if bind.engine.name == 'sqlite' else 'auto'
+
+
+def column_exists(table_name, column_name):
+    columns = insp.get_columns(table_name)
+    return any(c["name"] == column_name for c in columns)
+
+
+def constraint_exists(table_name, constraint_name):
+    constraints = insp.get_unique_constraints(table_name)
+    return any(c["name"] == constraint_name for c in constraints)
+
+
+def column_type(table_name, column_name):
+    _type = [x['type'].python_type for x in insp.get_columns(table_name) if x['name'] == column_name]
+    return _type[0] if _type else None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+
+    # Update announcements table
+    with op.batch_alter_table('table_announcements') as batch_op:
+        batch_op.execute('DROP INDEX IF EXISTS tableannouncements_hash')
+        if not column_exists('table_announcements', 'id'):
+            batch_op.add_column(sa.Column('id', sa.Integer, primary_key=True))
+
+    # Update system table
+    with op.batch_alter_table('system', recreate=should_recreate) as batch_op:
+        if not column_exists('system', 'id'):
+            batch_op.add_column(sa.Column('id', sa.Integer, primary_key=True))
+
+    # Update custom_score_profile_conditions table
+    with op.batch_alter_table('table_custom_score_profile_conditions') as batch_op:
+        batch_op.execute('DROP INDEX IF EXISTS tablecustomscoreprofileconditions_profile_id')
+        batch_op.alter_column('profile_id', index=False)
+        batch_op.execute('DROP INDEX IF EXISTS ix_table_custom_score_profile_conditions_profile_id;')
+
+    # Update notifier table
+    with op.batch_alter_table('table_settings_notifier') as batch_op:
+        batch_op.alter_column('name', existing_type=sa.TEXT(), nullable=False)
+
+    # Update series table
+    with op.batch_alter_table('table_shows', recreate=should_recreate) as batch_op:
+        if bind.engine.name == 'postgresql':
+            batch_op.execute('ALTER TABLE table_shows DROP CONSTRAINT IF EXISTS table_shows_pkey;')
+        batch_op.execute(sa.update(TableShows)
+                         .values({TableShows.profileId: None})
+                         .where(TableShows.profileId.not_in(sa.select(TableLanguagesProfiles.profileId))))
+        batch_op.create_primary_key(constraint_name='pk_table_shows', columns=['sonarrSeriesId'])
+        batch_op.create_foreign_key(constraint_name='fk_series_profileId_languages_profiles',
+                                    referent_table='table_languages_profiles',
+                                    local_cols=['profileId'],
+                                    remote_cols=['profileId'],
+                                    ondelete='SET NULL')
+        if column_exists(table_name='table_shows', column_name='hearing_impaired'):
+            batch_op.drop_column(column_name='hearing_impaired')
+        if column_exists(table_name='table_shows', column_name='forced'):
+            batch_op.drop_column(column_name='forced')
+        batch_op.alter_column(column_name='imdbId', server_default=None)
+        batch_op.alter_column(column_name='tags', server_default=None)
+        batch_op.alter_column(column_name='seriesType', server_default=None)
+        batch_op.alter_column(column_name='tvdbId', existing_type=sa.INTEGER(), nullable=True)
+        if column_exists('table_shows', 'alternateTitles') and not column_exists('table_shows', 'alternativeTitles'):
+            batch_op.alter_column(column_name='alternateTitles', new_column_name='alternativeTitles')
+        batch_op.execute('DROP INDEX IF EXISTS tableshows_path')
+        batch_op.execute('DROP INDEX IF EXISTS tableshows_profileId')
+        batch_op.execute('DROP INDEX IF EXISTS tableshows_sonarrSeriesId')
+        if not constraint_exists('table_shows', 'unique_table_shows_path'):
+            batch_op.create_unique_constraint(constraint_name='unique_table_shows_path', columns=['path'])
+
+    # Update episodes table
+    with op.batch_alter_table('table_episodes') as batch_op:
+        if bind.engine.name == 'postgresql':
+            batch_op.execute('ALTER TABLE table_episodes DROP CONSTRAINT IF EXISTS table_episodes_pkey;')
+        batch_op.execute(sa.delete(TableEpisodes).where(TableEpisodes.sonarrSeriesId.not_in(
+            sa.select(TableShows.sonarrSeriesId))))
+        batch_op.alter_column(column_name='sonarrSeriesId', existing_type=sa.INTEGER(), nullable=True)
+        batch_op.create_primary_key(constraint_name='pk_table_episodes', columns=['sonarrEpisodeId'])
+        batch_op.create_foreign_key(constraint_name='fk_sonarrSeriesId_episodes',
+                                    referent_table='table_shows',
+                                    local_cols=['sonarrSeriesId'],
+                                    remote_cols=['sonarrSeriesId'],
+                                    ondelete='CASCADE')
+        batch_op.alter_column(column_name='file_size', server_default='0')
+        if column_exists('table_episodes', 'scene_name'):
+            batch_op.alter_column(column_name='scene_name', new_column_name='sceneName')
+        batch_op.execute('DROP INDEX IF EXISTS tableepisodes_sonarrEpisodeId')
+
+    # Update series history table
+    table_history_timestamp_altered = False
+    with op.batch_alter_table('table_history') as batch_op:
+        batch_op.execute(sa.delete(TableHistory).where(TableHistory.sonarrEpisodeId.not_in(
+            sa.select(TableEpisodes.sonarrEpisodeId))))
+        batch_op.execute(sa.delete(TableHistory).where(TableHistory.sonarrSeriesId.not_in(
+            sa.select(TableShows.sonarrSeriesId))))
+        batch_op.alter_column(column_name='sonarrEpisodeId', existing_type=sa.INTEGER(), nullable=True)
+        batch_op.alter_column(column_name='sonarrSeriesId', existing_type=sa.INTEGER(), nullable=True)
+        batch_op.create_foreign_key(constraint_name='fk_sonarrEpisodeId_history',
+                                    referent_table='table_episodes',
+                                    local_cols=['sonarrEpisodeId'],
+                                    remote_cols=['sonarrEpisodeId'],
+                                    ondelete='CASCADE')
+        batch_op.create_foreign_key(constraint_name='fk_sonarrSeriesId_history',
+                                    referent_table='table_shows',
+                                    local_cols=['sonarrSeriesId'],
+                                    remote_cols=['sonarrSeriesId'],
+                                    ondelete='CASCADE')
+        if not column_exists('table_history', 'id'):
+            batch_op.add_column(sa.Column('id', sa.Integer, primary_key=True))
+        if column_type('table_history', 'score') == str:
+            batch_op.alter_column(column_name='score', existing_type=sa.Text, type_=sa.Integer)
+        if column_type('table_history', 'timestamp') == int:
+            table_history_timestamp_altered = True
+            batch_op.alter_column(column_name='timestamp', existing_type=sa.Integer, type_=sa.DateTime)
+    with op.batch_alter_table('table_history') as batch_op:
+        # must be run after alter_column has been committed
+        if table_history_timestamp_altered:
+            batch_op.execute(sa.text("UPDATE table_history SET timestamp = datetime(timestamp, 'unixepoch')"))
+
+    # Update series blacklist table
+    table_blacklist_timestamp_altered = False
+    with op.batch_alter_table('table_blacklist') as batch_op:
+        batch_op.execute(sa.delete(TableBlacklist).where(TableBlacklist.sonarr_episode_id.not_in(
+            sa.select(TableEpisodes.sonarrEpisodeId))))
+        batch_op.execute(sa.delete(TableBlacklist).where(TableBlacklist.sonarr_series_id.not_in(
+            sa.select(TableShows.sonarrSeriesId))))
+        batch_op.create_foreign_key(constraint_name='fk_sonarrEpisodeId_blacklist',
+                                    referent_table='table_episodes',
+                                    local_cols=['sonarr_episode_id'],
+                                    remote_cols=['sonarrEpisodeId'],
+                                    ondelete='CASCADE')
+        batch_op.create_foreign_key(constraint_name='fk_sonarrSeriesId_blacklist',
+                                    referent_table='table_shows',
+                                    local_cols=['sonarr_series_id'],
+                                    remote_cols=['sonarrSeriesId'],
+                                    ondelete='CASCADE')
+        if not column_exists('table_blacklist', 'id'):
+            batch_op.add_column(sa.Column('id', sa.Integer, primary_key=True))
+        if column_type('table_blacklist', 'timestamp') == int:
+            table_blacklist_timestamp_altered = True
+            batch_op.alter_column(column_name='timestamp', existing_type=sa.Integer, type_=sa.DateTime, nullable=True)
+        else:
+            batch_op.alter_column(column_name='timestamp', existing_type=sa.DATETIME(), nullable=True)
+    with op.batch_alter_table('table_blacklist') as batch_op:
+        # must be run after alter_column has been committed
+        if table_blacklist_timestamp_altered:
+            batch_op.execute(sa.text("UPDATE table_blacklist SET timestamp = datetime(timestamp, 'unixepoch')"))
+
+    # Update series rootfolder table
+    with op.batch_alter_table('table_shows_rootfolder') as batch_op:
+        if bind.engine.name == 'postgresql':
+            batch_op.execute('ALTER TABLE table_shows_rootfolder DROP CONSTRAINT IF EXISTS '
+                             'table_shows_rootfolder_pkey;')
+        batch_op.alter_column(column_name='id', existing_type=sa.INTEGER(), nullable=False, autoincrement=True)
+        batch_op.create_primary_key(constraint_name='pk_table_shows_rootfolder', columns=['id'])
+
+    # Update movies table
+    with op.batch_alter_table('table_movies', recreate=should_recreate) as batch_op:
+        if bind.engine.name == 'postgresql':
+            batch_op.execute('ALTER TABLE table_movies DROP CONSTRAINT IF EXISTS table_movies_pkey;')
+        batch_op.execute(sa.update(TableMovies)
+                         .values({TableMovies.profileId: None})
+                         .where(TableMovies.profileId.not_in(sa.select(TableLanguagesProfiles.profileId))))
+        batch_op.create_primary_key(constraint_name='pk_table_movies', columns=['radarrId'])
+        batch_op.create_foreign_key(constraint_name='fk_movies_profileId_languages_profiles',
+                                    referent_table='table_languages_profiles',
+                                    local_cols=['profileId'],
+                                    remote_cols=['profileId'],
+                                    ondelete='SET NULL')
+        if column_exists(table_name='table_movies', column_name='hearing_impaired'):
+            batch_op.drop_column(column_name='hearing_impaired')
+        if column_exists(table_name='table_movies', column_name='forced'):
+            batch_op.drop_column(column_name='forced')
+        batch_op.alter_column(column_name='file_size', server_default='0')
+        batch_op.alter_column(column_name='tags', server_default=None)
+        batch_op.execute('DROP INDEX IF EXISTS tablemovies_path')
+        batch_op.execute('DROP INDEX IF EXISTS tablemovies_profileId')
+        batch_op.execute('DROP INDEX IF EXISTS tablemovies_radarrId')
+        batch_op.execute('DROP INDEX IF EXISTS tablemovies_tmdbId')
+        if not constraint_exists('table_movies', 'unique_table_movies_path'):
+            batch_op.create_unique_constraint(constraint_name='unique_table_movies_path', columns=['path'])
+        if not constraint_exists('table_movies', 'unique_table_movies_tmdbId'):
+            batch_op.create_unique_constraint(constraint_name='unique_table_movies_tmdbId', columns=['tmdbId'])
+
+    # Update movies history table
+    table_history_movie_timestamp_altered = False
+    with op.batch_alter_table('table_history_movie') as batch_op:
+        batch_op.execute(sa.delete(TableHistoryMovie).where(TableHistoryMovie.radarrId.not_in(
+            sa.select(TableMovies.radarrId))))
+        batch_op.alter_column(column_name='radarrId', existing_type=sa.INTEGER(), nullable=True)
+        batch_op.create_foreign_key(constraint_name='fk_radarrId_history_movie',
+                                    referent_table='table_movies',
+                                    local_cols=['radarrId'],
+                                    remote_cols=['radarrId'],
+                                    ondelete='CASCADE')
+        if not column_exists('table_history_movie', 'id'):
+            batch_op.add_column(sa.Column('id', sa.Integer, primary_key=True))
+        if column_type('table_history_movie', 'score') == str:
+            batch_op.alter_column(column_name='score', existing_type=sa.Text, type_=sa.Integer)
+        if column_type('table_history_movie', 'timestamp') == int:
+            table_history_movie_timestamp_altered = True
+            batch_op.alter_column(column_name='timestamp', existing_type=sa.Integer, type_=sa.DateTime)
+    with op.batch_alter_table('table_history_movie') as batch_op:
+        # must be run after alter_column has been committed
+        if table_history_movie_timestamp_altered:
+            batch_op.execute(sa.text("UPDATE table_history_movie SET timestamp = datetime(timestamp, 'unixepoch')"))
+
+    # Update movies blacklist table
+    table_blacklist_movie_timestamp_altered = False
+    with op.batch_alter_table('table_blacklist_movie') as batch_op:
+        batch_op.execute(sa.delete(TableBlacklistMovie).where(TableBlacklistMovie.radarr_id.not_in(
+            sa.select(TableMovies.radarrId))))
+        batch_op.create_foreign_key(constraint_name='fk_radarrId_blacklist_movie',
+                                    referent_table='table_movies',
+                                    local_cols=['radarr_id'],
+                                    remote_cols=['radarrId'],
+                                    ondelete='CASCADE')
+        if not column_exists('table_blacklist_movie', 'id'):
+            batch_op.add_column(sa.Column('id', sa.Integer, primary_key=True))
+        if column_type('table_blacklist_movie', 'timestamp') == int:
+            table_blacklist_movie_timestamp_altered = True
+            batch_op.alter_column(column_name='timestamp', existing_type=sa.Integer, type_=sa.DateTime, nullable=True)
+        else:
+            batch_op.alter_column(column_name='timestamp', existing_type=sa.DATETIME(), nullable=True)
+    with op.batch_alter_table('table_blacklist_movie') as batch_op:
+        # must be run after alter_column has been committed
+        if table_blacklist_movie_timestamp_altered:
+            batch_op.execute(sa.text("UPDATE table_blacklist_movie SET timestamp = datetime(timestamp, 'unixepoch')"))
+
+    # Update movies rootfolder table
+    with op.batch_alter_table('table_movies_rootfolder') as batch_op:
+        if bind.engine.name == 'postgresql':
+            batch_op.execute('ALTER TABLE table_movies_rootfolder DROP CONSTRAINT IF EXISTS '
+                             'table_movies_rootfolder_pkey;')
+        batch_op.alter_column(column_name='id', existing_type=sa.INTEGER(), nullable=False, autoincrement=True)
+        batch_op.create_primary_key(constraint_name='pk_table_movies_rootfolder', columns=['id'])
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    pass
+    # ### end Alembic commands ###
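
Note: once both revisions have run, Alembic records the head revision in the alembic_version table. A quick hedged check, assuming a placeholder database URL:

    import sqlalchemy as sa

    engine = sa.create_engine('sqlite:///bazarr.db')  # placeholder URL
    with engine.connect() as conn:
        head = conn.execute(sa.text('SELECT version_num FROM alembic_version')).scalar()
    print(head)  # expect '95cd4cf40d7a' after both migrations apply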