author    morpheus65535 <[email protected]>  2023-07-26 19:34:49 -0400
committer GitHub <[email protected]>  2023-07-26 19:34:49 -0400
commit    bccded275c3cb09dc001d66858f3200c78723935 (patch)
tree      14f4409119ff7ca8af5f4827dca249bc0b25f768 /migrations/versions
parent    486d2f9481982fef0ff0a30c314f74e9268cc7fd (diff)
download  bazarr-bccded275c3cb09dc001d66858f3200c78723935.tar.gz
          bazarr-bccded275c3cb09dc001d66858f3200c78723935.zip
Replaced peewee with sqlalchemy as ORM. This is a major change; please report related issues on Discord. (tag: v1.2.5-beta.3)
Diffstat (limited to 'migrations/versions')
-rw-r--r--  migrations/versions/95cd4cf40d7a_.py  |  42
-rw-r--r--  migrations/versions/dc09994b7e65_.py  | 271
2 files changed, 313 insertions, 0 deletions
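
The commit message describes swapping peewee for SQLAlchemy as the ORM. As a rough, hypothetical illustration of what that swap means at the query level (the model and column names below are placeholders, not Bazarr's actual app.database classes), a peewee lookup such as TableShows.select().where(TableShows.profileId == 1) becomes a SQLAlchemy select executed through a session:

    # Hedged sketch only: a placeholder model, not Bazarr's real app.database definitions.
    import sqlalchemy as sa
    from sqlalchemy.orm import declarative_base, Session

    Base = declarative_base()


    class TableShows(Base):  # hypothetical stand-in for the real mapped class
        __tablename__ = 'table_shows'
        sonarrSeriesId = sa.Column(sa.Integer, primary_key=True)
        profileId = sa.Column(sa.Integer, nullable=True)


    engine = sa.create_engine('sqlite:///:memory:')
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        # peewee equivalent: TableShows.select().where(TableShows.profileId == 1)
        rows = session.execute(sa.select(TableShows)
                               .where(TableShows.profileId == 1)).scalars().all()
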
diff --git a/migrations/versions/95cd4cf40d7a_.py b/migrations/versions/95cd4cf40d7a_.py
new file mode 100644
index 000000000..734bbc950
--- /dev/null
+++ b/migrations/versions/95cd4cf40d7a_.py
@@ -0,0 +1,42 @@
+"""empty message
+
+Revision ID: 95cd4cf40d7a
+Revises: dc09994b7e65
+Create Date: 2023-05-30 08:44:11.636511
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '95cd4cf40d7a'
+down_revision = 'dc09994b7e65'
+branch_labels = None
+depends_on = None
+
+bind = op.get_context().bind
+insp = sa.inspect(bind)
+
+
+def column_exists(table_name, column_name):
+    columns = insp.get_columns(table_name)
+    return any(c["name"] == column_name for c in columns)
+
+
+def upgrade():
+    with op.batch_alter_table('table_history') as batch_op:
+        if not column_exists('table_history', 'matched'):
+            batch_op.add_column(sa.Column('matched', sa.Text))
+        if not column_exists('table_history', 'not_matched'):
+            batch_op.add_column(sa.Column('not_matched', sa.Text))
+
+    with op.batch_alter_table('table_history_movie') as batch_op:
+        if not column_exists('table_history_movie', 'matched'):
+            batch_op.add_column(sa.Column('matched', sa.Text))
+        if not column_exists('table_history_movie', 'not_matched'):
+            batch_op.add_column(sa.Column('not_matched', sa.Text))
+
+
+def downgrade():
+    pass
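
The revision above guards each add_column behind an inspector check so it is a no-op when the columns already exist. A minimal, self-contained sketch of that same guard pattern outside Alembic (the engine, table, and column names here are throwaway placeholders, not Bazarr's schema):

    import sqlalchemy as sa

    # Hypothetical in-memory database; the real migration runs against Bazarr's bind.
    engine = sa.create_engine('sqlite:///:memory:')
    with engine.begin() as conn:
        conn.execute(sa.text('CREATE TABLE example_history (id INTEGER PRIMARY KEY)'))

    insp = sa.inspect(engine)
    existing = {c['name'] for c in insp.get_columns('example_history')}

    if 'matched' not in existing:
        with engine.begin() as conn:
            # Same idea as batch_op.add_column(sa.Column('matched', sa.Text))
            conn.execute(sa.text('ALTER TABLE example_history ADD COLUMN matched TEXT'))
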
diff --git a/migrations/versions/dc09994b7e65_.py b/migrations/versions/dc09994b7e65_.py
new file mode 100644
index 000000000..d91f9e38c
--- /dev/null
+++ b/migrations/versions/dc09994b7e65_.py
@@ -0,0 +1,271 @@
+"""empty message
+
+Revision ID: dc09994b7e65
+Revises:
+Create Date: 2023-04-12 14:50:25.281288
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+try:
+    from psycopg2.errors import UndefinedObject
+except ImportError:
+    pass
+
+from app.database import TableHistory, TableHistoryMovie, TableBlacklist, TableBlacklistMovie, TableEpisodes, \
+    TableShows, TableMovies, TableLanguagesProfiles
+
+# revision identifiers, used by Alembic.
+revision = 'dc09994b7e65'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+bind = op.get_context().bind
+insp = sa.inspect(bind)
+
+should_recreate = 'always' if bind.engine.name == 'sqlite' else 'auto'
+
+
+def column_exists(table_name, column_name):
+    columns = insp.get_columns(table_name)
+    return any(c["name"] == column_name for c in columns)
+
+
+def constraint_exists(table_name, constraint_name):
+    constraints = insp.get_unique_constraints(table_name)
+    return any(c["name"] == constraint_name for c in constraints)
+
+
+def column_type(table_name, column_name):
+    _type = [x['type'].python_type for x in insp.get_columns(table_name) if x['name'] == column_name]
+    return _type[0] if _type else None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+
+    # Update announcements table
+    with op.batch_alter_table('table_announcements') as batch_op:
+        batch_op.execute('DROP INDEX IF EXISTS tableannouncements_hash')
+        if not column_exists('table_announcements', 'id'):
+            batch_op.add_column(sa.Column('id', sa.Integer, primary_key=True))
+
+    # Update system table
+    with op.batch_alter_table('system', recreate=should_recreate) as batch_op:
+        if not column_exists('system', 'id'):
+            batch_op.add_column(sa.Column('id', sa.Integer, primary_key=True))
+
+    # Update custom_score_profile_conditions table
+    with op.batch_alter_table('table_custom_score_profile_conditions') as batch_op:
+        batch_op.execute('DROP INDEX IF EXISTS tablecustomscoreprofileconditions_profile_id')
+        batch_op.alter_column('profile_id', index=False)
+        batch_op.execute('DROP INDEX IF EXISTS ix_table_custom_score_profile_conditions_profile_id;')
+
+    # Update notifier table
+    with op.batch_alter_table('table_settings_notifier') as batch_op:
+        batch_op.alter_column('name', existing_type=sa.TEXT(), nullable=False)
+
+    # Update series table
+    with op.batch_alter_table('table_shows', recreate=should_recreate) as batch_op:
+        if bind.engine.name == 'postgresql':
+            batch_op.execute('ALTER TABLE table_shows DROP CONSTRAINT IF EXISTS table_shows_pkey;')
+        batch_op.execute(sa.update(TableShows)
+                         .values({TableShows.profileId: None})
+                         .where(TableShows.profileId.not_in(sa.select(TableLanguagesProfiles.profileId))))
+        batch_op.create_primary_key(constraint_name='pk_table_shows', columns=['sonarrSeriesId'])
+        batch_op.create_foreign_key(constraint_name='fk_series_profileId_languages_profiles',
+                                    referent_table='table_languages_profiles',
+                                    local_cols=['profileId'],
+                                    remote_cols=['profileId'],
+                                    ondelete='SET NULL')
+        if column_exists(table_name='table_shows', column_name='hearing_impaired'):
+            batch_op.drop_column(column_name='hearing_impaired')
+        if column_exists(table_name='table_shows', column_name='forced'):
+            batch_op.drop_column(column_name='forced')
+        batch_op.alter_column(column_name='imdbId', server_default=None)
+        batch_op.alter_column(column_name='tags', server_default=None)
+        batch_op.alter_column(column_name='seriesType', server_default=None)
+        batch_op.alter_column(column_name='tvdbId', existing_type=sa.INTEGER(), nullable=True)
+        if column_exists('table_shows', 'alternateTitles') and not column_exists('table_shows', 'alternativeTitles'):
+            batch_op.alter_column(column_name='alternateTitles', new_column_name='alternativeTitles')
+        batch_op.execute('DROP INDEX IF EXISTS tableshows_path')
+        batch_op.execute('DROP INDEX IF EXISTS tableshows_profileId')
+        batch_op.execute('DROP INDEX IF EXISTS tableshows_sonarrSeriesId')
+        if not constraint_exists('table_shows', 'unique_table_shows_path'):
+            batch_op.create_unique_constraint(constraint_name='unique_table_shows_path', columns=['path'])
+
+    # Update episodes table
+    with op.batch_alter_table('table_episodes') as batch_op:
+        if bind.engine.name == 'postgresql':
+            batch_op.execute('ALTER TABLE table_episodes DROP CONSTRAINT IF EXISTS table_episodes_pkey;')
+        batch_op.execute(sa.delete(TableEpisodes).where(TableEpisodes.sonarrSeriesId.not_in(
+            sa.select(TableShows.sonarrSeriesId))))
+        batch_op.alter_column(column_name='sonarrSeriesId', existing_type=sa.INTEGER(), nullable=True)
+        batch_op.create_primary_key(constraint_name='pk_table_episodes', columns=['sonarrEpisodeId'])
+        batch_op.create_foreign_key(constraint_name='fk_sonarrSeriesId_episodes',
+                                    referent_table='table_shows',
+                                    local_cols=['sonarrSeriesId'],
+                                    remote_cols=['sonarrSeriesId'],
+                                    ondelete='CASCADE')
+        batch_op.alter_column(column_name='file_size', server_default='0')
+        if column_exists('table_episodes', 'scene_name'):
+            batch_op.alter_column(column_name='scene_name', new_column_name='sceneName')
+        batch_op.execute('DROP INDEX IF EXISTS tableepisodes_sonarrEpisodeId')
+
+    # Update series history table
+    table_history_timestamp_altered = False
+    with op.batch_alter_table('table_history') as batch_op:
+        batch_op.execute(sa.delete(TableHistory).where(TableHistory.sonarrEpisodeId.not_in(
+            sa.select(TableEpisodes.sonarrEpisodeId))))
+        batch_op.execute(sa.delete(TableHistory).where(TableHistory.sonarrSeriesId.not_in(
+            sa.select(TableShows.sonarrSeriesId))))
+        batch_op.alter_column(column_name='sonarrEpisodeId', existing_type=sa.INTEGER(), nullable=True)
+        batch_op.alter_column(column_name='sonarrSeriesId', existing_type=sa.INTEGER(), nullable=True)
+        batch_op.create_foreign_key(constraint_name='fk_sonarrEpisodeId_history',
+                                    referent_table='table_episodes',
+                                    local_cols=['sonarrEpisodeId'],
+                                    remote_cols=['sonarrEpisodeId'],
+                                    ondelete='CASCADE')
+        batch_op.create_foreign_key(constraint_name='fk_sonarrSeriesId_history',
+                                    referent_table='table_shows',
+                                    local_cols=['sonarrSeriesId'],
+                                    remote_cols=['sonarrSeriesId'],
+                                    ondelete='CASCADE')
+        if not column_exists('table_history', 'id'):
+            batch_op.add_column(sa.Column('id', sa.Integer, primary_key=True))
+        if column_type('table_history', 'score') == str:
+            batch_op.alter_column(column_name='score', existing_type=sa.Text, type_=sa.Integer)
+        if column_type('table_history', 'timestamp') == int:
+            table_history_timestamp_altered = True
+            batch_op.alter_column(column_name='timestamp', existing_type=sa.Integer, type_=sa.DateTime)
+    with op.batch_alter_table('table_history') as batch_op:
+        # must be run after the alter_column above has been committed
+        if table_history_timestamp_altered:
+            batch_op.execute(sa.text("UPDATE table_history SET timestamp = datetime(timestamp, 'unixepoch')"))
+
+    # Update series blacklist table
+    table_blacklist_timestamp_altered = False
+    with op.batch_alter_table('table_blacklist') as batch_op:
+        batch_op.execute(sa.delete(TableBlacklist).where(TableBlacklist.sonarr_episode_id.not_in(
+            sa.select(TableEpisodes.sonarrEpisodeId))))
+        batch_op.execute(sa.delete(TableBlacklist).where(TableBlacklist.sonarr_series_id.not_in(
+            sa.select(TableShows.sonarrSeriesId))))
+        batch_op.create_foreign_key(constraint_name='fk_sonarrEpisodeId_blacklist',
+                                    referent_table='table_episodes',
+                                    local_cols=['sonarr_episode_id'],
+                                    remote_cols=['sonarrEpisodeId'],
+                                    ondelete='CASCADE')
+        batch_op.create_foreign_key(constraint_name='fk_sonarrSeriesId_blacklist',
+                                    referent_table='table_shows',
+                                    local_cols=['sonarr_series_id'],
+                                    remote_cols=['sonarrSeriesId'],
+                                    ondelete='CASCADE')
+        if not column_exists('table_blacklist', 'id'):
+            batch_op.add_column(sa.Column('id', sa.Integer, primary_key=True))
+        if column_type('table_blacklist', 'timestamp') == int:
+            table_blacklist_timestamp_altered = True
+            batch_op.alter_column(column_name='timestamp', existing_type=sa.Integer, type_=sa.DateTime, nullable=True)
+        else:
+            batch_op.alter_column(column_name='timestamp', existing_type=sa.DATETIME(), nullable=True)
+    with op.batch_alter_table('table_blacklist') as batch_op:
+        # must be run after the alter_column above has been committed
+        if table_blacklist_timestamp_altered:
+            batch_op.execute(sa.text("UPDATE table_blacklist SET timestamp = datetime(timestamp, 'unixepoch')"))
+
+    # Update series rootfolder table
+    with op.batch_alter_table('table_shows_rootfolder') as batch_op:
+        if bind.engine.name == 'postgresql':
+            batch_op.execute('ALTER TABLE table_shows_rootfolder DROP CONSTRAINT IF EXISTS '
+                             'table_shows_rootfolder_pkey;')
+        batch_op.alter_column(column_name='id', existing_type=sa.INTEGER(), nullable=False, autoincrement=True)
+        batch_op.create_primary_key(constraint_name='pk_table_shows_rootfolder', columns=['id'])
+
+    # Update movies table
+    with op.batch_alter_table('table_movies', recreate=should_recreate) as batch_op:
+        if bind.engine.name == 'postgresql':
+            batch_op.execute('ALTER TABLE table_movies DROP CONSTRAINT IF EXISTS table_movies_pkey;')
+        batch_op.execute(sa.update(TableMovies)
+                         .values({TableMovies.profileId: None})
+                         .where(TableMovies.profileId.not_in(sa.select(TableLanguagesProfiles.profileId))))
+        batch_op.create_primary_key(constraint_name='pk_table_movies', columns=['radarrId'])
+        batch_op.create_foreign_key(constraint_name='fk_movies_profileId_languages_profiles',
+                                    referent_table='table_languages_profiles',
+                                    local_cols=['profileId'],
+                                    remote_cols=['profileId'],
+                                    ondelete='SET NULL')
+        if column_exists(table_name='table_movies', column_name='hearing_impaired'):
+            batch_op.drop_column(column_name='hearing_impaired')
+        if column_exists(table_name='table_movies', column_name='forced'):
+            batch_op.drop_column(column_name='forced')
+        batch_op.alter_column(column_name='file_size', server_default='0')
+        batch_op.alter_column(column_name='tags', server_default=None)
+        batch_op.execute('DROP INDEX IF EXISTS tablemovies_path')
+        batch_op.execute('DROP INDEX IF EXISTS tablemovies_profileId')
+        batch_op.execute('DROP INDEX IF EXISTS tablemovies_radarrId')
+        batch_op.execute('DROP INDEX IF EXISTS tablemovies_tmdbId')
+        if not constraint_exists('table_movies', 'unique_table_movies_path'):
+            batch_op.create_unique_constraint(constraint_name='unique_table_movies_path', columns=['path'])
+        if not constraint_exists('table_movies', 'unique_table_movies_tmdbId'):
+            batch_op.create_unique_constraint(constraint_name='unique_table_movies_tmdbId', columns=['tmdbId'])
+
+    # Update movies history table
+    table_history_movie_timestamp_altered = False
+    with op.batch_alter_table('table_history_movie') as batch_op:
+        batch_op.execute(sa.delete(TableHistoryMovie).where(TableHistoryMovie.radarrId.not_in(
+            sa.select(TableMovies.radarrId))))
+        batch_op.alter_column(column_name='radarrId', existing_type=sa.INTEGER(), nullable=True)
+        batch_op.create_foreign_key(constraint_name='fk_radarrId_history_movie',
+                                    referent_table='table_movies',
+                                    local_cols=['radarrId'],
+                                    remote_cols=['radarrId'],
+                                    ondelete='CASCADE')
+        if not column_exists('table_history_movie', 'id'):
+            batch_op.add_column(sa.Column('id', sa.Integer, primary_key=True))
+        if column_type('table_history_movie', 'score') == str:
+            batch_op.alter_column(column_name='score', existing_type=sa.Text, type_=sa.Integer)
+        if column_type('table_history_movie', 'timestamp') == int:
+            table_history_movie_timestamp_altered = True
+            batch_op.alter_column(column_name='timestamp', existing_type=sa.Integer, type_=sa.DateTime)
+    with op.batch_alter_table('table_history_movie') as batch_op:
+        # must be run after the alter_column above has been committed
+        if table_history_movie_timestamp_altered:
+            batch_op.execute(sa.text("UPDATE table_history_movie SET timestamp = datetime(timestamp, 'unixepoch')"))
+
+    # Update movies blacklist table
+    table_blacklist_movie_timestamp_altered = False
+    with op.batch_alter_table('table_blacklist_movie') as batch_op:
+        batch_op.execute(sa.delete(TableBlacklistMovie).where(TableBlacklistMovie.radarr_id.not_in(
+            sa.select(TableMovies.radarrId))))
+        batch_op.create_foreign_key(constraint_name='fk_radarrId_blacklist_movie',
+                                    referent_table='table_movies',
+                                    local_cols=['radarr_id'],
+                                    remote_cols=['radarrId'],
+                                    ondelete='CASCADE')
+        if not column_exists('table_blacklist_movie', 'id'):
+            batch_op.add_column(sa.Column('id', sa.Integer, primary_key=True))
+        if column_type('table_blacklist_movie', 'timestamp') == int:
+            table_blacklist_movie_timestamp_altered = True
+            batch_op.alter_column(column_name='timestamp', existing_type=sa.Integer, type_=sa.DateTime, nullable=True)
+        else:
+            batch_op.alter_column(column_name='timestamp', existing_type=sa.DATETIME(), nullable=True)
+    with op.batch_alter_table('table_blacklist_movie') as batch_op:
+        # must be run after the alter_column above has been committed
+        if table_blacklist_movie_timestamp_altered:
+            batch_op.execute(sa.text("UPDATE table_blacklist_movie SET timestamp = datetime(timestamp, 'unixepoch')"))
+
+    # Update movies rootfolder table
+    with op.batch_alter_table('table_movies_rootfolder') as batch_op:
+        if bind.engine.name == 'postgresql':
+            batch_op.execute('ALTER TABLE table_movies_rootfolder DROP CONSTRAINT IF EXISTS '
+                             'table_movies_rootfolder_pkey;')
+        batch_op.alter_column(column_name='id', existing_type=sa.INTEGER(), nullable=False, autoincrement=True)
+        batch_op.create_primary_key(constraint_name='pk_table_movies_rootfolder', columns=['id'])
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    pass
+    # ### end Alembic commands ###
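
Both revisions are ordinary Alembic scripts (dc09994b7e65 is the root revision, 95cd4cf40d7a revises it), so they are applied in order by a normal Alembic upgrade. A hedged sketch of the programmatic invocation, assuming an alembic.ini plus a script_location pointing at this migrations directory (Bazarr's actual bootstrap wiring is not part of this diff):

    from alembic import command
    from alembic.config import Config

    # Assumed configuration; Bazarr configures this differently in practice.
    cfg = Config('alembic.ini')
    cfg.set_main_option('script_location', 'migrations')

    # Applies dc09994b7e65 first, then 95cd4cf40d7a.
    command.upgrade(cfg, 'head')
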