alembic: Fix downgrade and tweak for sqlite
authorGeorge Joseph <george.joseph@fairview5.com>
Wed, 2 Mar 2016 02:03:04 +0000 (19:03 -0700)
committerGeorge Joseph <george.joseph@fairview5.com>
Wed, 2 Mar 2016 22:42:01 +0000 (15:42 -0700)
Downgrade had a few issues.  First, there was an errant 'update' statement in
add_auto_dtmf_mode that looks like it was a copy/paste error.  Second, we
weren't cleaning up the ENUMs, so subsequent upgrades on postgres failed
because the types already existed.

For sqlite...  sqlite doesn't support ALTER or DROP COLUMN directly.
Fortunately alembic batch_operations takes care of this for us if we
use it so the alter and drops were converted to use batch operations.

Here's an example downgrade:

    with op.batch_alter_table('ps_endpoints') as batch_op:
        batch_op.drop_column('tos_audio')
        batch_op.drop_column('tos_video')
        batch_op.add_column(sa.Column('tos_audio', yesno_values))
        batch_op.add_column(sa.Column('tos_video', yesno_values))
        batch_op.drop_column('cos_audio')
        batch_op.drop_column('cos_video')
        batch_op.add_column(sa.Column('cos_audio', yesno_values))
        batch_op.add_column(sa.Column('cos_video', yesno_values))

    with op.batch_alter_table('ps_transports') as batch_op:
        batch_op.drop_column('tos')
        batch_op.add_column(sa.Column('tos', yesno_values))
    # Can't cast integers to YESNO_VALUES, so dropping and adding is required
        batch_op.drop_column('cos')
        batch_op.add_column(sa.Column('cos', yesno_values))

Upgrades from base to head and downgrades from head to base were tested
repeatedly for postgresql, mysql/mariadb, and sqlite3.

Change-Id: I862b0739eb3fd45ec3412dcc13c2340e1b7baef8

35 files changed:
contrib/ast-db-manage/config/env.py
contrib/ast-db-manage/config/versions/10aedae86a32_add_outgoing_enum_va.py
contrib/ast-db-manage/config/versions/136885b81223_add_regcontext_to_pj.py
contrib/ast-db-manage/config/versions/154177371065_add_default_from_user.py
contrib/ast-db-manage/config/versions/15b1430ad6f1_add_moh_passthrough_option_to_pjsip.py
contrib/ast-db-manage/config/versions/1758e8bbf6b_increase_useragent_column_size.py
contrib/ast-db-manage/config/versions/189a235b3fd7_add_keep_alive_interval.py
contrib/ast-db-manage/config/versions/1d50859ed02e_create_accountcode.py
contrib/ast-db-manage/config/versions/21e526ad3040_add_pjsip_debug_option.py
contrib/ast-db-manage/config/versions/23530d604b96_add_rpid_immediate.py
contrib/ast-db-manage/config/versions/26d7f3bf0fa5_add_bind_rtp_to_media_address_to_pjsip.py
contrib/ast-db-manage/config/versions/26f10cadc157_add_pjsip_timeout_options.py
contrib/ast-db-manage/config/versions/28b8e71e541f_add_g726_non_standard.py
contrib/ast-db-manage/config/versions/28ce1e718f05_add_fatal_response_interval.py
contrib/ast-db-manage/config/versions/2d078ec071b7_increaes_contact_column_size.py
contrib/ast-db-manage/config/versions/2fc7930b41b3_add_pjsip_endpoint_options_for_12_1.py
contrib/ast-db-manage/config/versions/31cd4f4891ec_add_auto_dtmf_mode.py
contrib/ast-db-manage/config/versions/339a3bdf53fc_expand_accountcode_to_80.py
contrib/ast-db-manage/config/versions/371a3bf4143e_add_user_eq_phone_option_to_pjsip.py
contrib/ast-db-manage/config/versions/3855ee4e5f85_add_missing_pjsip_options.py
contrib/ast-db-manage/config/versions/3bcc0b5bc2c9_add_allow_reload_to_ps_transports.py
contrib/ast-db-manage/config/versions/423f34ad36e2_fix_pjsip_qualify_ti.py
contrib/ast-db-manage/config/versions/43956d550a44_add_tables_for_pjsip.py
contrib/ast-db-manage/config/versions/45e3f47c6c44_add_pjsip_endpoint_identifier_order.py
contrib/ast-db-manage/config/versions/461d7d691209_add_pjsip_qualify_timeout.py
contrib/ast-db-manage/config/versions/498357a710ae_add_rtp_keepalive.py
contrib/ast-db-manage/config/versions/4c573e7135bd_fix_tos_field_types.py
contrib/ast-db-manage/config/versions/4da0c5f79a9c_create_tables.py
contrib/ast-db-manage/config/versions/5139253c0423_make_q_member_uniqueid_autoinc.py
contrib/ast-db-manage/config/versions/51f8cb66540e_add_further_dtls_options.py
contrib/ast-db-manage/config/versions/5950038a6ead_fix_pjsip_verifiy_typo.py
contrib/ast-db-manage/config/versions/945b1098bdd_add_media_encryption_optimistic_to_pjsip.py
contrib/ast-db-manage/config/versions/a541e0b5e89_add_pjsip_max_initial_qualify_time.py
contrib/ast-db-manage/config/versions/dbc44d5a908_add_missing_columns_to_sys_and_reg.py
contrib/ast-db-manage/config/versions/e96a0b8071c_increase_pjsip_column_size.py

index 6740d59..4118da0 100755 (executable)
@@ -58,7 +58,8 @@ def run_migrations_online():
     connection = engine.connect()
     context.configure(
                 connection=connection,
-                target_metadata=target_metadata
+                target_metadata=target_metadata,
+                render_as_batch=True
                 )
 
     try:
index b4ea71c..cc9f0e0 100755 (executable)
@@ -45,7 +45,10 @@ def upgrade():
     context = op.get_context()
 
     # Upgrading to this revision WILL clear your directmedia values.
-    if context.bind.dialect.name != 'postgresql':
+    if context.bind.dialect.name == 'sqlite':
+        with op.batch_alter_table('sippeers') as batch_op:
+            batch_op.alter_column('directmedia', type_=new_type)
+    elif context.bind.dialect.name != 'postgresql':
         op.alter_column('sippeers', 'directmedia',
                         type_=new_type,
                         existing_type=old_type)
@@ -66,7 +69,10 @@ def downgrade():
     op.execute(tcr.update().where(tcr.c.directmedia==u'outgoing')
                .values(directmedia=None))
 
-    if context.bind.dialect.name != 'postgresql':
+    if context.bind.dialect.name == 'sqlite':
+        with op.batch_alter_table('sippeers') as batch_op:
+            batch_op.alter_column('directmedia', type_=old_type)
+    elif context.bind.dialect.name != 'postgresql':
         op.alter_column('sippeers', 'directmedia',
                         type_=old_type,
                         existing_type=new_type)
index 724a5e5..22fd6c7 100644 (file)
@@ -17,4 +17,5 @@ def upgrade():
     op.add_column('ps_globals', sa.Column('regcontext', sa.String(80)))
 
 def downgrade():
-    op.drop_column('ps_globals', 'regcontext')
+    with op.batch_alter_table('ps_globals') as batch_op:
+        batch_op.drop_column('regcontext')
index 7e6cf99..6c5f808 100644 (file)
@@ -19,4 +19,5 @@ def upgrade():
 
 
 def downgrade():
-    op.drop_column('ps_globals', 'default_from_user')
+    with op.batch_alter_table('ps_globals') as batch_op:
+        batch_op.drop_column('default_from_user')
index 54f08ff..725238c 100644 (file)
@@ -27,4 +27,5 @@ def upgrade():
     op.add_column('ps_endpoints', sa.Column('moh_passthrough', yesno_values))
 
 def downgrade():
-    op.drop_column('ps_endpoints', 'moh_passthrough')
+    with op.batch_alter_table('ps_endpoints') as batch_op:
+        batch_op.drop_column('moh_passthrough')
index 215726f..c16cff9 100755 (executable)
@@ -33,9 +33,11 @@ import sqlalchemy as sa
 
 
 def upgrade():
-    op.alter_column('sippeers', 'useragent', type_=sa.String(255))
+    with op.batch_alter_table('sippeers') as batch_op:
+        batch_op.alter_column('useragent', type_=sa.String(255))
 
 
 def downgrade():
-    op.alter_column('sippeers', 'useragent', type_=sa.String(20))
+    with op.batch_alter_table('sippeers') as batch_op:
+        batch_op.alter_column('useragent', type_=sa.String(20))
 
index ba972b6..dee384d 100644 (file)
@@ -19,4 +19,5 @@ def upgrade():
 
 
 def downgrade():
-    op.drop_column('ps_globals', 'keep_alive_interval')
+    with op.batch_alter_table('ps_globals') as batch_op:
+        batch_op.drop_column('keep_alive_interval')
index eb20001..4d520fc 100644 (file)
@@ -17,4 +17,5 @@ def upgrade():
     op.add_column('ps_endpoints', sa.Column('accountcode', sa.String(20)))
 
 def downgrade():
-    op.drop_column('ps_endpoints', 'accountcode')
+    with op.batch_alter_table('ps_endpoints') as batch_op:
+        batch_op.drop_column('accountcode')
index 2adca62..8b77eb7 100755 (executable)
@@ -18,4 +18,5 @@ def upgrade():
     op.add_column('ps_globals', sa.Column('debug', sa.String(40)))
 
 def downgrade():
-    op.drop_column('ps_globals', 'debug')
+    with op.batch_alter_table('ps_globals') as batch_op:
+        batch_op.drop_column('debug')
index dc0c01c..8ca63f1 100755 (executable)
@@ -45,4 +45,5 @@ def upgrade():
     op.add_column('ps_endpoints', sa.Column('rpid_immediate', yesno_values))
 
 def downgrade():
-    op.drop_column('ps_endpoints', 'rpid_immediate')
+    with op.batch_alter_table('ps_endpoints') as batch_op:
+        batch_op.drop_column('rpid_immediate')
index e7c11da..1199d0c 100644 (file)
@@ -28,4 +28,5 @@ def upgrade():
 
 
 def downgrade():
-    op.drop_column('ps_endpoints', 'bind_rtp_to_media_address')
+    with op.batch_alter_table('ps_endpoints') as batch_op:
+        batch_op.drop_column('bind_rtp_to_media_address')
index 8972d80..2a792d3 100644 (file)
@@ -20,5 +20,6 @@ def upgrade():
 
 
 def downgrade():
-    op.drop_column('ps_endpoints', 'rtp_timeout')
-    op.drop_column('ps_endpoints', 'rtp_timeout_hold')
+    with op.batch_alter_table('ps_endpoints') as batch_op:
+        batch_op.drop_column('rtp_timeout')
+        batch_op.drop_column('rtp_timeout_hold')
index ad36bd9..09056d6 100644 (file)
@@ -27,4 +27,5 @@ def upgrade():
 
 
 def downgrade():
-    op.drop_column('ps_endpoints', 'g726_non_standard')
+    with op.batch_alter_table('ps_endpoints') as batch_op:
+        batch_op.drop_column('g726_non_standard')
index 8c499ae..8e05a62 100644 (file)
@@ -19,4 +19,5 @@ def upgrade():
 
 
 def downgrade():
-    op.drop_column('ps_registrations', 'fatal_retry_interval')
+    with op.batch_alter_table('ps_registrations') as batch_op:
+        batch_op.drop_column('fatal_retry_interval')
index 2ade86f..9f98750 100644 (file)
@@ -15,8 +15,10 @@ import sqlalchemy as sa
 
 
 def upgrade():
-    op.alter_column('ps_aors', 'contact', type_=sa.String(255))
+    with op.batch_alter_table('ps_aors') as batch_op:
+        batch_op.alter_column('contact', type_=sa.String(255))
 
 
 def downgrade():
-    op.alter_column('ps_aors', 'contact', type_=sa.String(40))
+    with op.batch_alter_table('ps_aors') as batch_op:
+        batch_op.alter_column('contact', type_=sa.String(40))
index 564897e..102265e 100755 (executable)
@@ -120,15 +120,15 @@ def upgrade():
     op.create_index('ps_registrations_id', 'ps_registrations', ['id'])
 
     ########################## add columns ###########################
-
+    with op.batch_alter_table('ps_endpoints') as batch_op:
     # new columns for endpoints
-    op.add_column('ps_endpoints', sa.Column('media_address', sa.String(40)))
-    op.add_column('ps_endpoints', sa.Column('redirect_method',
+        batch_op.add_column(sa.Column('media_address', sa.String(40)))
+        batch_op.add_column(sa.Column('redirect_method',
                                             pjsip_redirect_method_values))
-    op.add_column('ps_endpoints', sa.Column('set_var', sa.Text()))
+        batch_op.add_column(sa.Column('set_var', sa.Text()))
 
     # rename mwi_fromuser to mwi_from_user
-    op.alter_column('ps_endpoints', 'mwi_fromuser',
+        batch_op.alter_column('mwi_fromuser',
                     new_column_name='mwi_from_user',
                     existing_type=sa.String(40))
 
@@ -144,20 +144,23 @@ def upgrade():
 def downgrade():
     ########################## drop columns ##########################
 
-    op.drop_column('ps_aors', 'support_path')
-    op.drop_column('ps_aors', 'outbound_proxy')
-    op.drop_column('ps_aors', 'maximum_expiration')
+    with op.batch_alter_table('ps_aors') as batch_op:
+        batch_op.drop_column('support_path')
+        batch_op.drop_column('outbound_proxy')
+        batch_op.drop_column('maximum_expiration')
 
-    op.drop_column('ps_contacts', 'path')
-    op.drop_column('ps_contacts', 'outbound_proxy')
+    with op.batch_alter_table('ps_contacts') as batch_op:
+        batch_op.drop_column('path')
+        batch_op.drop_column('outbound_proxy')
 
-    op.alter_column('ps_endpoints', 'mwi_from_user',
+    with op.batch_alter_table('ps_endpoints') as batch_op:
+        batch_op.alter_column('mwi_from_user',
                     new_column_name='mwi_fromuser',
                     existing_type=sa.String(40))
 
-    op.drop_column('ps_endpoints', 'set_var')
-    op.drop_column('ps_endpoints', 'redirect_method')
-    op.drop_column('ps_endpoints', 'media_address')
+        batch_op.drop_column('set_var')
+        batch_op.drop_column('redirect_method')
+        batch_op.drop_column('media_address')
 
     ########################## drop tables ###########################
 
index 0d84390..b1a9f8b 100644 (file)
@@ -20,14 +20,14 @@ NEW_ENUM = ['rfc4733', 'inband', 'info', 'auto']
 old_type = sa.Enum(*OLD_ENUM, name='pjsip_dtmf_mode_values')
 new_type = sa.Enum(*NEW_ENUM, name='pjsip_dtmf_mode_values_v2')
 
-tcr = sa.sql.table('ps_endpoints', sa.Column('dtmf_mode', new_type,
-                   nullable=True))
-
 def upgrade():
     context = op.get_context()
 
     # Upgrading to this revision WILL clear your directmedia values.
-    if context.bind.dialect.name != 'postgresql':
+    if context.bind.dialect.name == 'sqlite':
+        with op.batch_alter_table('ps_endpoints') as batch_op:
+            batch_op.alter_column('dtmf_mode', type_=new_type)
+    elif context.bind.dialect.name != 'postgresql':
         op.alter_column('ps_endpoints', 'dtmf_mode',
                         type_=new_type,
                         existing_type=old_type)
@@ -45,10 +45,10 @@ def upgrade():
 def downgrade():
     context = op.get_context()
 
-    op.execute(tcr.update().where(tcr.c.directmedia==u'outgoing')
-               .values(directmedia=None))
-
-    if context.bind.dialect.name != 'postgresql':
+    if context.bind.dialect.name == 'sqlite':
+        with op.batch_alter_table('ps_endpoints') as batch_op:
+            batch_op.alter_column('dtmf_mode', type_=old_type)
+    elif context.bind.dialect.name != 'postgresql':
         op.alter_column('ps_endpoints', 'dtmf_mode',
                         type_=old_type,
                         existing_type=new_type)
index 31889c6..4891152 100644 (file)
@@ -15,14 +15,20 @@ import sqlalchemy as sa
 
 
 def upgrade():
-    op.alter_column('ps_endpoints', 'accountcode', type_=sa.String(80))
-    op.alter_column('sippeers', 'accountcode', type_=sa.String(80))
-    op.alter_column('iaxfriends', 'accountcode', type_=sa.String(80))
+    with op.batch_alter_table('ps_endpoints') as batch_op:
+        batch_op.alter_column('accountcode', type_=sa.String(80))
+    with op.batch_alter_table('sippeers') as batch_op:
+        batch_op.alter_column('accountcode', type_=sa.String(80))
+    with op.batch_alter_table('iaxfriends') as batch_op:
+        batch_op.alter_column('accountcode', type_=sa.String(80))
     pass
 
 
 def downgrade():
-    op.alter_column('ps_endpoints', 'accountcode', type_=sa.String(20))
-    op.alter_column('sippeers', 'accountcode', type_=sa.String(40))
-    op.alter_column('iaxfriends', 'accountcode', type_=sa.String(20))
+    with op.batch_alter_table('ps_endpoints') as batch_op:
+        batch_op.alter_column('accountcode', type_=sa.String(20))
+    with op.batch_alter_table('sippeers') as batch_op:
+        batch_op.alter_column('accountcode', type_=sa.String(40))
+    with op.batch_alter_table('iaxfriends') as batch_op:
+        batch_op.alter_column('accountcode', type_=sa.String(20))
     pass
index 145d6be..5de7aa8 100644 (file)
@@ -27,4 +27,5 @@ def upgrade():
     op.add_column('ps_endpoints', sa.Column('user_eq_phone', yesno_values))
 
 def downgrade():
-    op.drop_column('ps_endpoints', 'user_eq_phone')
+    with op.batch_alter_table('ps_endpoints') as batch_op:
+        batch_op.drop_column('user_eq_phone')
index afc1beb..08457a9 100644 (file)
@@ -20,5 +20,7 @@ def upgrade():
 
 
 def downgrade():
-    op.drop_column('ps_contacts', 'user_agent')
-    op.drop_column('ps_endpoints', 'message_context')
+    with op.batch_alter_table('ps_contacts') as batch_op:
+        batch_op.drop_column('user_agent')
+    with op.batch_alter_table('ps_endpoints') as batch_op:
+        batch_op.drop_column('message_context')
index 377179b..7f2c579 100644 (file)
@@ -20,8 +20,7 @@ YESNO_VALUES = ['yes', 'no']
 def upgrade():
     yesno_values = ENUM(*YESNO_VALUES, name=YESNO_NAME, create_type=False)
     op.add_column('ps_transports', sa.Column('allow_reload', yesno_values))
-    pass
 
 def downgrade():
-    op.drop_column('ps_transports', 'allow_reload')
-    pass
+    with op.batch_alter_table('ps_transports') as batch_op:
+        batch_op.drop_column('allow_reload')
index cda0b9a..563b98d 100644 (file)
@@ -15,12 +15,13 @@ import sqlalchemy as sa
 
 
 def upgrade():
-    op.alter_column('ps_aors', 'qualify_timeout', type_=sa.Float)
-    op.alter_column('ps_contacts', 'qualify_timeout', type_=sa.Float)
-    pass
-
+    with op.batch_alter_table('ps_aors') as batch_op:
+        batch_op.alter_column('qualify_timeout', type_=sa.Float)
+    with op.batch_alter_table('ps_contacts') as batch_op:
+        batch_op.alter_column('qualify_timeout', type_=sa.Float)
 
 def downgrade():
-    op.alter_column('ps_aors', 'qualify_timeout', type_=sa.Integer)
-    op.alter_column('ps_contacts', 'qualify_timeout', type_=sa.Integer)
-    pass
+    with op.batch_alter_table('ps_aors') as batch_op:
+        batch_op.alter_column('qualify_timeout', type_=sa.Integer)
+    with op.batch_alter_table('ps_contacts') as batch_op:
+        batch_op.alter_column('qualify_timeout', type_=sa.Integer)
index 0c4d9c8..140fe5b 100755 (executable)
@@ -12,6 +12,7 @@ down_revision = '4da0c5f79a9c'
 
 from alembic import op
 import sqlalchemy as sa
+from sqlalchemy.dialects.postgresql import ENUM
 
 
 YESNO_VALUES = ['yes', 'no']
@@ -181,9 +182,21 @@ def upgrade():
 
 
 def downgrade():
+    context = op.get_context()
+
     op.drop_table('ps_endpoints')
     op.drop_table('ps_auths')
     op.drop_table('ps_aors')
     op.drop_table('ps_contacts')
     op.drop_table('ps_domain_aliases')
     op.drop_table('ps_endpoint_id_ips')
+
+    enums = ['yesno_values',
+             'pjsip_100rel_values','pjsip_auth_type_values','pjsip_cid_privacy_values',
+             'pjsip_connected_line_method_values','pjsip_direct_media_glare_mitigation_values',
+             'pjsip_dtls_setup_values','pjsip_dtmf_mode_values','pjsip_identify_by_values',
+             'pjsip_media_encryption_values','pjsip_t38udptl_ec_values','pjsip_timer_values']
+
+    if context.bind.dialect.name == 'postgresql':
+        for e in enums:
+            ENUM(name=e).drop(op.get_bind(), checkfirst=False)
index b7d9924..b09acf7 100644 (file)
@@ -18,4 +18,5 @@ def upgrade():
     op.add_column('ps_globals', sa.Column('endpoint_identifier_order', sa.String(40)))
 
 def downgrade():
-    op.drop_column('ps_globals', 'endpoint_identifier_order')
+    with op.batch_alter_table('ps_globals') as batch_op:
+        batch_op.drop_column('endpoint_identifier_order')
index 9600c04..ec8a904 100644 (file)
@@ -16,10 +16,9 @@ import sqlalchemy as sa
 def upgrade():
     op.add_column('ps_aors', sa.Column('qualify_timeout', sa.Integer))
     op.add_column('ps_contacts', sa.Column('qualify_timeout', sa.Integer))
-    pass
-
 
 def downgrade():
-    op.drop_column('ps_aors', 'qualify_timeout')
-    op.drop_column('ps_contacts', 'qualify_timeout')
-    pass
+    with op.batch_alter_table('ps_aors') as batch_op:
+        batch_op.drop_column('qualify_timeout')
+    with op.batch_alter_table('ps_contacts') as batch_op:
+        batch_op.drop_column('qualify_timeout')
index 5a4f470..3ad2650 100644 (file)
@@ -19,4 +19,5 @@ def upgrade():
 
 
 def downgrade():
-    op.drop_column('ps_endpoints', 'rtp_keepalive')
+    with op.batch_alter_table('ps_endpoints') as batch_op:
+        batch_op.drop_column('rtp_keepalive')
index aefddd1..d9bbf89 100755 (executable)
@@ -19,43 +19,43 @@ YESNO_NAME = 'yesno_values'
 YESNO_VALUES = ['yes', 'no']
 
 def upgrade():
-    op.alter_column('ps_endpoints', 'tos_audio',
+    with op.batch_alter_table('ps_endpoints') as batch_op:
+        batch_op.alter_column('tos_audio',
                     type_=sa.String(10))
-    op.alter_column('ps_endpoints', 'tos_video',
+        batch_op.alter_column('tos_video',
                     type_=sa.String(10))
-    op.alter_column('ps_transports', 'tos',
+        batch_op.drop_column('cos_audio')
+        batch_op.drop_column('cos_video')
+        batch_op.add_column(sa.Column('cos_audio', sa.Integer))
+        batch_op.add_column(sa.Column('cos_video', sa.Integer))
+
+    with op.batch_alter_table('ps_transports') as batch_op:
+        batch_op.alter_column('tos',
                     type_=sa.String(10))
 
     # Can't cast YENO_VALUES to Integers, so dropping and adding is required
-    op.drop_column('ps_endpoints', 'cos_audio')
-    op.drop_column('ps_endpoints', 'cos_video')
-    op.drop_column('ps_transports', 'cos')
-
-    op.add_column('ps_endpoints', sa.Column('cos_audio', sa.Integer))
-    op.add_column('ps_endpoints', sa.Column('cos_video', sa.Integer))
-    op.add_column('ps_transports', sa.Column('cos', sa.Integer))
-    pass
+        batch_op.drop_column('cos')
 
+        batch_op.add_column(sa.Column('cos', sa.Integer))
 
 def downgrade():
 
     yesno_values = ENUM(*YESNO_VALUES, name=YESNO_NAME, create_type=False)
 
     # Can't cast string to YESNO_VALUES, so dropping and adding is required
-    op.drop_column('ps_endpoints', 'tos_audio')
-    op.drop_column('ps_endpoints', 'tos_video')
-    op.drop_column('ps_transports', 'tos')
-
-    op.add_column('ps_endpoints', sa.Column('tos_audio', yesno_values))
-    op.add_column('ps_endpoints', sa.Column('tos_video', yesno_values))
-    op.add_column('ps_transports', sa.Column('tos', yesno_values))
-
+    with op.batch_alter_table('ps_endpoints') as batch_op:
+        batch_op.drop_column('tos_audio')
+        batch_op.drop_column('tos_video')
+        batch_op.add_column(sa.Column('tos_audio', yesno_values))
+        batch_op.add_column(sa.Column('tos_video', yesno_values))
+        batch_op.drop_column('cos_audio')
+        batch_op.drop_column('cos_video')
+        batch_op.add_column(sa.Column('cos_audio', yesno_values))
+        batch_op.add_column(sa.Column('cos_video', yesno_values))
+
+    with op.batch_alter_table('ps_transports') as batch_op:
+        batch_op.drop_column('tos')
+        batch_op.add_column(sa.Column('tos', yesno_values))
     # Can't cast integers to YESNO_VALUES, so dropping and adding is required
-    op.drop_column('ps_endpoints', 'cos_audio')
-    op.drop_column('ps_endpoints', 'cos_video')
-    op.drop_column('ps_transports', 'cos')
-
-    op.add_column('ps_endpoints', sa.Column('cos_audio', yesno_values))
-    op.add_column('ps_endpoints', sa.Column('cos_video', yesno_values))
-    op.add_column('ps_transports', sa.Column('cos', yesno_values))
-    pass
+        batch_op.drop_column('cos')
+        batch_op.add_column(sa.Column('cos', yesno_values))
index ffaff92..01c40ac 100755 (executable)
@@ -30,7 +30,7 @@ down_revision = None
 
 from alembic import op
 import sqlalchemy as sa
-
+from sqlalchemy.dialects.postgresql import ENUM
 
 YESNO_VALUES = ['yes', 'no']
 TYPE_VALUES = ['friend', 'user', 'peer']
@@ -323,8 +323,20 @@ def upgrade():
 
 
 def downgrade():
+    context = op.get_context()
+
     op.drop_table('sippeers')
     op.drop_table('iaxfriends')
     op.drop_table('voicemail')
     op.drop_table('meetme')
     op.drop_table('musiconhold')
+
+    enums = ['type_values', 'yes_no_values',
+             'sip_transport_values','sip_dtmfmode_values','sip_directmedia_values',
+             'sip_progressinband_values','sip_session_timers_values','sip_session_refresher_values',
+             'sip_callingpres_values','iax_requirecalltoken_values','iax_encryption_values',
+             'iax_transfer_values','moh_mode_values']
+
+    if context.bind.dialect.name == 'postgresql':
+        for e in enums:
+            ENUM(name=e).drop(op.get_bind(), checkfirst=False)
index 6bcaa9a..632f4c4 100755 (executable)
@@ -33,28 +33,31 @@ import sqlalchemy as sa
 
 
 def upgrade():
+    context = op.get_context()
     # Was unable to find a way to use op.alter_column() to add the unique
     # index property.
-    op.drop_column('queue_members', 'uniqueid')
-    op.add_column(
-        'queue_members',
-        sa.Column(
-            name='uniqueid', type_=sa.Integer, nullable=False,
-            unique=True))
+    if context.bind.dialect.name == 'sqlite':
+        with op.batch_alter_table('queue_members') as batch_op:
+            batch_op.create_primary_key('queue_members_pj', columns='uniqueid')
+    else:
+        op.drop_column('queue_members', 'uniqueid')
+        op.add_column(
+                      'queue_members',
+                      sa.Column(
+                                name='uniqueid', type_=sa.Integer, nullable=False,
+                                unique=True))
     # The postgres backend does not like the autoincrement needed for
     # mysql here.  It is just the backend that is giving a warning and
     # not the database itself.
-    op.alter_column(
-        table_name='queue_members', column_name='uniqueid',
-        existing_type=sa.Integer, existing_nullable=False,
-        autoincrement=True)
+        op.alter_column(
+                        table_name='queue_members', column_name='uniqueid',
+                        existing_type=sa.Integer, existing_nullable=False,
+                        autoincrement=True)
 
 
 def downgrade():
     # Was unable to find a way to use op.alter_column() to remove the
     # unique index property.
-    op.drop_column('queue_members', 'uniqueid')
-    op.add_column(
-        'queue_members',
-        sa.Column(name='uniqueid', type_=sa.String(80), nullable=False))
-
+    with op.batch_alter_table('queue_members') as batch_op:
+        batch_op.drop_column('uniqueid')
+        batch_op.add_column(sa.Column(name='uniqueid', type_=sa.String(80), nullable=False))
index c2dacda..8d0f68f 100644 (file)
@@ -28,5 +28,6 @@ def upgrade():
     op.add_column('ps_endpoints', sa.Column('media_use_received_transport', yesno_values))
 
 def downgrade():
-    op.drop_column('ps_endpoints', 'force_avp')
-    op.drop_column('ps_endpoints', 'media_use_received_transport')
+    with op.batch_alter_table('ps_endpoints') as batch_op:
+        batch_op.drop_column('force_avp')
+        batch_op.drop_column('media_use_received_transport')
index 28ebc8b..ace5444 100644 (file)
@@ -19,11 +19,13 @@ YESNO_VALUES = ['yes', 'no']
 
 def upgrade():
     yesno_values = ENUM(*YESNO_VALUES, name=YESNO_NAME, create_type=False)
-    op.alter_column('ps_transports', 'verifiy_server', type_=yesno_values,
+    with op.batch_alter_table('ps_transports') as batch_op:
+        batch_op.alter_column('verifiy_server', type_=yesno_values,
                     new_column_name='verify_server')
 
 
 def downgrade():
     yesno_values = ENUM(*YESNO_VALUES, name=YESNO_NAME, create_type=False)
-    op.alter_column('ps_transports', 'verify_server', type_=yesno_values,
+    with op.batch_alter_table('ps_transports') as batch_op:
+        batch_op.alter_column('verify_server', type_=yesno_values,
                     new_column_name='verifiy_server')
index 7a463f0..fff2504 100644 (file)
@@ -28,4 +28,5 @@ def upgrade():
 
 
 def downgrade():
-    op.drop_column('ps_endpoints', 'media_encryption_optimistic')
+    with op.batch_alter_table('ps_endpoints') as batch_op:
+        batch_op.drop_column('media_encryption_optimistic')
index 0ffd784..dc06d84 100644 (file)
@@ -17,4 +17,5 @@ def upgrade():
     op.add_column('ps_globals', sa.Column('max_initial_qualify_time', sa.Integer))
 
 def downgrade():
-    op.drop_column('ps_globals', 'max_initial_qualify_time')
+    with op.batch_alter_table('ps_globals') as batch_op:
+        batch_op.drop_column('max_initial_qualify_time')
index 8f46216..b4502eb 100644 (file)
@@ -27,11 +27,10 @@ def upgrade():
     op.add_column('ps_systems', sa.Column('disable_tcp_switch', yesno_values))
     op.add_column('ps_registrations', sa.Column('line', yesno_values))
     op.add_column('ps_registrations', sa.Column('endpoint', sa.String(40)))
-    pass
-
 
 def downgrade():
-    op.drop_column('ps_systems', 'disable_tcp_switch')
-    op.drop_column('ps_registrations', 'line')
-    op.drop_column('ps_registrations', 'endpoint')
-    pass
+    with op.batch_alter_table('ps_systems') as batch_op:
+        batch_op.drop_column('disable_tcp_switch')
+    with op.batch_alter_table('ps_registrations') as batch_op:
+        batch_op.drop_column('line')
+        batch_op.drop_column('endpoint')
index d386ded..f25c298 100644 (file)
@@ -15,25 +15,28 @@ import sqlalchemy as sa
 
 
 def upgrade():
-    op.alter_column('ps_globals', 'user_agent', type_=sa.String(255))
+    with op.batch_alter_table('ps_globals') as batch_op:
+        batch_op.alter_column('user_agent', type_=sa.String(255))
 
-    op.alter_column('ps_contacts', 'id', type_=sa.String(255))
-    op.alter_column('ps_contacts', 'uri', type_=sa.String(255))
-    op.alter_column('ps_contacts', 'user_agent', type_=sa.String(255))
+    with op.batch_alter_table('ps_contacts') as batch_op:
+        batch_op.alter_column('id', type_=sa.String(255))
+        batch_op.alter_column('uri', type_=sa.String(255))
+        batch_op.alter_column('user_agent', type_=sa.String(255))
 
-    op.alter_column('ps_registrations', 'client_uri', type_=sa.String(255))
-    op.alter_column('ps_registrations', 'server_uri', type_=sa.String(255))
+    with op.batch_alter_table('ps_registrations') as batch_op:
+        batch_op.alter_column('client_uri', type_=sa.String(255))
+        batch_op.alter_column('server_uri', type_=sa.String(255))
 
 
 def downgrade():
-    op.alter_column('ps_registrations', 'server_uri', type_=sa.String(40))
-    op.alter_column('ps_registrations', 'client_uri', type_=sa.String(40))
-
-    op.alter_column('ps_contacts', 'user_agent', type_=sa.String(40))
-    op.alter_column('ps_contacts', 'uri', type_=sa.String(40))
-    op.alter_column('ps_contacts', 'id', type_=sa.String(40))
-
-    op.alter_column('ps_globals', 'user_agent', type_=sa.String(40))
-
+    with op.batch_alter_table('ps_globals') as batch_op:
+        batch_op.alter_column('user_agent', type_=sa.String(40))
 
+    with op.batch_alter_table('ps_contacts') as batch_op:
+        batch_op.alter_column('id', type_=sa.String(40))
+        batch_op.alter_column('uri', type_=sa.String(40))
+        batch_op.alter_column('user_agent', type_=sa.String(40))
 
+    with op.batch_alter_table('ps_registrations') as batch_op:
+        batch_op.alter_column('client_uri', type_=sa.String(40))
+        batch_op.alter_column('server_uri', type_=sa.String(40))