diff --git a/alembic/versions/2024_08_14_1348-23a5388bafd9_reboot.py b/alembic/versions/2024_08_14_1348-23a5388bafd9_reboot.py
deleted file mode 100644
index 2cd9b2e8..00000000
--- a/alembic/versions/2024_08_14_1348-23a5388bafd9_reboot.py
+++ /dev/null
@@ -1,1025 +0,0 @@
-"""reboot
-
-Revision ID: 23a5388bafd9
-Revises: 
-Create Date: 2024-08-14 13:48:16.784689
-
-"""
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
-
-# revision identifiers, used by Alembic.
-revision = '23a5388bafd9'
-down_revision = None
-branch_labels = None
-depends_on = None
-
-
-def upgrade() -> None:
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('authuser',
-    sa.Column('id', sa.UUID(), nullable=False),
-    sa.Column('username', sa.Text(), nullable=False),
-    sa.Column('displayname', sa.Text(), nullable=False),
-    sa.Column('email', sa.Text(), nullable=False),
-    sa.Column('pubkey', sa.Text(), nullable=True),
-    sa.Column('privkey', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.PrimaryKeyConstraint('id')
-    )
-    op.create_index(op.f('ix_authuser_created_at'), 'authuser', ['created_at'], unique=False)
-    op.create_index(op.f('ix_authuser_email'), 'authuser', ['email'], unique=False)
-    op.create_index(op.f('ix_authuser_username'), 'authuser', ['username'], unique=True)
-    op.create_table('calibfile_downloadlock',
-    sa.Column('_type', sa.SMALLINT(), nullable=False),
-    sa.Column('_calibrator_set', sa.SMALLINT(), nullable=False),
-    sa.Column('_flat_type', sa.SMALLINT(), nullable=True),
-    sa.Column('instrument', sa.Text(), nullable=False),
-    sa.Column('sensor_section', sa.Text(), nullable=True),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('_id', sa.UUID(), nullable=False),
-    sa.PrimaryKeyConstraint('_id')
-    )
-    op.create_index(op.f('ix_calibfile_downloadlock__calibrator_set'), 'calibfile_downloadlock', ['_calibrator_set'], unique=False)
-    op.create_index(op.f('ix_calibfile_downloadlock__flat_type'), 'calibfile_downloadlock', ['_flat_type'], unique=False)
-    op.create_index(op.f('ix_calibfile_downloadlock__id'), 'calibfile_downloadlock', ['_id'], unique=False)
-    op.create_index(op.f('ix_calibfile_downloadlock__type'), 'calibfile_downloadlock', ['_type'], unique=False)
-    op.create_index(op.f('ix_calibfile_downloadlock_created_at'), 'calibfile_downloadlock', ['created_at'], unique=False)
-    op.create_index(op.f('ix_calibfile_downloadlock_instrument'), 'calibfile_downloadlock', ['instrument'], unique=False)
-    op.create_index(op.f('ix_calibfile_downloadlock_sensor_section'), 'calibfile_downloadlock', ['sensor_section'], unique=False)
-    op.create_table('catalog_excerpts',
-    sa.Column('_format', sa.SMALLINT(), nullable=False),
-    sa.Column('_origin', sa.SMALLINT(), nullable=False),
-    sa.Column('num_items', sa.Integer(), nullable=False),
-    sa.Column('minmag', sa.REAL(), nullable=True),
-    sa.Column('maxmag', sa.REAL(), nullable=True),
-    sa.Column('filters', postgresql.ARRAY(sa.Text(), zero_indexes=True), nullable=False),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('_id', sa.UUID(), nullable=False),
-    sa.Column('filepath_extensions', postgresql.ARRAY(sa.Text(), zero_indexes=True), nullable=True),
-    sa.Column('md5sum', sa.UUID(), nullable=True),
-    sa.Column('md5sum_extensions', postgresql.ARRAY(sa.UUID(), zero_indexes=True), nullable=True),
-    sa.Column('filepath', sa.Text(), nullable=False),
-    sa.Column('ra', sa.Double(), nullable=False),
-    sa.Column('dec', sa.Double(), nullable=False),
-    sa.Column('gallat', sa.Double(), nullable=True),
-    sa.Column('gallon', sa.Double(), nullable=True),
-    sa.Column('ecllat', sa.Double(), nullable=True),
-    sa.Column('ecllon', sa.Double(), nullable=True),
-    sa.Column('ra_corner_00', sa.REAL(), nullable=False),
-    sa.Column('ra_corner_01', sa.REAL(), nullable=False),
-    sa.Column('ra_corner_10', sa.REAL(), nullable=False),
-    sa.Column('ra_corner_11', sa.REAL(), nullable=False),
-    sa.Column('dec_corner_00', sa.REAL(), nullable=False),
-    sa.Column('dec_corner_01', sa.REAL(), nullable=False),
-    sa.Column('dec_corner_10', sa.REAL(), nullable=False),
-    sa.Column('dec_corner_11', sa.REAL(), nullable=False),
-    sa.Column('minra', sa.REAL(), nullable=False),
-    sa.Column('maxra', sa.REAL(), nullable=False),
-    sa.Column('mindec', sa.REAL(), nullable=False),
-    sa.Column('maxdec', sa.REAL(), nullable=False),
-    sa.CheckConstraint('NOT(md5sum IS NULL AND (md5sum_extensions IS NULL OR array_position(md5sum_extensions, NULL) IS NOT NULL))', name='catalog_excerpts_md5sum_check'),
-    sa.PrimaryKeyConstraint('_id')
-    )
-    op.create_index('catalog_excerpts_q3c_ang2ipix_idx', 'catalog_excerpts', [sa.text('q3c_ang2ipix(ra, dec)')], unique=False)
-    op.create_index(op.f('ix_catalog_excerpts__id'), 'catalog_excerpts', ['_id'], unique=False)
-    op.create_index(op.f('ix_catalog_excerpts__origin'), 'catalog_excerpts', ['_origin'], unique=False)
-    op.create_index(op.f('ix_catalog_excerpts_created_at'), 'catalog_excerpts', ['created_at'], unique=False)
-    op.create_index(op.f('ix_catalog_excerpts_dec_corner_00'), 'catalog_excerpts', ['dec_corner_00'], unique=False)
-    op.create_index(op.f('ix_catalog_excerpts_dec_corner_01'), 'catalog_excerpts', ['dec_corner_01'], unique=False)
-    op.create_index(op.f('ix_catalog_excerpts_dec_corner_10'), 'catalog_excerpts', ['dec_corner_10'], unique=False)
-    op.create_index(op.f('ix_catalog_excerpts_dec_corner_11'), 'catalog_excerpts', ['dec_corner_11'], unique=False)
-    op.create_index(op.f('ix_catalog_excerpts_ecllat'), 'catalog_excerpts', ['ecllat'], unique=False)
-    op.create_index(op.f('ix_catalog_excerpts_filepath'), 'catalog_excerpts', ['filepath'], unique=True)
-    op.create_index(op.f('ix_catalog_excerpts_gallat'), 'catalog_excerpts', ['gallat'], unique=False)
-    op.create_index(op.f('ix_catalog_excerpts_maxdec'), 'catalog_excerpts', ['maxdec'], unique=False)
-    op.create_index(op.f('ix_catalog_excerpts_maxmag'), 'catalog_excerpts', ['maxmag'], unique=False)
-    op.create_index(op.f('ix_catalog_excerpts_maxra'), 'catalog_excerpts', ['maxra'], unique=False)
-    op.create_index(op.f('ix_catalog_excerpts_mindec'), 'catalog_excerpts', ['mindec'], unique=False)
-    op.create_index(op.f('ix_catalog_excerpts_minmag'), 'catalog_excerpts', ['minmag'], unique=False)
-    op.create_index(op.f('ix_catalog_excerpts_minra'), 'catalog_excerpts', ['minra'], unique=False)
-    op.create_index(op.f('ix_catalog_excerpts_num_items'), 'catalog_excerpts', ['num_items'], unique=False)
-    op.create_index(op.f('ix_catalog_excerpts_ra_corner_00'), 'catalog_excerpts', ['ra_corner_00'], unique=False)
-    op.create_index(op.f('ix_catalog_excerpts_ra_corner_01'), 'catalog_excerpts', ['ra_corner_01'], unique=False)
-    op.create_index(op.f('ix_catalog_excerpts_ra_corner_10'), 'catalog_excerpts', ['ra_corner_10'], unique=False)
-    op.create_index(op.f('ix_catalog_excerpts_ra_corner_11'), 'catalog_excerpts', ['ra_corner_11'], unique=False)
-    op.create_table('code_versions',
-    sa.Column('_id', sa.String(), nullable=False),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.PrimaryKeyConstraint('_id')
-    )
-    op.create_index(op.f('ix_code_versions_created_at'), 'code_versions', ['created_at'], unique=False)
-    op.create_table('objects',
-    sa.Column('name', sa.String(), nullable=False),
-    sa.Column('is_test', sa.Boolean(), nullable=False),
-    sa.Column('is_fake', sa.Boolean(), nullable=False),
-    sa.Column('is_bad', sa.Boolean(), nullable=False),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('_id', sa.UUID(), nullable=False),
-    sa.Column('ra', sa.Double(), nullable=False),
-    sa.Column('dec', sa.Double(), nullable=False),
-    sa.Column('gallat', sa.Double(), nullable=True),
-    sa.Column('gallon', sa.Double(), nullable=True),
-    sa.Column('ecllat', sa.Double(), nullable=True),
-    sa.Column('ecllon', sa.Double(), nullable=True),
-    sa.PrimaryKeyConstraint('_id')
-    )
-    op.create_index(op.f('ix_objects__id'), 'objects', ['_id'], unique=False)
-    op.create_index(op.f('ix_objects_created_at'), 'objects', ['created_at'], unique=False)
-    op.create_index(op.f('ix_objects_ecllat'), 'objects', ['ecllat'], unique=False)
-    op.create_index(op.f('ix_objects_gallat'), 'objects', ['gallat'], unique=False)
-    op.create_index(op.f('ix_objects_is_bad'), 'objects', ['is_bad'], unique=False)
-    op.create_index(op.f('ix_objects_name'), 'objects', ['name'], unique=True)
-    op.create_index('objects_q3c_ang2ipix_idx', 'objects', [sa.text('q3c_ang2ipix(ra, dec)')], unique=False)
-    op.create_table('pipelineworkers',
-    sa.Column('cluster_id', sa.Text(), nullable=False),
-    sa.Column('node_id', sa.Text(), nullable=True),
-    sa.Column('nexps', sa.SmallInteger(), nullable=False),
-    sa.Column('lastheartbeat', sa.DateTime(), nullable=False),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('_id', sa.UUID(), nullable=False),
-    sa.PrimaryKeyConstraint('_id')
-    )
-    op.create_index(op.f('ix_pipelineworkers__id'), 'pipelineworkers', ['_id'], unique=False)
-    op.create_index(op.f('ix_pipelineworkers_created_at'), 'pipelineworkers', ['created_at'], unique=False)
-    op.create_table('refsets',
-    sa.Column('name', sa.Text(), nullable=False),
-    sa.Column('description', sa.Text(), nullable=True),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('_id', sa.UUID(), nullable=False),
-    sa.PrimaryKeyConstraint('_id')
-    )
-    op.create_index(op.f('ix_refsets__id'), 'refsets', ['_id'], unique=False)
-    op.create_index(op.f('ix_refsets_created_at'), 'refsets', ['created_at'], unique=False)
-    op.create_index(op.f('ix_refsets_name'), 'refsets', ['name'], unique=True)
-    op.create_table('sensor_sections',
-    sa.Column('instrument', sa.Text(), nullable=False),
-    sa.Column('identifier', sa.Text(), nullable=False),
-    sa.Column('validity_start', sa.DateTime(), nullable=True),
-    sa.Column('validity_end', sa.DateTime(), nullable=True),
-    sa.Column('size_x', sa.Integer(), nullable=True),
-    sa.Column('size_y', sa.Integer(), nullable=True),
-    sa.Column('offset_x', sa.Integer(), nullable=True),
-    sa.Column('offset_y', sa.Integer(), nullable=True),
-    sa.Column('filter_array_index', sa.Integer(), nullable=True),
-    sa.Column('read_noise', sa.REAL(), nullable=True),
-    sa.Column('dark_current', sa.REAL(), nullable=True),
-    sa.Column('gain', sa.REAL(), nullable=True),
-    sa.Column('saturation_limit', sa.REAL(), nullable=True),
-    sa.Column('non_linearity_limit', sa.REAL(), nullable=True),
-    sa.Column('defective', sa.Boolean(), nullable=False),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('_id', sa.UUID(), nullable=False),
-    sa.PrimaryKeyConstraint('_id')
-    )
-    op.create_index(op.f('ix_sensor_sections__id'), 'sensor_sections', ['_id'], unique=False)
-    op.create_index(op.f('ix_sensor_sections_created_at'), 'sensor_sections', ['created_at'], unique=False)
-    op.create_index(op.f('ix_sensor_sections_defective'), 'sensor_sections', ['defective'], unique=False)
-    op.create_index(op.f('ix_sensor_sections_identifier'), 'sensor_sections', ['identifier'], unique=False)
-    op.create_index(op.f('ix_sensor_sections_instrument'), 'sensor_sections', ['instrument'], unique=False)
-    op.create_index(op.f('ix_sensor_sections_validity_end'), 'sensor_sections', ['validity_end'], unique=False)
-    op.create_index(op.f('ix_sensor_sections_validity_start'), 'sensor_sections', ['validity_start'], unique=False)
-    op.create_table('code_hashes',
-    sa.Column('_id', sa.String(), nullable=False),
-    sa.Column('code_version_id', sa.String(), nullable=True),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.ForeignKeyConstraint(['code_version_id'], ['code_versions._id'], name='code_hashes_code_version_id_fkey', ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('_id')
-    )
-    op.create_index(op.f('ix_code_hashes_code_version_id'), 'code_hashes', ['code_version_id'], unique=False)
-    op.create_index(op.f('ix_code_hashes_created_at'), 'code_hashes', ['created_at'], unique=False)
-    op.create_table('passwordlink',
-    sa.Column('id', sa.UUID(), nullable=False),
-    sa.Column('userid', sa.UUID(), nullable=True),
-    sa.Column('expires', sa.DateTime(timezone=True), nullable=True),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.ForeignKeyConstraint(['userid'], ['authuser.id'], ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('id')
-    )
-    op.create_index(op.f('ix_passwordlink_created_at'), 'passwordlink', ['created_at'], unique=False)
-    op.create_index(op.f('ix_passwordlink_userid'), 'passwordlink', ['userid'], unique=False)
-    op.create_table('provenances',
-    sa.Column('_id', sa.String(), nullable=False),
-    sa.Column('process', sa.String(), nullable=False),
-    sa.Column('code_version_id', sa.String(), nullable=False),
-    sa.Column('parameters', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
-    sa.Column('is_bad', sa.Boolean(), nullable=False),
-    sa.Column('bad_comment', sa.String(), nullable=True),
-    sa.Column('is_outdated', sa.Boolean(), nullable=False),
-    sa.Column('replaced_by', sa.String(), nullable=True),
-    sa.Column('is_testing', sa.Boolean(), nullable=False),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.ForeignKeyConstraint(['code_version_id'], ['code_versions._id'], name='provenances_code_version_id_fkey', ondelete='CASCADE'),
-    sa.ForeignKeyConstraint(['replaced_by'], ['provenances._id'], name='provenances_replaced_by_fkey', ondelete='SET NULL'),
-    sa.PrimaryKeyConstraint('_id')
-    )
-    op.create_index(op.f('ix_provenances_code_version_id'), 'provenances', ['code_version_id'], unique=False)
-    op.create_index(op.f('ix_provenances_created_at'), 'provenances', ['created_at'], unique=False)
-    op.create_index(op.f('ix_provenances_process'), 'provenances', ['process'], unique=False)
-    op.create_index(op.f('ix_provenances_replaced_by'), 'provenances', ['replaced_by'], unique=False)
-    op.create_table('data_files',
-    sa.Column('provenance_id', sa.String(), nullable=False),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('_id', sa.UUID(), nullable=False),
-    sa.Column('filepath_extensions', postgresql.ARRAY(sa.Text(), zero_indexes=True), nullable=True),
-    sa.Column('md5sum', sa.UUID(), nullable=True),
-    sa.Column('md5sum_extensions', postgresql.ARRAY(sa.UUID(), zero_indexes=True), nullable=True),
-    sa.Column('filepath', sa.Text(), nullable=False),
-    sa.CheckConstraint('NOT(md5sum IS NULL AND (md5sum_extensions IS NULL OR array_position(md5sum_extensions, NULL) IS NOT NULL))', name='data_files_md5sum_check'),
-    sa.ForeignKeyConstraint(['provenance_id'], ['provenances._id'], name='data_files_provenance_id_fkey', ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('_id')
-    )
-    op.create_index(op.f('ix_data_files__id'), 'data_files', ['_id'], unique=False)
-    op.create_index(op.f('ix_data_files_created_at'), 'data_files', ['created_at'], unique=False)
-    op.create_index(op.f('ix_data_files_filepath'), 'data_files', ['filepath'], unique=True)
-    op.create_index(op.f('ix_data_files_provenance_id'), 'data_files', ['provenance_id'], unique=False)
-    op.create_table('exposures',
-    sa.Column('_type', sa.SMALLINT(), nullable=False),
-    sa.Column('_format', sa.SMALLINT(), nullable=False),
-    sa.Column('provenance_id', sa.String(), nullable=False),
-    sa.Column('info', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
-    sa.Column('mjd', sa.Double(), nullable=False),
-    sa.Column('exp_time', sa.REAL(), nullable=False),
-    sa.Column('filter', sa.Text(), nullable=True),
-    sa.Column('airmass', sa.REAL(), nullable=True),
-    sa.Column('filter_array', postgresql.ARRAY(sa.Text(), zero_indexes=True), nullable=True),
-    sa.Column('instrument', sa.Text(), nullable=False),
-    sa.Column('project', sa.Text(), nullable=False),
-    sa.Column('target', sa.Text(), nullable=False),
-    sa.Column('_bitflag', sa.BIGINT(), nullable=False),
-    sa.Column('origin_identifier', sa.Text(), nullable=True),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('_id', sa.UUID(), nullable=False),
-    sa.Column('filepath_extensions', postgresql.ARRAY(sa.Text(), zero_indexes=True), nullable=True),
-    sa.Column('md5sum', sa.UUID(), nullable=True),
-    sa.Column('md5sum_extensions', postgresql.ARRAY(sa.UUID(), zero_indexes=True), nullable=True),
-    sa.Column('filepath', sa.Text(), nullable=False),
-    sa.Column('ra', sa.Double(), nullable=False),
-    sa.Column('dec', sa.Double(), nullable=False),
-    sa.Column('gallat', sa.Double(), nullable=True),
-    sa.Column('gallon', sa.Double(), nullable=True),
-    sa.Column('ecllat', sa.Double(), nullable=True),
-    sa.Column('ecllon', sa.Double(), nullable=True),
-    sa.Column('description', sa.Text(), nullable=True),
-    sa.CheckConstraint('NOT(filter IS NULL AND filter_array IS NULL)', name='exposures_filter_or_array_check'),
-    sa.CheckConstraint('NOT(md5sum IS NULL AND (md5sum_extensions IS NULL OR array_position(md5sum_extensions, NULL) IS NOT NULL))', name='exposures_md5sum_check'),
-    sa.ForeignKeyConstraint(['provenance_id'], ['provenances._id'], ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('_id')
-    )
-    op.create_index('exposures_q3c_ang2ipix_idx', 'exposures', [sa.text('q3c_ang2ipix(ra, dec)')], unique=False)
-    op.create_index(op.f('ix_exposures__bitflag'), 'exposures', ['_bitflag'], unique=False)
-    op.create_index(op.f('ix_exposures__id'), 'exposures', ['_id'], unique=False)
-    op.create_index(op.f('ix_exposures__type'), 'exposures', ['_type'], unique=False)
-    op.create_index(op.f('ix_exposures_airmass'), 'exposures', ['airmass'], unique=False)
-    op.create_index(op.f('ix_exposures_created_at'), 'exposures', ['created_at'], unique=False)
-    op.create_index(op.f('ix_exposures_ecllat'), 'exposures', ['ecllat'], unique=False)
-    op.create_index(op.f('ix_exposures_exp_time'), 'exposures', ['exp_time'], unique=False)
-    op.create_index(op.f('ix_exposures_filepath'), 'exposures', ['filepath'], unique=True)
-    op.create_index(op.f('ix_exposures_filter'), 'exposures', ['filter'], unique=False)
-    op.create_index(op.f('ix_exposures_filter_array'), 'exposures', ['filter_array'], unique=False)
-    op.create_index(op.f('ix_exposures_gallat'), 'exposures', ['gallat'], unique=False)
-    op.create_index(op.f('ix_exposures_instrument'), 'exposures', ['instrument'], unique=False)
-    op.create_index(op.f('ix_exposures_mjd'), 'exposures', ['mjd'], unique=False)
-    op.create_index(op.f('ix_exposures_origin_identifier'), 'exposures', ['origin_identifier'], unique=False)
-    op.create_index(op.f('ix_exposures_project'), 'exposures', ['project'], unique=False)
-    op.create_index(op.f('ix_exposures_provenance_id'), 'exposures', ['provenance_id'], unique=False)
-    op.create_index(op.f('ix_exposures_target'), 'exposures', ['target'], unique=False)
-    op.create_table('provenance_tags',
-    sa.Column('tag', sa.String(), nullable=False),
-    sa.Column('provenance_id', sa.String(), nullable=True),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('_id', sa.UUID(), nullable=False),
-    sa.ForeignKeyConstraint(['provenance_id'], ['provenances._id'], name='provenance_tags_provenance_id_fkey', ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('_id'),
-    sa.UniqueConstraint('tag', 'provenance_id', name='_provenancetag_prov_tag_uc')
-    )
-    op.create_index(op.f('ix_provenance_tags__id'), 'provenance_tags', ['_id'], unique=False)
-    op.create_index(op.f('ix_provenance_tags_created_at'), 'provenance_tags', ['created_at'], unique=False)
-    op.create_index(op.f('ix_provenance_tags_provenance_id'), 'provenance_tags', ['provenance_id'], unique=False)
-    op.create_index(op.f('ix_provenance_tags_tag'), 'provenance_tags', ['tag'], unique=False)
-    op.create_table('provenance_upstreams',
-    sa.Column('upstream_id', sa.String(), nullable=False),
-    sa.Column('downstream_id', sa.String(), nullable=False),
-    sa.ForeignKeyConstraint(['downstream_id'], ['provenances._id'], name='provenance_upstreams_downstream_id_fkey', ondelete='CASCADE'),
-    sa.ForeignKeyConstraint(['upstream_id'], ['provenances._id'], name='provenance_upstreams_upstream_id_fkey', ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('upstream_id', 'downstream_id')
-    )
-    op.create_table('refset_provenance_association',
-    sa.Column('provenance_id', sa.Text(), nullable=False),
-    sa.Column('refset_id', sa.UUID(), nullable=False),
-    sa.ForeignKeyConstraint(['provenance_id'], ['provenances._id'], name='refset_provenances_association_provenance_id_fkey', ondelete='CASCADE'),
-    sa.ForeignKeyConstraint(['refset_id'], ['refsets._id'], name='refsets_provenances_association_refset_id_fkey', ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('provenance_id', 'refset_id')
-    )
-    op.create_table('images',
-    sa.Column('_format', sa.SMALLINT(), nullable=False),
-    sa.Column('exposure_id', sa.UUID(), nullable=True),
-    sa.Column('ref_image_id', sa.UUID(), nullable=True),
-    sa.Column('is_sub', sa.Boolean(), nullable=False),
-    sa.Column('is_coadd', sa.Boolean(), nullable=False),
-    sa.Column('_type', sa.SMALLINT(), nullable=False),
-    sa.Column('provenance_id', sa.String(), nullable=False),
-    sa.Column('info', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
-    sa.Column('mjd', sa.Double(), nullable=False),
-    sa.Column('end_mjd', sa.Double(), nullable=False),
-    sa.Column('exp_time', sa.REAL(), nullable=False),
-    sa.Column('instrument', sa.Text(), nullable=False),
-    sa.Column('telescope', sa.Text(), nullable=False),
-    sa.Column('filter', sa.Text(), nullable=True),
-    sa.Column('section_id', sa.Text(), nullable=True),
-    sa.Column('project', sa.Text(), nullable=False),
-    sa.Column('target', sa.Text(), nullable=False),
-    sa.Column('preproc_bitflag', sa.SMALLINT(), nullable=False),
-    sa.Column('astro_cal_done', sa.BOOLEAN(), nullable=False),
-    sa.Column('sky_sub_done', sa.BOOLEAN(), nullable=False),
-    sa.Column('airmass', sa.REAL(), nullable=True),
-    sa.Column('fwhm_estimate', sa.REAL(), nullable=True),
-    sa.Column('zero_point_estimate', sa.REAL(), nullable=True),
-    sa.Column('lim_mag_estimate', sa.REAL(), nullable=True),
-    sa.Column('bkg_mean_estimate', sa.REAL(), nullable=True),
-    sa.Column('bkg_rms_estimate', sa.REAL(), nullable=True),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('_id', sa.UUID(), nullable=False),
-    sa.Column('filepath_extensions', postgresql.ARRAY(sa.Text(), zero_indexes=True), nullable=True),
-    sa.Column('md5sum', sa.UUID(), nullable=True),
-    sa.Column('md5sum_extensions', postgresql.ARRAY(sa.UUID(), zero_indexes=True), nullable=True),
-    sa.Column('filepath', sa.Text(), nullable=False),
-    sa.Column('ra', sa.Double(), nullable=False),
-    sa.Column('dec', sa.Double(), nullable=False),
-    sa.Column('gallat', sa.Double(), nullable=True),
-    sa.Column('gallon', sa.Double(), nullable=True),
-    sa.Column('ecllat', sa.Double(), nullable=True),
-    sa.Column('ecllon', sa.Double(), nullable=True),
-    sa.Column('ra_corner_00', sa.REAL(), nullable=False),
-    sa.Column('ra_corner_01', sa.REAL(), nullable=False),
-    sa.Column('ra_corner_10', sa.REAL(), nullable=False),
-    sa.Column('ra_corner_11', sa.REAL(), nullable=False),
-    sa.Column('dec_corner_00', sa.REAL(), nullable=False),
-    sa.Column('dec_corner_01', sa.REAL(), nullable=False),
-    sa.Column('dec_corner_10', sa.REAL(), nullable=False),
-    sa.Column('dec_corner_11', sa.REAL(), nullable=False),
-    sa.Column('minra', sa.REAL(), nullable=False),
-    sa.Column('maxra', sa.REAL(), nullable=False),
-    sa.Column('mindec', sa.REAL(), nullable=False),
-    sa.Column('maxdec', sa.REAL(), nullable=False),
-    sa.Column('_bitflag', sa.BIGINT(), nullable=False),
-    sa.Column('description', sa.Text(), nullable=True),
-    sa.Column('_upstream_bitflag', sa.BIGINT(), nullable=False),
-    sa.CheckConstraint('NOT(md5sum IS NULL AND (md5sum_extensions IS NULL OR array_position(md5sum_extensions, NULL) IS NOT NULL))', name='images_md5sum_check'),
-    sa.ForeignKeyConstraint(['exposure_id'], ['exposures._id'], name='images_exposure_id_fkey', ondelete='SET NULL'),
-    sa.ForeignKeyConstraint(['provenance_id'], ['provenances._id'], name='images_provenance_id_fkey', ondelete='CASCADE'),
-    sa.ForeignKeyConstraint(['ref_image_id'], ['images._id'], name='images_ref_image_id_fkey', ondelete='SET NULL'),
-    sa.PrimaryKeyConstraint('_id')
-    )
-    op.create_index('images_q3c_ang2ipix_idx', 'images', [sa.text('q3c_ang2ipix(ra, dec)')], unique=False)
-    op.create_index(op.f('ix_images__bitflag'), 'images', ['_bitflag'], unique=False)
-    op.create_index(op.f('ix_images__id'), 'images', ['_id'], unique=False)
-    op.create_index(op.f('ix_images__type'), 'images', ['_type'], unique=False)
-    op.create_index(op.f('ix_images__upstream_bitflag'), 'images', ['_upstream_bitflag'], unique=False)
-    op.create_index(op.f('ix_images_airmass'), 'images', ['airmass'], unique=False)
-    op.create_index(op.f('ix_images_bkg_mean_estimate'), 'images', ['bkg_mean_estimate'], unique=False)
-    op.create_index(op.f('ix_images_bkg_rms_estimate'), 'images', ['bkg_rms_estimate'], unique=False)
-    op.create_index(op.f('ix_images_created_at'), 'images', ['created_at'], unique=False)
-    op.create_index(op.f('ix_images_dec_corner_00'), 'images', ['dec_corner_00'], unique=False)
-    op.create_index(op.f('ix_images_dec_corner_01'), 'images', ['dec_corner_01'], unique=False)
-    op.create_index(op.f('ix_images_dec_corner_10'), 'images', ['dec_corner_10'], unique=False)
-    op.create_index(op.f('ix_images_dec_corner_11'), 'images', ['dec_corner_11'], unique=False)
-    op.create_index(op.f('ix_images_ecllat'), 'images', ['ecllat'], unique=False)
-    op.create_index(op.f('ix_images_end_mjd'), 'images', ['end_mjd'], unique=False)
-    op.create_index(op.f('ix_images_exp_time'), 'images', ['exp_time'], unique=False)
-    op.create_index(op.f('ix_images_exposure_id'), 'images', ['exposure_id'], unique=False)
-    op.create_index(op.f('ix_images_filepath'), 'images', ['filepath'], unique=True)
-    op.create_index(op.f('ix_images_filter'), 'images', ['filter'], unique=False)
-    op.create_index(op.f('ix_images_fwhm_estimate'), 'images', ['fwhm_estimate'], unique=False)
-    op.create_index(op.f('ix_images_gallat'), 'images', ['gallat'], unique=False)
-    op.create_index(op.f('ix_images_instrument'), 'images', ['instrument'], unique=False)
-    op.create_index(op.f('ix_images_is_coadd'), 'images', ['is_coadd'], unique=False)
-    op.create_index(op.f('ix_images_is_sub'), 'images', ['is_sub'], unique=False)
-    op.create_index(op.f('ix_images_lim_mag_estimate'), 'images', ['lim_mag_estimate'], unique=False)
-    op.create_index(op.f('ix_images_maxdec'), 'images', ['maxdec'], unique=False)
-    op.create_index(op.f('ix_images_maxra'), 'images', ['maxra'], unique=False)
-    op.create_index(op.f('ix_images_mindec'), 'images', ['mindec'], unique=False)
-    op.create_index(op.f('ix_images_minra'), 'images', ['minra'], unique=False)
-    op.create_index(op.f('ix_images_mjd'), 'images', ['mjd'], unique=False)
-    op.create_index(op.f('ix_images_project'), 'images', ['project'], unique=False)
-    op.create_index(op.f('ix_images_provenance_id'), 'images', ['provenance_id'], unique=False)
-    op.create_index(op.f('ix_images_ra_corner_00'), 'images', ['ra_corner_00'], unique=False)
-    op.create_index(op.f('ix_images_ra_corner_01'), 'images', ['ra_corner_01'], unique=False)
-    op.create_index(op.f('ix_images_ra_corner_10'), 'images', ['ra_corner_10'], unique=False)
-    op.create_index(op.f('ix_images_ra_corner_11'), 'images', ['ra_corner_11'], unique=False)
-    op.create_index(op.f('ix_images_ref_image_id'), 'images', ['ref_image_id'], unique=False)
-    op.create_index(op.f('ix_images_section_id'), 'images', ['section_id'], unique=False)
-    op.create_index(op.f('ix_images_target'), 'images', ['target'], unique=False)
-    op.create_index(op.f('ix_images_telescope'), 'images', ['telescope'], unique=False)
-    op.create_index(op.f('ix_images_zero_point_estimate'), 'images', ['zero_point_estimate'], unique=False)
-    op.create_table('knownexposures',
-    sa.Column('instrument', sa.Text(), nullable=False),
-    sa.Column('identifier', sa.Text(), nullable=False),
-    sa.Column('params', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
-    sa.Column('hold', sa.Boolean(), server_default='false', nullable=False),
-    sa.Column('exposure_id', sa.UUID(), nullable=True),
-    sa.Column('mjd', sa.Double(), nullable=True),
-    sa.Column('exp_time', sa.REAL(), nullable=True),
-    sa.Column('filter', sa.Text(), nullable=True),
-    sa.Column('project', sa.Text(), nullable=True),
-    sa.Column('target', sa.Text(), nullable=True),
-    sa.Column('cluster_id', sa.Text(), nullable=True),
-    sa.Column('claim_time', sa.DateTime(), nullable=True),
-    sa.Column('ra', sa.Double(), nullable=True),
-    sa.Column('dec', sa.Double(), nullable=True),
-    sa.Column('gallat', sa.Double(), nullable=True),
-    sa.Column('gallon', sa.Double(), nullable=True),
-    sa.Column('ecllat', sa.Double(), nullable=True),
-    sa.Column('ecllon', sa.Double(), nullable=True),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('_id', sa.UUID(), nullable=False),
-    sa.ForeignKeyConstraint(['exposure_id'], ['exposures._id'], name='knownexposure_exposure_id_fkey'),
-    sa.PrimaryKeyConstraint('_id')
-    )
-    op.create_index(op.f('ix_knownexposures__id'), 'knownexposures', ['_id'], unique=False)
-    op.create_index(op.f('ix_knownexposures_created_at'), 'knownexposures', ['created_at'], unique=False)
-    op.create_index(op.f('ix_knownexposures_ecllat'), 'knownexposures', ['ecllat'], unique=False)
-    op.create_index(op.f('ix_knownexposures_gallat'), 'knownexposures', ['gallat'], unique=False)
-    op.create_index(op.f('ix_knownexposures_identifier'), 'knownexposures', ['identifier'], unique=False)
-    op.create_index(op.f('ix_knownexposures_instrument'), 'knownexposures', ['instrument'], unique=False)
-    op.create_index(op.f('ix_knownexposures_mjd'), 'knownexposures', ['mjd'], unique=False)
-    op.create_index('knownexposures_q3c_ang2ipix_idx', 'knownexposures', [sa.text('q3c_ang2ipix(ra, dec)')], unique=False)
-    op.create_table('reports',
-    sa.Column('exposure_id', sa.UUID(), nullable=False),
-    sa.Column('section_id', sa.Text(), nullable=False),
-    sa.Column('start_time', sa.DateTime(), nullable=False),
-    sa.Column('finish_time', sa.DateTime(), nullable=True),
-    sa.Column('success', sa.Boolean(), nullable=False),
-    sa.Column('num_prev_reports', sa.Integer(), nullable=False),
-    sa.Column('worker_id', sa.Text(), nullable=True),
-    sa.Column('node_id', sa.Text(), nullable=True),
-    sa.Column('cluster_id', sa.Text(), nullable=True),
-    sa.Column('error_step', sa.Text(), nullable=True),
-    sa.Column('error_type', sa.Text(), nullable=True),
-    sa.Column('error_message', sa.Text(), nullable=True),
-    sa.Column('warnings', sa.Text(), nullable=True),
-    sa.Column('process_memory', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
-    sa.Column('process_runtime', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
-    sa.Column('progress_steps_bitflag', sa.BIGINT(), nullable=False),
-    sa.Column('products_exist_bitflag', sa.BIGINT(), nullable=False),
-    sa.Column('products_committed_bitflag', sa.BIGINT(), nullable=False),
-    sa.Column('provenance_id', sa.String(), nullable=False),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('_id', sa.UUID(), nullable=False),
-    sa.ForeignKeyConstraint(['exposure_id'], ['exposures._id'], name='reports_exposure_id_fkey', ondelete='CASCADE'),
-    sa.ForeignKeyConstraint(['provenance_id'], ['provenances._id'], name='images_provenance_id_fkey', ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('_id')
-    )
-    op.create_index(op.f('ix_reports__id'), 'reports', ['_id'], unique=False)
-    op.create_index(op.f('ix_reports_created_at'), 'reports', ['created_at'], unique=False)
-    op.create_index(op.f('ix_reports_exposure_id'), 'reports', ['exposure_id'], unique=False)
-    op.create_index(op.f('ix_reports_finish_time'), 'reports', ['finish_time'], unique=False)
-    op.create_index(op.f('ix_reports_products_committed_bitflag'), 'reports', ['products_committed_bitflag'], unique=False)
-    op.create_index(op.f('ix_reports_products_exist_bitflag'), 'reports', ['products_exist_bitflag'], unique=False)
-    op.create_index(op.f('ix_reports_progress_steps_bitflag'), 'reports', ['progress_steps_bitflag'], unique=False)
-    op.create_index(op.f('ix_reports_provenance_id'), 'reports', ['provenance_id'], unique=False)
-    op.create_index(op.f('ix_reports_section_id'), 'reports', ['section_id'], unique=False)
-    op.create_index(op.f('ix_reports_start_time'), 'reports', ['start_time'], unique=False)
-    op.create_index(op.f('ix_reports_success'), 'reports', ['success'], unique=False)
-    op.create_table('calibrator_files',
-    sa.Column('_type', sa.SMALLINT(), nullable=False),
-    sa.Column('_calibrator_set', sa.SMALLINT(), nullable=False),
-    sa.Column('_flat_type', sa.SMALLINT(), nullable=True),
-    sa.Column('instrument', sa.Text(), nullable=False),
-    sa.Column('sensor_section', sa.Text(), nullable=False),
-    sa.Column('image_id', sa.UUID(), nullable=True),
-    sa.Column('datafile_id', sa.UUID(), nullable=True),
-    sa.Column('validity_start', sa.DateTime(), nullable=True),
-    sa.Column('validity_end', sa.DateTime(), nullable=True),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('_id', sa.UUID(), nullable=False),
-    sa.ForeignKeyConstraint(['datafile_id'], ['data_files._id'], name='calibrator_files_data_file_id_fkey', ondelete='CASCADE'),
-    sa.ForeignKeyConstraint(['image_id'], ['images._id'], name='calibrator_files_image_id_fkey', ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('_id')
-    )
-    op.create_index(op.f('ix_calibrator_files__calibrator_set'), 'calibrator_files', ['_calibrator_set'], unique=False)
-    op.create_index(op.f('ix_calibrator_files__flat_type'), 'calibrator_files', ['_flat_type'], unique=False)
-    op.create_index(op.f('ix_calibrator_files__id'), 'calibrator_files', ['_id'], unique=False)
-    op.create_index(op.f('ix_calibrator_files__type'), 'calibrator_files', ['_type'], unique=False)
-    op.create_index(op.f('ix_calibrator_files_created_at'), 'calibrator_files', ['created_at'], unique=False)
-    op.create_index(op.f('ix_calibrator_files_datafile_id'), 'calibrator_files', ['datafile_id'], unique=False)
-    op.create_index(op.f('ix_calibrator_files_image_id'), 'calibrator_files', ['image_id'], unique=False)
-    op.create_index(op.f('ix_calibrator_files_instrument'), 'calibrator_files', ['instrument'], unique=False)
-    op.create_index(op.f('ix_calibrator_files_sensor_section'), 'calibrator_files', ['sensor_section'], unique=False)
-    op.create_index(op.f('ix_calibrator_files_validity_end'), 'calibrator_files', ['validity_end'], unique=False)
-    op.create_index(op.f('ix_calibrator_files_validity_start'), 'calibrator_files', ['validity_start'], unique=False)
-    op.create_table('image_upstreams_association',
-    sa.Column('upstream_id', sa.UUID(), nullable=False),
-    sa.Column('downstream_id', sa.UUID(), nullable=False),
-    sa.ForeignKeyConstraint(['downstream_id'], ['images._id'], name='image_upstreams_association_downstream_id_fkey', ondelete='CASCADE'),
-    sa.ForeignKeyConstraint(['upstream_id'], ['images._id'], name='image_upstreams_association_upstream_id_fkey', ondelete='RESTRICT'),
-    sa.PrimaryKeyConstraint('upstream_id', 'downstream_id')
-    )
-    op.create_table('refs',
-    sa.Column('image_id', sa.UUID(), nullable=False),
-    sa.Column('target', sa.Text(), nullable=False),
-    sa.Column('instrument', sa.Text(), nullable=False),
-    sa.Column('filter', sa.Text(), nullable=False),
-    sa.Column('section_id', sa.Text(), nullable=False),
-    sa.Column('is_bad', sa.Boolean(), nullable=False),
-    sa.Column('bad_reason', sa.Text(), nullable=True),
-    sa.Column('bad_comment', sa.Text(), nullable=True),
-    sa.Column('provenance_id', sa.String(), nullable=False),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('_id', sa.UUID(), nullable=False),
-    sa.ForeignKeyConstraint(['image_id'], ['images._id'], name='references_image_id_fkey', ondelete='CASCADE'),
-    sa.ForeignKeyConstraint(['provenance_id'], ['provenances._id'], name='references_provenance_id_fkey', ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('_id')
-    )
-    op.create_index(op.f('ix_refs__id'), 'refs', ['_id'], unique=False)
-    op.create_index(op.f('ix_refs_created_at'), 'refs', ['created_at'], unique=False)
-    op.create_index(op.f('ix_refs_filter'), 'refs', ['filter'], unique=False)
-    op.create_index(op.f('ix_refs_image_id'), 'refs', ['image_id'], unique=False)
-    op.create_index(op.f('ix_refs_instrument'), 'refs', ['instrument'], unique=False)
-    op.create_index(op.f('ix_refs_provenance_id'), 'refs', ['provenance_id'], unique=False)
-    op.create_index(op.f('ix_refs_section_id'), 'refs', ['section_id'], unique=False)
-    op.create_index(op.f('ix_refs_target'), 'refs', ['target'], unique=False)
-    op.create_table('source_lists',
-    sa.Column('_format', sa.SMALLINT(), nullable=False),
-    sa.Column('image_id', sa.UUID(), nullable=False),
-    sa.Column('aper_rads', postgresql.ARRAY(sa.REAL(), zero_indexes=True), nullable=True),
-    sa.Column('inf_aper_num', sa.SMALLINT(), nullable=True),
-    sa.Column('best_aper_num', sa.SMALLINT(), nullable=True),
-    sa.Column('num_sources', sa.Integer(), nullable=False),
-    sa.Column('provenance_id', sa.String(), nullable=False),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('_id', sa.UUID(), nullable=False),
-    sa.Column('filepath_extensions', postgresql.ARRAY(sa.Text(), zero_indexes=True), nullable=True),
-    sa.Column('md5sum', sa.UUID(), nullable=True),
-    sa.Column('md5sum_extensions', postgresql.ARRAY(sa.UUID(), zero_indexes=True), nullable=True),
-    sa.Column('filepath', sa.Text(), nullable=False),
-    sa.Column('_bitflag', sa.BIGINT(), nullable=False),
-    sa.Column('description', sa.Text(), nullable=True),
-    sa.Column('_upstream_bitflag', sa.BIGINT(), nullable=False),
-    sa.CheckConstraint('NOT(md5sum IS NULL AND (md5sum_extensions IS NULL OR array_position(md5sum_extensions, NULL) IS NOT NULL))', name='source_lists_md5sum_check'),
-    sa.ForeignKeyConstraint(['image_id'], ['images._id'], name='source_lists_image_id_fkey', ondelete='CASCADE'),
-    sa.ForeignKeyConstraint(['provenance_id'], ['provenances._id'], name='source_lists_provenance_id_fkey', ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('_id'),
-    sa.UniqueConstraint('image_id', 'provenance_id', name='_source_list_image_provenance_uc')
-    )
-    op.create_index(op.f('ix_source_lists__bitflag'), 'source_lists', ['_bitflag'], unique=False)
-    op.create_index(op.f('ix_source_lists__id'), 'source_lists', ['_id'], unique=False)
-    op.create_index(op.f('ix_source_lists__upstream_bitflag'), 'source_lists', ['_upstream_bitflag'], unique=False)
-    op.create_index(op.f('ix_source_lists_created_at'), 'source_lists', ['created_at'], unique=False)
-    op.create_index(op.f('ix_source_lists_filepath'), 'source_lists', ['filepath'], unique=True)
-    op.create_index(op.f('ix_source_lists_image_id'), 'source_lists', ['image_id'], unique=False)
-    op.create_index(op.f('ix_source_lists_num_sources'), 'source_lists', ['num_sources'], unique=False)
-    op.create_index(op.f('ix_source_lists_provenance_id'), 'source_lists', ['provenance_id'], unique=False)
-    op.create_table('backgrounds',
-    sa.Column('_format', sa.SMALLINT(), nullable=False),
-    sa.Column('_method', sa.SMALLINT(), nullable=False),
-    sa.Column('sources_id', sa.UUID(), nullable=False),
-    sa.Column('value', sa.Float(), nullable=False),
-    sa.Column('noise', sa.Float(), nullable=False),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('_id', sa.UUID(), nullable=False),
-    sa.Column('filepath_extensions', postgresql.ARRAY(sa.Text(), zero_indexes=True), nullable=True),
-    sa.Column('md5sum', sa.UUID(), nullable=True),
-    sa.Column('md5sum_extensions', postgresql.ARRAY(sa.UUID(), zero_indexes=True), nullable=True),
-    sa.Column('filepath', sa.Text(), nullable=False),
-    sa.Column('_bitflag', sa.BIGINT(), nullable=False),
-    sa.Column('description', sa.Text(), nullable=True),
-    sa.Column('_upstream_bitflag', sa.BIGINT(), nullable=False),
-    sa.CheckConstraint('NOT(md5sum IS NULL AND (md5sum_extensions IS NULL OR array_position(md5sum_extensions, NULL) IS NOT NULL))', name='backgrounds_md5sum_check'),
-    sa.ForeignKeyConstraint(['sources_id'], ['source_lists._id'], name='backgrounds_source_lists_id_fkey', ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('_id')
-    )
-    op.create_index(op.f('ix_backgrounds__bitflag'), 'backgrounds', ['_bitflag'], unique=False)
-    op.create_index(op.f('ix_backgrounds__id'), 'backgrounds', ['_id'], unique=False)
-    op.create_index(op.f('ix_backgrounds__upstream_bitflag'), 'backgrounds', ['_upstream_bitflag'], unique=False)
-    op.create_index(op.f('ix_backgrounds_created_at'), 'backgrounds', ['created_at'], unique=False)
-    op.create_index(op.f('ix_backgrounds_filepath'), 'backgrounds', ['filepath'], unique=True)
-    op.create_index(op.f('ix_backgrounds_noise'), 'backgrounds', ['noise'], unique=False)
-    op.create_index(op.f('ix_backgrounds_sources_id'), 'backgrounds', ['sources_id'], unique=True)
-    op.create_index(op.f('ix_backgrounds_value'), 'backgrounds', ['value'], unique=False)
-    op.create_table('cutouts',
-    sa.Column('_format', sa.SMALLINT(), nullable=False),
-    sa.Column('sources_id', sa.UUID(), nullable=False),
-    sa.Column('provenance_id', sa.String(), nullable=False),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('_id', sa.UUID(), nullable=False),
-    sa.Column('filepath_extensions', postgresql.ARRAY(sa.Text(), zero_indexes=True), nullable=True),
-    sa.Column('md5sum', sa.UUID(), nullable=True),
-    sa.Column('md5sum_extensions', postgresql.ARRAY(sa.UUID(), zero_indexes=True), nullable=True),
-    sa.Column('filepath', sa.Text(), nullable=False),
-    sa.Column('_bitflag', sa.BIGINT(), nullable=False),
-    sa.Column('description', sa.Text(), nullable=True),
-    sa.Column('_upstream_bitflag', sa.BIGINT(), nullable=False),
-    sa.CheckConstraint('NOT(md5sum IS NULL AND (md5sum_extensions IS NULL OR array_position(md5sum_extensions, NULL) IS NOT NULL))', name='cutouts_md5sum_check'),
-    sa.ForeignKeyConstraint(['provenance_id'], ['provenances._id'], name='cutouts_provenance_id_fkey', ondelete='CASCADE'),
-    sa.ForeignKeyConstraint(['sources_id'], ['source_lists._id'], name='cutouts_source_list_id_fkey', ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('_id'),
-    sa.UniqueConstraint('sources_id', 'provenance_id', name='_cutouts_sources_provenance_uc')
-    )
-    op.create_index(op.f('ix_cutouts__bitflag'), 'cutouts', ['_bitflag'], unique=False)
-    op.create_index(op.f('ix_cutouts__id'), 'cutouts', ['_id'], unique=False)
-    op.create_index(op.f('ix_cutouts__upstream_bitflag'), 'cutouts', ['_upstream_bitflag'], unique=False)
-    op.create_index(op.f('ix_cutouts_created_at'), 'cutouts', ['created_at'], unique=False)
-    op.create_index(op.f('ix_cutouts_filepath'), 'cutouts', ['filepath'], unique=True)
-    op.create_index(op.f('ix_cutouts_provenance_id'), 'cutouts', ['provenance_id'], unique=False)
-    op.create_index(op.f('ix_cutouts_sources_id'), 'cutouts', ['sources_id'], unique=False)
-    op.create_table('psfs',
-    sa.Column('_format', sa.SMALLINT(), nullable=False),
-    sa.Column('sources_id', sa.UUID(), nullable=False),
-    sa.Column('fwhm_pixels', sa.REAL(), nullable=False),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('_id', sa.UUID(), nullable=False),
-    sa.Column('filepath_extensions', postgresql.ARRAY(sa.Text(), zero_indexes=True), nullable=True),
-    sa.Column('md5sum', sa.UUID(), nullable=True),
-    sa.Column('md5sum_extensions', postgresql.ARRAY(sa.UUID(), zero_indexes=True), nullable=True),
-    sa.Column('filepath', sa.Text(), nullable=False),
-    sa.Column('_bitflag', sa.BIGINT(), nullable=False),
-    sa.Column('description', sa.Text(), nullable=True),
-    sa.Column('_upstream_bitflag', sa.BIGINT(), nullable=False),
-    sa.CheckConstraint('NOT(md5sum IS NULL AND (md5sum_extensions IS NULL OR array_position(md5sum_extensions, NULL) IS NOT NULL))', name='psfs_md5sum_check'),
-    sa.ForeignKeyConstraint(['sources_id'], ['source_lists._id'], name='psfs_source_lists_id_fkey', ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('_id')
-    )
-    op.create_index(op.f('ix_psfs__bitflag'), 'psfs', ['_bitflag'], unique=False)
-    op.create_index(op.f('ix_psfs__id'), 'psfs', ['_id'], unique=False)
-    op.create_index(op.f('ix_psfs__upstream_bitflag'), 'psfs', ['_upstream_bitflag'], unique=False)
-    op.create_index(op.f('ix_psfs_created_at'), 'psfs', ['created_at'], unique=False)
-    op.create_index(op.f('ix_psfs_filepath'), 'psfs', ['filepath'], unique=True)
-    op.create_index(op.f('ix_psfs_sources_id'), 'psfs', ['sources_id'], unique=True)
-    op.create_table('world_coordinates',
-    sa.Column('sources_id', sa.UUID(), nullable=False),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('_id', sa.UUID(), nullable=False),
-    sa.Column('filepath_extensions', postgresql.ARRAY(sa.Text(), zero_indexes=True), nullable=True),
-    sa.Column('md5sum', sa.UUID(), nullable=True),
-    sa.Column('md5sum_extensions', postgresql.ARRAY(sa.UUID(), zero_indexes=True), nullable=True),
-    sa.Column('filepath', sa.Text(), nullable=False),
-    sa.Column('_bitflag', sa.BIGINT(), nullable=False),
-    sa.Column('description', sa.Text(), nullable=True),
-    sa.Column('_upstream_bitflag', sa.BIGINT(), nullable=False),
-    sa.CheckConstraint('NOT(md5sum IS NULL AND (md5sum_extensions IS NULL OR array_position(md5sum_extensions, NULL) IS NOT NULL))', name='world_coordinates_md5sum_check'),
-    sa.ForeignKeyConstraint(['sources_id'], ['source_lists._id'], name='world_coordinates_source_list_id_fkey', ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('_id')
-    )
-    op.create_index(op.f('ix_world_coordinates__bitflag'), 'world_coordinates', ['_bitflag'], unique=False)
-    op.create_index(op.f('ix_world_coordinates__id'), 'world_coordinates', ['_id'], unique=False)
-    op.create_index(op.f('ix_world_coordinates__upstream_bitflag'), 'world_coordinates', ['_upstream_bitflag'], unique=False)
-    op.create_index(op.f('ix_world_coordinates_created_at'), 'world_coordinates', ['created_at'], unique=False)
-    op.create_index(op.f('ix_world_coordinates_filepath'), 'world_coordinates', ['filepath'], unique=True)
-    op.create_index(op.f('ix_world_coordinates_sources_id'), 'world_coordinates', ['sources_id'], unique=True)
-    op.create_table('zero_points',
-    sa.Column('sources_id', sa.UUID(), nullable=False),
-    sa.Column('zp', sa.REAL(), nullable=False),
-    sa.Column('dzp', sa.REAL(), nullable=False),
-    sa.Column('aper_cor_radii', postgresql.ARRAY(sa.REAL(), zero_indexes=True), nullable=True),
-    sa.Column('aper_cors', postgresql.ARRAY(sa.REAL(), zero_indexes=True), nullable=True),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('_id', sa.UUID(), nullable=False),
-    sa.Column('_bitflag', sa.BIGINT(), nullable=False),
-    sa.Column('description', sa.Text(), nullable=True),
-    sa.Column('_upstream_bitflag', sa.BIGINT(), nullable=False),
-    sa.ForeignKeyConstraint(['sources_id'], ['source_lists._id'], name='zero_points_source_list_id_fkey', ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('_id')
-    )
-    op.create_index(op.f('ix_zero_points__bitflag'), 'zero_points', ['_bitflag'], unique=False)
-    op.create_index(op.f('ix_zero_points__id'), 'zero_points', ['_id'], unique=False)
-    op.create_index(op.f('ix_zero_points__upstream_bitflag'), 'zero_points', ['_upstream_bitflag'], unique=False)
-    op.create_index(op.f('ix_zero_points_created_at'), 'zero_points', ['created_at'], unique=False)
-    op.create_index(op.f('ix_zero_points_sources_id'), 'zero_points', ['sources_id'], unique=True)
-    op.create_table('measurements',
-    sa.Column('cutouts_id', sa.UUID(), nullable=False),
-    sa.Column('index_in_sources', sa.Integer(), nullable=False),
-    sa.Column('object_id', sa.UUID(), nullable=False),
-    sa.Column('provenance_id', sa.String(), nullable=False),
-    sa.Column('flux_psf', sa.REAL(), nullable=False),
-    sa.Column('flux_psf_err', sa.REAL(), nullable=False),
-    sa.Column('flux_apertures', postgresql.ARRAY(sa.REAL(), zero_indexes=True), nullable=False),
-    sa.Column('flux_apertures_err', postgresql.ARRAY(sa.REAL(), zero_indexes=True), nullable=False),
-    sa.Column('aper_radii', postgresql.ARRAY(sa.REAL(), zero_indexes=True), nullable=False),
-    sa.Column('best_aperture', sa.SMALLINT(), nullable=False),
-    sa.Column('bkg_mean', sa.REAL(), nullable=False),
-    sa.Column('bkg_std', sa.REAL(), nullable=False),
-    sa.Column('bkg_pix', sa.REAL(), nullable=False),
-    sa.Column('area_psf', sa.REAL(), nullable=False),
-    sa.Column('area_apertures', postgresql.ARRAY(sa.REAL(), zero_indexes=True), nullable=False),
-    sa.Column('center_x_pixel', sa.Integer(), nullable=False),
-    sa.Column('center_y_pixel', sa.Integer(), nullable=False),
-    sa.Column('offset_x', sa.REAL(), nullable=False),
-    sa.Column('offset_y', sa.REAL(), nullable=False),
-    sa.Column('width', sa.REAL(), nullable=False),
-    sa.Column('elongation', sa.REAL(), nullable=False),
-    sa.Column('position_angle', sa.REAL(), nullable=False),
-    sa.Column('is_bad', sa.Boolean(), nullable=False),
-    sa.Column('disqualifier_scores', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
-    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('modified', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
-    sa.Column('_id', sa.UUID(), nullable=False),
-    sa.Column('ra', sa.Double(), nullable=False),
-    sa.Column('dec', sa.Double(), nullable=False),
-    sa.Column('gallat', sa.Double(), nullable=True),
-    sa.Column('gallon', sa.Double(), nullable=True),
-    sa.Column('ecllat', sa.Double(), nullable=True),
-    sa.Column('ecllon', sa.Double(), nullable=True),
-    sa.Column('_bitflag', sa.BIGINT(), nullable=False),
-    sa.Column('description', sa.Text(), nullable=True),
-    sa.Column('_upstream_bitflag', sa.BIGINT(), nullable=False),
-    sa.ForeignKeyConstraint(['cutouts_id'], ['cutouts._id'], name='measurements_cutouts_id_fkey', ondelete='CASCADE'),
-    sa.ForeignKeyConstraint(['object_id'], ['objects._id'], name='measurements_object_id_fkey', ondelete='CASCADE'),
-    sa.ForeignKeyConstraint(['provenance_id'], ['provenances._id'], name='measurements_provenance_id_fkey', ondelete='CASCADE'),
-    sa.PrimaryKeyConstraint('_id'),
-    sa.UniqueConstraint('cutouts_id', 'index_in_sources', 'provenance_id', name='_measurements_cutouts_provenance_uc')
-    )
-    op.create_index(op.f('ix_measurements__bitflag'), 'measurements', ['_bitflag'], unique=False)
-    op.create_index(op.f('ix_measurements__id'), 'measurements', ['_id'], unique=False)
-    op.create_index(op.f('ix_measurements__upstream_bitflag'), 'measurements', ['_upstream_bitflag'], unique=False)
-    op.create_index(op.f('ix_measurements_created_at'), 'measurements', ['created_at'], unique=False)
-    op.create_index(op.f('ix_measurements_cutouts_id'), 'measurements', ['cutouts_id'], unique=False)
-    op.create_index(op.f('ix_measurements_disqualifier_scores'), 'measurements', ['disqualifier_scores'], unique=False)
-    op.create_index(op.f('ix_measurements_ecllat'), 'measurements', ['ecllat'], unique=False)
-    op.create_index(op.f('ix_measurements_gallat'), 'measurements', ['gallat'], unique=False)
-    op.create_index(op.f('ix_measurements_is_bad'), 'measurements', ['is_bad'], unique=False)
-    op.create_index(op.f('ix_measurements_object_id'), 'measurements', ['object_id'], unique=False)
-    op.create_index(op.f('ix_measurements_provenance_id'), 'measurements', ['provenance_id'], unique=False)
-    op.create_index('ix_measurements_scores_gin', 'measurements', ['disqualifier_scores'], unique=False, postgresql_using='gin')
-    op.create_index(op.f('ix_measurements_width'), 'measurements', ['width'], unique=False)
-    op.create_index('measurements_q3c_ang2ipix_idx', 'measurements', [sa.text('q3c_ang2ipix(ra, dec)')], unique=False)
-    # ### end Alembic commands ###
-
-
-def downgrade() -> None:
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_index('measurements_q3c_ang2ipix_idx', table_name='measurements')
-    op.drop_index(op.f('ix_measurements_width'), table_name='measurements')
-    op.drop_index('ix_measurements_scores_gin', table_name='measurements', postgresql_using='gin')
-    op.drop_index(op.f('ix_measurements_provenance_id'), table_name='measurements')
-    op.drop_index(op.f('ix_measurements_object_id'), table_name='measurements')
-    op.drop_index(op.f('ix_measurements_is_bad'), table_name='measurements')
-    op.drop_index(op.f('ix_measurements_gallat'), table_name='measurements')
-    op.drop_index(op.f('ix_measurements_ecllat'), table_name='measurements')
-    op.drop_index(op.f('ix_measurements_disqualifier_scores'), table_name='measurements')
-    op.drop_index(op.f('ix_measurements_cutouts_id'), table_name='measurements')
-    op.drop_index(op.f('ix_measurements_created_at'), table_name='measurements')
-    op.drop_index(op.f('ix_measurements__upstream_bitflag'), table_name='measurements')
-    op.drop_index(op.f('ix_measurements__id'), table_name='measurements')
-    op.drop_index(op.f('ix_measurements__bitflag'), table_name='measurements')
-    op.drop_table('measurements')
-    op.drop_index(op.f('ix_zero_points_sources_id'), table_name='zero_points')
-    op.drop_index(op.f('ix_zero_points_created_at'), table_name='zero_points')
-    op.drop_index(op.f('ix_zero_points__upstream_bitflag'), table_name='zero_points')
-    op.drop_index(op.f('ix_zero_points__id'), table_name='zero_points')
-    op.drop_index(op.f('ix_zero_points__bitflag'), table_name='zero_points')
-    op.drop_table('zero_points')
-    op.drop_index(op.f('ix_world_coordinates_sources_id'), table_name='world_coordinates')
-    op.drop_index(op.f('ix_world_coordinates_filepath'), table_name='world_coordinates')
-    op.drop_index(op.f('ix_world_coordinates_created_at'), table_name='world_coordinates')
-    op.drop_index(op.f('ix_world_coordinates__upstream_bitflag'), table_name='world_coordinates')
-    op.drop_index(op.f('ix_world_coordinates__id'), table_name='world_coordinates')
-    op.drop_index(op.f('ix_world_coordinates__bitflag'), table_name='world_coordinates')
-    op.drop_table('world_coordinates')
-    op.drop_index(op.f('ix_psfs_sources_id'), table_name='psfs')
-    op.drop_index(op.f('ix_psfs_filepath'), table_name='psfs')
-    op.drop_index(op.f('ix_psfs_created_at'), table_name='psfs')
-    op.drop_index(op.f('ix_psfs__upstream_bitflag'), table_name='psfs')
-    op.drop_index(op.f('ix_psfs__id'), table_name='psfs')
-    op.drop_index(op.f('ix_psfs__bitflag'), table_name='psfs')
-    op.drop_table('psfs')
-    op.drop_index(op.f('ix_cutouts_sources_id'), table_name='cutouts')
-    op.drop_index(op.f('ix_cutouts_provenance_id'), table_name='cutouts')
-    op.drop_index(op.f('ix_cutouts_filepath'), table_name='cutouts')
-    op.drop_index(op.f('ix_cutouts_created_at'), table_name='cutouts')
-    op.drop_index(op.f('ix_cutouts__upstream_bitflag'), table_name='cutouts')
-    op.drop_index(op.f('ix_cutouts__id'), table_name='cutouts')
-    op.drop_index(op.f('ix_cutouts__bitflag'), table_name='cutouts')
-    op.drop_table('cutouts')
-    op.drop_index(op.f('ix_backgrounds_value'), table_name='backgrounds')
-    op.drop_index(op.f('ix_backgrounds_sources_id'), table_name='backgrounds')
-    op.drop_index(op.f('ix_backgrounds_noise'), table_name='backgrounds')
-    op.drop_index(op.f('ix_backgrounds_filepath'), table_name='backgrounds')
-    op.drop_index(op.f('ix_backgrounds_created_at'), table_name='backgrounds')
-    op.drop_index(op.f('ix_backgrounds__upstream_bitflag'), table_name='backgrounds')
-    op.drop_index(op.f('ix_backgrounds__id'), table_name='backgrounds')
-    op.drop_index(op.f('ix_backgrounds__bitflag'), table_name='backgrounds')
-    op.drop_table('backgrounds')
-    op.drop_index(op.f('ix_source_lists_provenance_id'), table_name='source_lists')
-    op.drop_index(op.f('ix_source_lists_num_sources'), table_name='source_lists')
-    op.drop_index(op.f('ix_source_lists_image_id'), table_name='source_lists')
-    op.drop_index(op.f('ix_source_lists_filepath'), table_name='source_lists')
-    op.drop_index(op.f('ix_source_lists_created_at'), table_name='source_lists')
-    op.drop_index(op.f('ix_source_lists__upstream_bitflag'), table_name='source_lists')
-    op.drop_index(op.f('ix_source_lists__id'), table_name='source_lists')
-    op.drop_index(op.f('ix_source_lists__bitflag'), table_name='source_lists')
-    op.drop_table('source_lists')
-    op.drop_index(op.f('ix_refs_target'), table_name='refs')
-    op.drop_index(op.f('ix_refs_section_id'), table_name='refs')
-    op.drop_index(op.f('ix_refs_provenance_id'), table_name='refs')
-    op.drop_index(op.f('ix_refs_instrument'), table_name='refs')
-    op.drop_index(op.f('ix_refs_image_id'), table_name='refs')
-    op.drop_index(op.f('ix_refs_filter'), table_name='refs')
-    op.drop_index(op.f('ix_refs_created_at'), table_name='refs')
-    op.drop_index(op.f('ix_refs__id'), table_name='refs')
-    op.drop_table('refs')
-    op.drop_table('image_upstreams_association')
-    op.drop_index(op.f('ix_calibrator_files_validity_start'), table_name='calibrator_files')
-    op.drop_index(op.f('ix_calibrator_files_validity_end'), table_name='calibrator_files')
-    op.drop_index(op.f('ix_calibrator_files_sensor_section'), table_name='calibrator_files')
-    op.drop_index(op.f('ix_calibrator_files_instrument'), table_name='calibrator_files')
-    op.drop_index(op.f('ix_calibrator_files_image_id'), table_name='calibrator_files')
-    op.drop_index(op.f('ix_calibrator_files_datafile_id'), table_name='calibrator_files')
-    op.drop_index(op.f('ix_calibrator_files_created_at'), table_name='calibrator_files')
-    op.drop_index(op.f('ix_calibrator_files__type'), table_name='calibrator_files')
-    op.drop_index(op.f('ix_calibrator_files__id'), table_name='calibrator_files')
-    op.drop_index(op.f('ix_calibrator_files__flat_type'), table_name='calibrator_files')
-    op.drop_index(op.f('ix_calibrator_files__calibrator_set'), table_name='calibrator_files')
-    op.drop_table('calibrator_files')
-    op.drop_index(op.f('ix_reports_success'), table_name='reports')
-    op.drop_index(op.f('ix_reports_start_time'), table_name='reports')
-    op.drop_index(op.f('ix_reports_section_id'), table_name='reports')
-    op.drop_index(op.f('ix_reports_provenance_id'), table_name='reports')
-    op.drop_index(op.f('ix_reports_progress_steps_bitflag'), table_name='reports')
-    op.drop_index(op.f('ix_reports_products_exist_bitflag'), table_name='reports')
-    op.drop_index(op.f('ix_reports_products_committed_bitflag'), table_name='reports')
-    op.drop_index(op.f('ix_reports_finish_time'), table_name='reports')
-    op.drop_index(op.f('ix_reports_exposure_id'), table_name='reports')
-    op.drop_index(op.f('ix_reports_created_at'), table_name='reports')
-    op.drop_index(op.f('ix_reports__id'), table_name='reports')
-    op.drop_table('reports')
-    op.drop_index('knownexposures_q3c_ang2ipix_idx', table_name='knownexposures')
-    op.drop_index(op.f('ix_knownexposures_mjd'), table_name='knownexposures')
-    op.drop_index(op.f('ix_knownexposures_instrument'), table_name='knownexposures')
-    op.drop_index(op.f('ix_knownexposures_identifier'), table_name='knownexposures')
-    op.drop_index(op.f('ix_knownexposures_gallat'), table_name='knownexposures')
-    op.drop_index(op.f('ix_knownexposures_ecllat'), table_name='knownexposures')
-    op.drop_index(op.f('ix_knownexposures_created_at'), table_name='knownexposures')
-    op.drop_index(op.f('ix_knownexposures__id'), table_name='knownexposures')
-    op.drop_table('knownexposures')
-    op.drop_index(op.f('ix_images_zero_point_estimate'), table_name='images')
-    op.drop_index(op.f('ix_images_telescope'), table_name='images')
-    op.drop_index(op.f('ix_images_target'), table_name='images')
-    op.drop_index(op.f('ix_images_section_id'), table_name='images')
-    op.drop_index(op.f('ix_images_ref_image_id'), table_name='images')
-    op.drop_index(op.f('ix_images_ra_corner_11'), table_name='images')
-    op.drop_index(op.f('ix_images_ra_corner_10'), table_name='images')
-    op.drop_index(op.f('ix_images_ra_corner_01'), table_name='images')
-    op.drop_index(op.f('ix_images_ra_corner_00'), table_name='images')
-    op.drop_index(op.f('ix_images_provenance_id'), table_name='images')
-    op.drop_index(op.f('ix_images_project'), table_name='images')
-    op.drop_index(op.f('ix_images_mjd'), table_name='images')
-    op.drop_index(op.f('ix_images_minra'), table_name='images')
-    op.drop_index(op.f('ix_images_mindec'), table_name='images')
-    op.drop_index(op.f('ix_images_maxra'), table_name='images')
-    op.drop_index(op.f('ix_images_maxdec'), table_name='images')
-    op.drop_index(op.f('ix_images_lim_mag_estimate'), table_name='images')
-    op.drop_index(op.f('ix_images_is_sub'), table_name='images')
-    op.drop_index(op.f('ix_images_is_coadd'), table_name='images')
-    op.drop_index(op.f('ix_images_instrument'), table_name='images')
-    op.drop_index(op.f('ix_images_gallat'), table_name='images')
-    op.drop_index(op.f('ix_images_fwhm_estimate'), table_name='images')
-    op.drop_index(op.f('ix_images_filter'), table_name='images')
-    op.drop_index(op.f('ix_images_filepath'), table_name='images')
-    op.drop_index(op.f('ix_images_exposure_id'), table_name='images')
-    op.drop_index(op.f('ix_images_exp_time'), table_name='images')
-    op.drop_index(op.f('ix_images_end_mjd'), table_name='images')
-    op.drop_index(op.f('ix_images_ecllat'), table_name='images')
-    op.drop_index(op.f('ix_images_dec_corner_11'), table_name='images')
-    op.drop_index(op.f('ix_images_dec_corner_10'), table_name='images')
-    op.drop_index(op.f('ix_images_dec_corner_01'), table_name='images')
-    op.drop_index(op.f('ix_images_dec_corner_00'), table_name='images')
-    op.drop_index(op.f('ix_images_created_at'), table_name='images')
-    op.drop_index(op.f('ix_images_bkg_rms_estimate'), table_name='images')
-    op.drop_index(op.f('ix_images_bkg_mean_estimate'), table_name='images')
-    op.drop_index(op.f('ix_images_airmass'), table_name='images')
-    op.drop_index(op.f('ix_images__upstream_bitflag'), table_name='images')
-    op.drop_index(op.f('ix_images__type'), table_name='images')
-    op.drop_index(op.f('ix_images__id'), table_name='images')
-    op.drop_index(op.f('ix_images__bitflag'), table_name='images')
-    op.drop_index('images_q3c_ang2ipix_idx', table_name='images')
-    op.drop_table('images')
-    op.drop_table('refset_provenance_association')
-    op.drop_table('provenance_upstreams')
-    op.drop_index(op.f('ix_provenance_tags_tag'), table_name='provenance_tags')
-    op.drop_index(op.f('ix_provenance_tags_provenance_id'), table_name='provenance_tags')
-    op.drop_index(op.f('ix_provenance_tags_created_at'), table_name='provenance_tags')
-    op.drop_index(op.f('ix_provenance_tags__id'), table_name='provenance_tags')
-    op.drop_table('provenance_tags')
op.drop_index(op.f('ix_exposures_target'), table_name='exposures') - op.drop_index(op.f('ix_exposures_provenance_id'), table_name='exposures') - op.drop_index(op.f('ix_exposures_project'), table_name='exposures') - op.drop_index(op.f('ix_exposures_origin_identifier'), table_name='exposures') - op.drop_index(op.f('ix_exposures_mjd'), table_name='exposures') - op.drop_index(op.f('ix_exposures_instrument'), table_name='exposures') - op.drop_index(op.f('ix_exposures_gallat'), table_name='exposures') - op.drop_index(op.f('ix_exposures_filter_array'), table_name='exposures') - op.drop_index(op.f('ix_exposures_filter'), table_name='exposures') - op.drop_index(op.f('ix_exposures_filepath'), table_name='exposures') - op.drop_index(op.f('ix_exposures_exp_time'), table_name='exposures') - op.drop_index(op.f('ix_exposures_ecllat'), table_name='exposures') - op.drop_index(op.f('ix_exposures_created_at'), table_name='exposures') - op.drop_index(op.f('ix_exposures_airmass'), table_name='exposures') - op.drop_index(op.f('ix_exposures__type'), table_name='exposures') - op.drop_index(op.f('ix_exposures__id'), table_name='exposures') - op.drop_index(op.f('ix_exposures__bitflag'), table_name='exposures') - op.drop_index('exposures_q3c_ang2ipix_idx', table_name='exposures') - op.drop_table('exposures') - op.drop_index(op.f('ix_data_files_provenance_id'), table_name='data_files') - op.drop_index(op.f('ix_data_files_filepath'), table_name='data_files') - op.drop_index(op.f('ix_data_files_created_at'), table_name='data_files') - op.drop_index(op.f('ix_data_files__id'), table_name='data_files') - op.drop_table('data_files') - op.drop_index(op.f('ix_provenances_replaced_by'), table_name='provenances') - op.drop_index(op.f('ix_provenances_process'), table_name='provenances') - op.drop_index(op.f('ix_provenances_created_at'), table_name='provenances') - op.drop_index(op.f('ix_provenances_code_version_id'), table_name='provenances') - op.drop_table('provenances') - op.drop_index(op.f('ix_passwordlink_userid'), table_name='passwordlink') - op.drop_index(op.f('ix_passwordlink_created_at'), table_name='passwordlink') - op.drop_table('passwordlink') - op.drop_index(op.f('ix_code_hashes_created_at'), table_name='code_hashes') - op.drop_index(op.f('ix_code_hashes_code_version_id'), table_name='code_hashes') - op.drop_table('code_hashes') - op.drop_index(op.f('ix_sensor_sections_validity_start'), table_name='sensor_sections') - op.drop_index(op.f('ix_sensor_sections_validity_end'), table_name='sensor_sections') - op.drop_index(op.f('ix_sensor_sections_instrument'), table_name='sensor_sections') - op.drop_index(op.f('ix_sensor_sections_identifier'), table_name='sensor_sections') - op.drop_index(op.f('ix_sensor_sections_defective'), table_name='sensor_sections') - op.drop_index(op.f('ix_sensor_sections_created_at'), table_name='sensor_sections') - op.drop_index(op.f('ix_sensor_sections__id'), table_name='sensor_sections') - op.drop_table('sensor_sections') - op.drop_index(op.f('ix_refsets_name'), table_name='refsets') - op.drop_index(op.f('ix_refsets_created_at'), table_name='refsets') - op.drop_index(op.f('ix_refsets__id'), table_name='refsets') - op.drop_table('refsets') - op.drop_index(op.f('ix_pipelineworkers_created_at'), table_name='pipelineworkers') - op.drop_index(op.f('ix_pipelineworkers__id'), table_name='pipelineworkers') - op.drop_table('pipelineworkers') - op.drop_index('objects_q3c_ang2ipix_idx', table_name='objects') - op.drop_index(op.f('ix_objects_name'), table_name='objects') - 
op.drop_index(op.f('ix_objects_is_bad'), table_name='objects') - op.drop_index(op.f('ix_objects_gallat'), table_name='objects') - op.drop_index(op.f('ix_objects_ecllat'), table_name='objects') - op.drop_index(op.f('ix_objects_created_at'), table_name='objects') - op.drop_index(op.f('ix_objects__id'), table_name='objects') - op.drop_table('objects') - op.drop_index(op.f('ix_code_versions_created_at'), table_name='code_versions') - op.drop_table('code_versions') - op.drop_index(op.f('ix_catalog_excerpts_ra_corner_11'), table_name='catalog_excerpts') - op.drop_index(op.f('ix_catalog_excerpts_ra_corner_10'), table_name='catalog_excerpts') - op.drop_index(op.f('ix_catalog_excerpts_ra_corner_01'), table_name='catalog_excerpts') - op.drop_index(op.f('ix_catalog_excerpts_ra_corner_00'), table_name='catalog_excerpts') - op.drop_index(op.f('ix_catalog_excerpts_num_items'), table_name='catalog_excerpts') - op.drop_index(op.f('ix_catalog_excerpts_minra'), table_name='catalog_excerpts') - op.drop_index(op.f('ix_catalog_excerpts_minmag'), table_name='catalog_excerpts') - op.drop_index(op.f('ix_catalog_excerpts_mindec'), table_name='catalog_excerpts') - op.drop_index(op.f('ix_catalog_excerpts_maxra'), table_name='catalog_excerpts') - op.drop_index(op.f('ix_catalog_excerpts_maxmag'), table_name='catalog_excerpts') - op.drop_index(op.f('ix_catalog_excerpts_maxdec'), table_name='catalog_excerpts') - op.drop_index(op.f('ix_catalog_excerpts_gallat'), table_name='catalog_excerpts') - op.drop_index(op.f('ix_catalog_excerpts_filepath'), table_name='catalog_excerpts') - op.drop_index(op.f('ix_catalog_excerpts_ecllat'), table_name='catalog_excerpts') - op.drop_index(op.f('ix_catalog_excerpts_dec_corner_11'), table_name='catalog_excerpts') - op.drop_index(op.f('ix_catalog_excerpts_dec_corner_10'), table_name='catalog_excerpts') - op.drop_index(op.f('ix_catalog_excerpts_dec_corner_01'), table_name='catalog_excerpts') - op.drop_index(op.f('ix_catalog_excerpts_dec_corner_00'), table_name='catalog_excerpts') - op.drop_index(op.f('ix_catalog_excerpts_created_at'), table_name='catalog_excerpts') - op.drop_index(op.f('ix_catalog_excerpts__origin'), table_name='catalog_excerpts') - op.drop_index(op.f('ix_catalog_excerpts__id'), table_name='catalog_excerpts') - op.drop_index('catalog_excerpts_q3c_ang2ipix_idx', table_name='catalog_excerpts') - op.drop_table('catalog_excerpts') - op.drop_index(op.f('ix_calibfile_downloadlock_sensor_section'), table_name='calibfile_downloadlock') - op.drop_index(op.f('ix_calibfile_downloadlock_instrument'), table_name='calibfile_downloadlock') - op.drop_index(op.f('ix_calibfile_downloadlock_created_at'), table_name='calibfile_downloadlock') - op.drop_index(op.f('ix_calibfile_downloadlock__type'), table_name='calibfile_downloadlock') - op.drop_index(op.f('ix_calibfile_downloadlock__id'), table_name='calibfile_downloadlock') - op.drop_index(op.f('ix_calibfile_downloadlock__flat_type'), table_name='calibfile_downloadlock') - op.drop_index(op.f('ix_calibfile_downloadlock__calibrator_set'), table_name='calibfile_downloadlock') - op.drop_table('calibfile_downloadlock') - op.drop_index(op.f('ix_authuser_username'), table_name='authuser') - op.drop_index(op.f('ix_authuser_email'), table_name='authuser') - op.drop_index(op.f('ix_authuser_created_at'), table_name='authuser') - op.drop_table('authuser') - # ### end Alembic commands ### diff --git a/models/background.py b/models/background.py index 86bdf330..fe5ceaaf 100644 --- a/models/background.py +++ b/models/background.py @@ -35,7 +35,7 @@ def 
__table_args__(cls): _format = sa.Column( sa.SMALLINT, nullable=False, - default=BackgroundFormatConverter.convert('scalar'), + server_default=sa.sql.elements.TextClause( str(BackgroundFormatConverter.convert('scalar')) ), doc='Format of the Background model. Can include scalar, map, or polynomial. ' ) @@ -55,7 +55,7 @@ def format(self, value): _method = sa.Column( sa.SMALLINT, nullable=False, - default=BackgroundMethodConverter.convert('zero'), + server_default=sa.sql.elements.TextClause( str(BackgroundMethodConverter.convert('zero')) ), doc='Method used to calculate the background. ' 'Can be an algorithm like "sep", or "zero" for an image that was already background subtracted. ', ) diff --git a/models/base.py b/models/base.py index cda10820..6dca9a50 100644 --- a/models/base.py +++ b/models/base.py @@ -17,8 +17,8 @@ from astropy.coordinates import SkyCoord import sqlalchemy as sa +import sqlalchemy.dialects.postgresql from sqlalchemy import func, orm - from sqlalchemy.orm import sessionmaker, declarative_base from sqlalchemy.ext.declarative import declared_attr from sqlalchemy.ext.hybrid import hybrid_method, hybrid_property @@ -434,105 +434,83 @@ def _get_table_lock( cls, session, tablename=None ): raise RuntimeError( f"Repeated failures getting lock on {tablename}." ) - @classmethod - def upsert_list( cls, objects, session=None ): - """Like upsert, but for a bunch of objects in a list, and tries to be efficient about it. + def _get_cols_and_vals_for_insert( self ): + cols = [] + values = [] + for col in sa.inspect( self.__class__ ).c: + val = getattr( self, col.name ) + if col.name == 'created_at': + continue + elif col.name == 'modified': + val = datetime.datetime.now( tz=datetime.timezone.utc ) + if isinstance( col.type, sqlalchemy.dialects.postgresql.json.JSONB ) and ( val is not None ): + val = json.dumps( val ) + if ( ( ( col.server_default is not None ) and ( col.nullable ) and ( val is None ) ) + or + ( val is not None ) + ): + cols.append( col.name ) + values.append( val ) + return cols, values - Do *not* use this with classes that have things like association - tables that need to get updated (i.e. with Image, maybe - eventually some others). - All reference fields (ids of other objects) of the objects must - be up to date. If the referenced objects don't exist in the - database already, you'll get integrity errors. + def insert( self, session=None ): + """Insert the object into the database. + + Does not do any saving to disk, only saves the database record. + + In any event, if there are no exceptions, self.id will be set upon return. + + Will *not* set any unfilled fields with their defaults. If you + want that, reload the row from the database. + + Depends on the subclass of SeeChangeBase having a column _id in + the database, and a property id that accesses that column, + autogenerating it if it doesn't exist. + + Parameters + ---------- + session: SQLAlchemy Session, or None + Usually you do not want to pass this; it's mostly for other + upsert etc. methods that cascade to this. """ - # The debug comments in this function are for debugging database - # deadlocks. Uncomment them if you're trying to deal with - # that. Normally they're commented out because they make the - # debug output much more verbose. + myid = self.id # Make sure id is generated - if not all( [ isinstance( o, cls ) for o in objects ] ): - raise TypeError( f"{cls.__name__}.upsert_list: passed objects weren't all of this class!" ) + # Doing this manually for a few reasons.
First, doing a + # Session.add wasn't always just doing an insert, but was doing + # other things like going to the database and checking if it + # was there and merging, whereas here we want an exception to + # be raised if the row already exists in the database. Second, + # to work around that, we did orm.make_transient( self ), but + # that wiped out the _id field, and I'm nervous about what + # other unintended consequences calling that SQLA function + # might have. Third, now that we've moved defaults to be + # database-side defaults, we'll get errors from SQLA if those + # fields aren't filled by trying to do an add. + # + # In any event, doing this manually dodges any weirdness associated + # with objects attached, or not attached, to sessions. + cols, values = self._get_cols_and_vals_for_insert() + notmod = [ c for c in cols if c != 'modified' ] + q = f'INSERT INTO {self.__tablename__}({",".join(notmod)}) VALUES (:{",:".join(notmod)}) ' + subdict = { c: v for c, v in zip( cols, values ) if c != 'modified' } with SmartSession( session ) as sess: - try: - cls._get_table_lock( sess ) - # Not doing this just with sqlalchemy merge for two reasons. - # (1) That generates mysterious errors that induce rage - # against sqlalchemy (e.g. was getting an error about - # created_at being blank, even though it's got a - # default). - # (2) A for loop of sqlalchemy merge will, I think, go to the database - # repeatedly, pulling down a copy of each object one at a time - # for merge purposes. More efficient to pull them all at once. - # - # I'm hoping that by modifying columns of things pulled - # from sqlalchemy and just committing the session will - # generate single transaction of a bunch of update - # statements, without the need to do selects before the - # updates. - # - # I think this is the case; see - # tests/test_base.py::test_upsert_list. If you want to - # see what sql actually gets generated, uncomment the - # debug outputs below and run that tests with pytest - # --capture=tee-sys. (For production, leave the - # SCLogger.debug statements commented. Even though - # they're debug and won't show up if you haven't set the - # log level that high, it's more efficient not to have - # gratuitous function calls, and this could be an inner - # loop somewhere, plus it holds a database lock.) - - # Figure out which objects are existing, which are new. - # Use .id here once to force generation if necessary, - # then use _id to bypass one python function call. - objids = [ o.id for o in objects ] - existing = sess.query( cls ).filter( cls._id.in_( objids ) ).all() - existing = { o._id: o for o in existing } - updates = [ o for o in objects if o._id in existing.keys() ] - news = [ o for o in objects if o._id not in existing.keys() ] - - # Update the existing ones - if len(updates) > 0: - for obj in updates: - for col in sa.inspect(obj).mapper.columns.keys(): - existingval = getattr( existing[obj._id], col ) - objval = getattr( obj, col ) - mustreplace = False - if isinstance( existingval, list ): - mustreplace = not all( [ i == j for i, j in zip( existingval, objval ) ] ) - else: - mustreplace = ( existingval != objval ) - if mustreplace: - setattr( existing[obj._id], col, objval ) - SCLogger.debug( "SeeChangeBase.upsert_list committing" ) - sess.commit() - - # Insert the new ones. 
(We may no longer have the lock at this point, - but it shouldn't matter because uuids will be unique, and if there's - another conflict on another column that is supposed to be unique, - we'd get that conflict in any event.) - if len(news) > 0: - for obj in news: - sess.add( obj ) - # SCLogger.debug( "SeeChangeBase.upsert_list committing" ) - sess.commit() - finally: - # Make sure the lock is released if something goes wrong - # SCLogger.debug( "SeeChangeBase.upsert_list rolling back" ) - sess.rollback() + sess.execute( sa.text( q ), subdict ) + sess.commit() + def upsert( self, session=None ): - """Insert an object into the database, or update it if it's already there. + """Insert an object into the database, or update it if it's already there (using _id as the primary key). - Will assign the object an id if it doesn't alrady have one (in self.id). + Will *not* update self's fields with server default values! + Re-get the database row if you want that. - Then looks in the database. If the object is not yet there, - calls the insert method (which will handle things like - association tables). Otherwise, just merges the object to - update the object's own fields. + Will not attach the object to the session if you pass it. + + Will assign the object an id if it doesn't already have one (in self.id). If the object is already there, will NOT update any association tables (e.g. the image_upstreams_association table), because we @@ -551,28 +529,96 @@ def upsert( self, session=None ): """ - # The debug comments in this function are for debugging database - # deadlocks. Uncomment them if you're trying to deal with - # that. Normally they're commented out because they make the - # debug output much more verbose. + # Doing this manually because I don't think SQLAlchemy has a + # clean and direct upsert statement. + # + # Used to do this with a lock table followed by search followed + # by either an insert or an update. However, SQLAlchemy + # wasn't always closing connections when we told it to. + # Sometimes, rarely and unreproducibly, there was a lingering + # connection in a transaction that caused lock tables to fail. + # My hypothesis is that SQLAlchemy is relying on garbage + # collection to *actually* close database connections, and I + # have not found a way to say "no, really, close the + # connection for this session right now". So, as long as we + # still use SQLAlchemy at all, locking tables is likely to + # cause intermittent problems. + # + # (Doing this manually also has the added advantage of avoiding + # sqlalchemy "add" and "merge" statements, so we don't have to + # worry about whatever other side effects those things have.) + + # Make sure that self._id is generated + myid = self.id + cols, values = self._get_cols_and_vals_for_insert() + notmod = [ c for c in cols if c != 'modified' ] + q = ( f'INSERT INTO {self.__tablename__}({",".join(notmod)}) VALUES (:{",:".join(notmod)}) ' + f'ON CONFLICT (_id) DO UPDATE SET ' + f'{",".join( [ f"{c}=:{c}" for c in cols if c!="id" ] )} ') + subdict = { c: v for c, v in zip( cols, values ) } + with SmartSession( session ) as sess: + sess.execute( sa.text( q ), subdict ) + sess.commit() + + + @classmethod + def upsert_list( cls, objects, session=None ): + """Like upsert, but for a bunch of objects in a list, and tries to be efficient about it. + + Do *not* use this with classes that have things like association + tables that need to get updated (i.e. with Image, maybe + eventually some others).
+ + All reference fields (ids of other objects) of the objects must + be up to date. If the referenced objects don't exist in the + database already, you'll get integrity errors. + + Will update object id fields, but will not update any other + object fields with database defaults. Reload the rows from the + table if that's what you need. + + """ + + # Doing this manually for the same reasons as in upsert() + + if not all( [ isinstance( o, cls ) for o in objects ] ): + raise TypeError( f"{cls.__name__}.upsert_list: passed objects weren't all of this class!" ) - id_ = self.id - cls = self.__class__ with SmartSession( session ) as sess: - try: - self._get_table_lock( sess ) - if self.__class__.get_by_id( id_, session=sess ) is None: - # SCLogger.debug( f"SeeChangeBase.upsert running self.insert()" ) - self.insert( session=sess ) - else: - # SCLogger.debug( f"SeeChangeBase.upsert running sess.merge" ) - sess.merge( self ) - # SCLogger.debug( "SeeChangeBase.upsert committing" ) - sess.commit() - finally: - # Make sure to release the lock if anything goes wrong - # SCLogger.debug( "SeeChangeBase.upsert rolling back" ) - sess.rollback() + for obj in objects: + myid = obj.id # Make sure _id is generated + cols, values = obj._get_cols_and_vals_for_insert() + notmod = [ c for c in cols if c != 'modified' ] + q = ( f'INSERT INTO {cls.__tablename__}({",".join(notmod)}) VALUES (:{",:".join(notmod)}) ' + f'ON CONFLICT (_id) DO UPDATE SET ' + f'{",".join( [ f"{c}=:{c}" for c in cols if c!="id" ] )} ') + subdict = { c: v for c, v in zip( cols, values ) } + sess.execute( sa.text( q ), subdict ) + sess.commit() + + + def _delete_from_database( self ): + """Remove the object from the database. Don't call this, call delete_from_disk_and_database. + + This does not remove any associated files (if this is a + FileOnDiskMixin) and does not remove the object from the archive. + + Note that if you call this, cascading relationships in the database + may well delete other objects. This shouldn't be a problem if this is + called from within SeeChangeBase.delete_from_disk_and_database (the + only place it should be called!), because that recurses itself and + makes sure to clean up all files and archive files before the database + records get deleted. + + """ + + with SmartSession() as session: + session.execute( sa.text( f"DELETE FROM {self.__tablename__} WHERE _id=:id" ), { 'id': self.id } ) + session.commit() + + # Look how much easier this is when you don't have to spend a whole bunch of time + # deciding if the object needs to be merged, expunged, etc.
to a session + def get_upstreams(self, session=None): """Get all data products that were directly used to create this object (non-recursive).""" @@ -991,14 +1037,14 @@ def filepath(cls): md5sum = sa.Column( sqlUUID(as_uuid=True), nullable=True, - default=None, + server_default=None, doc="md5sum of the file, provided by the archive server" ) md5sum_extensions = sa.Column( ARRAY(sqlUUID(as_uuid=True), zero_indexes=True), nullable=True, - default=None, + server_default=None, doc="md5sum of extension files; must have same number of elements as filepath_extensions" ) @@ -1574,7 +1620,7 @@ class UUIDMixin: sqlUUID, primary_key=True, index=True, - default=uuid.uuid4, + default=uuid.uuid4, # This is the one exception to always using server_default doc="Unique identifier for this row", ) @@ -1621,61 +1667,6 @@ def get_batch_by_ids( cls, uuids, session=None, return_dict=False ): return { o.id: o for o in objs } if return_dict else objs - def insert( self, session=None ): - """Insert the object into the database. - - Does not do any saving to disk, only saves the database record. - - In any event, if there are no exceptions, self.id will be set upon return. - - Parameters - ---------- - session: SQLALchemy Session, or None - Usually you do not want to pass this; it's mostly for other - upsert etc. methods that cascade to this. - - """ - - myid = self.id # Make sure id is generated - with SmartSession( session ) as sess: - # Have to make sure to insert a "transient" object so that SQLAlchemy will *really* do an insert. - # If the object is detached, it does something else, potentially referencing a row that you don't - # want to reference because of the objects vestigal connections to how it was read from the database. - # - # Looking at https://docs.sqlalchemy.org/en/20/orm/session_api.html#sqlalchemy.orm.make_transient - # it sounds like make_transient is scary if you have relationships... but we're getting rid of those. - # - # One side effect of make_transient is that it sets the - # primary key to None. So, put in a SQLAlchemy work-around - # to make our SQLAlchemy work-around actually work. (Rage.) - orm.make_transient( self ) - self._id = myid - sess.add( self ) - sess.commit() - - - def _delete_from_database( self ): - """Remove the object from the database. Don't call this, call delete_from_disk_and_database. - - This does not remove any associated files (if this is a - FileOnDiskMixin) and does not remove the object from the archive. - - Note that if you call this, cascading relationships in the database - may well delete other objects. This shouldn't be a problem if this is - called from within SeeChangeBase.delete_from_disk_and_database (the - only place it should be called!), because that recurses itself and - makes sure to clean up all files and archive files before the database - records get deleted. - - """ - - with SmartSession() as session: - session.execute( sa.text( f"DELETE FROM {self.__tablename__} WHERE _id=:id" ), { 'id': self.id } ) - session.commit() - - # Look how much easier this is when you don't have to spend a whole bunch of time - # deciding if the object needs to be merged, expunged, etc. 
to a session - class SpatiallyIndexed: """A mixin for tables that have ra and dec fields indexed via q3c.""" @@ -1786,11 +1777,16 @@ def distance_to(self, other, units='arcsec'): class FourCorners: """A mixin for tables that have four RA/Dec corners""" - ra_corner_00 = sa.Column( sa.REAL, nullable=False, index=True, doc="RA of the low-RA, low-Dec corner (degrees)" ) - ra_corner_01 = sa.Column( sa.REAL, nullable=False, index=True, doc="RA of the low-RA, high-Dec corner (degrees)" ) - ra_corner_10 = sa.Column( sa.REAL, nullable=False, index=True, doc="RA of the high-RA, low-Dec corner (degrees)" ) - ra_corner_11 = sa.Column( sa.REAL, nullable=False, index=True, doc="RA of the high-RA, high-Dec corner (degrees)" ) - dec_corner_00 = sa.Column( sa.REAL, nullable=False, index=True, doc="Dec of the low-RA, low-Dec corner (degrees)" ) + ra_corner_00 = sa.Column( sa.REAL, nullable=False, index=True, + doc="RA of the low-RA, low-Dec corner (degrees)" ) + ra_corner_01 = sa.Column( sa.REAL, nullable=False, index=True, + doc="RA of the low-RA, high-Dec corner (degrees)" ) + ra_corner_10 = sa.Column( sa.REAL, nullable=False, index=True, + doc="RA of the high-RA, low-Dec corner (degrees)" ) + ra_corner_11 = sa.Column( sa.REAL, nullable=False, index=True, + doc="RA of the high-RA, high-Dec corner (degrees)" ) + dec_corner_00 = sa.Column( sa.REAL, nullable=False, index=True, + doc="Dec of the low-RA, low-Dec corner (degrees)" ) dec_corner_01 = sa.Column( sa.REAL, nullable=False, index=True, doc="Dec of the low-RA, high-Dec corner (degrees)" ) dec_corner_10 = sa.Column( sa.REAL, nullable=False, index=True, @@ -2155,7 +2151,7 @@ class HasBitFlagBadness: _bitflag = sa.Column( sa.BIGINT, nullable=False, - default=0, + server_default=sa.sql.elements.TextClause( '0' ), index=True, doc='Bitflag for this object. Good objects have a bitflag of 0. ' 'Bad objects are each bad in their own way (i.e., have different bits set). ' @@ -2169,7 +2165,7 @@ def _upstream_bitflag(cls): return sa.Column( sa.BIGINT, nullable=False, - default=0, + server_default=sa.sql.elements.TextClause( '0' ), index=True, doc='Bitflag of objects used to generate this object. 
' ) diff --git a/models/calibratorfile.py b/models/calibratorfile.py index 507e8111..7632647e 100644 --- a/models/calibratorfile.py +++ b/models/calibratorfile.py @@ -20,7 +20,7 @@ class CalibratorFile(Base, UUIDMixin): sa.SMALLINT, nullable=False, index=True, - default=CalibratorTypeConverter.convert( 'unknown' ), + server_default=sa.sql.elements.TextClause(str(CalibratorTypeConverter.convert( 'unknown' )) ) , doc="Type of calibrator (Dark, Flat, Linearity, etc.)" ) @@ -40,7 +40,7 @@ def type( self, value ): sa.SMALLINT, nullable=False, index=True, - default=CalibratorTypeConverter.convert('unknown'), + server_default=sa.sql.elements.TextClause( str(CalibratorTypeConverter.convert('unknown')) ), doc="Calibrator set for instrument (unknown, externally_supplied, general, nightly)" ) @@ -144,7 +144,7 @@ class CalibratorFileDownloadLock(Base, UUIDMixin): sa.SMALLINT, nullable=False, index=True, - default=CalibratorTypeConverter.convert( 'unknown' ), + server_default=sa.sql.elements.TextClause( str(CalibratorTypeConverter.convert( 'unknown' )) ), doc="Type of calibrator (Dark, Flat, Linearity, etc.)" ) @@ -166,7 +166,7 @@ def type( self, value ): sa.SMALLINT, nullable=False, index=True, - default=CalibratorTypeConverter.convert('unknown'), + server_default=sa.sql.elements.TextClause( str(CalibratorTypeConverter.convert('unknown')) ), doc="Calibrator set for instrument (unknown, externally_supplied, general, nightly)" ) diff --git a/models/catalog_excerpt.py b/models/catalog_excerpt.py index 802dfaba..872ece70 100644 --- a/models/catalog_excerpt.py +++ b/models/catalog_excerpt.py @@ -48,7 +48,7 @@ def __table_args__( cls ): _format = sa.Column( sa.SMALLINT, nullable=False, - default=CatalogExcerptFormatConverter.convert('fitsldac'), + server_default=sa.sql.elements.TextClause( str(CatalogExcerptFormatConverter.convert('fitsldac')) ), doc="Format of the file on disk. Currently only fitsldac is supported. " "Saved as intetger but is converted to string when loaded." ) @@ -117,7 +117,7 @@ def data( self ): filters = sa.Column( ARRAY(sa.Text, zero_indexes=True), nullable=False, - default=[], + server_default='{}', doc=( "Filters covered by the catalog; names of the filters will be " "standard for the catalog source, not globally standard." ) ) diff --git a/models/cutouts.py b/models/cutouts.py index ec5333c3..26fc60d5 100644 --- a/models/cutouts.py +++ b/models/cutouts.py @@ -60,7 +60,7 @@ def __table_args__(cls): _format = sa.Column( sa.SMALLINT, nullable=False, - default=CutoutsFormatConverter.convert('hdf5'), + server_default=sa.sql.elements.TextClause( str(CutoutsFormatConverter.convert('hdf5')) ), doc="Format of the file on disk. Should be fits, hdf5, csv or npy. " "Saved as integer but is converted to string when loaded. " ) diff --git a/models/exposure.py b/models/exposure.py index 4313a973..33f32695 100644 --- a/models/exposure.py +++ b/models/exposure.py @@ -177,7 +177,7 @@ def __table_args__( cls ): _type = sa.Column( sa.SMALLINT, nullable=False, - default=ImageTypeConverter.convert('Sci'), + server_default=sa.sql.elements.TextClause( str(ImageTypeConverter.convert('Sci')) ), index=True, doc=( "Type of image. One of: Sci, Diff, Bias, Dark, DomeFlat, SkyFlat, TwiFlat, " @@ -202,7 +202,7 @@ def type(self, value): _format = sa.Column( sa.SMALLINT, nullable=False, - default=ImageFormatConverter.convert('fits'), + server_default=sa.sql.elements.TextClause( str(ImageFormatConverter.convert('fits')) ), doc="Format of the file on disk. Should be fits or hdf5. 
" "The value is saved as SMALLINT but translated to a string when read. " ) @@ -234,7 +234,7 @@ def format(self, value): info = sa.Column( JSONB, nullable=False, - default={}, + server_default='{}', doc=( "Subset of the raw exposure's header. " "Only keep a subset of the keywords, " @@ -294,7 +294,7 @@ def filter_short(self): _bitflag = sa.Column( sa.BIGINT, nullable=False, - default=0, + server_default=sa.sql.elements.TextClause( '0' ), index=True, doc='Bitflag for this exposure. Good exposures have a bitflag of 0. ' 'Bad exposures are each bad in their own way (i.e., have different bits set). ' diff --git a/models/image.py b/models/image.py index 59a82563..c9d750d4 100644 --- a/models/image.py +++ b/models/image.py @@ -86,7 +86,7 @@ def __table_args__( cls ): _format = sa.Column( sa.SMALLINT, nullable=False, - default=ImageFormatConverter.convert('fits'), + server_default=sa.sql.elements.TextClause( str(ImageFormatConverter.convert('fits')) ), doc="Format of the file on disk. Should be fits or hdf5. " ) @@ -152,7 +152,7 @@ def upstream_image_ids( self, val ): is_sub = sa.Column( sa.Boolean, nullable=False, - default=False, + server_default='false', index=True, doc='Is this a subtraction image.' ) @@ -160,7 +160,7 @@ def upstream_image_ids( self, val ): is_coadd = sa.Column( sa.Boolean, nullable=False, - default=False, + server_default='false', index=True, doc='Is this image made by stacking multiple images.' ) @@ -168,7 +168,7 @@ def upstream_image_ids( self, val ): _type = sa.Column( sa.SMALLINT, nullable=False, - default=ImageTypeConverter.convert('Sci'), + server_default=sa.sql.elements.TextClause( str(ImageTypeConverter.convert('Sci')) ), index=True, doc=( "Type of image. One of: [Sci, Diff, Bias, Dark, DomeFlat, SkyFlat, TwiFlat, Warped] " @@ -205,7 +205,7 @@ def type(self, value): info = sa.Column( JSONB, nullable=False, - default={}, + server_default='{}', doc=( "Additional information on the this image. " "Only keep a subset of the header keywords, " @@ -304,7 +304,7 @@ def mid_mjd(self): preproc_bitflag = sa.Column( sa.SMALLINT, nullable=False, - default=0, + server_default=sa.sql.elements.TextClause( '0' ), index=False, doc='Bitflag specifying which preprocessing steps have been completed for the image.' ) @@ -321,7 +321,7 @@ def preprocessing_done(self, value): astro_cal_done = sa.Column( sa.BOOLEAN, nullable=False, - default=False, + server_default='false', index=False, doc=( 'Has a WCS been solved for this image. This should be set to true after astro_cal ' 'has been run, or for images (like subtractions) that are derived from other images ' @@ -333,7 +333,7 @@ def preprocessing_done(self, value): sky_sub_done = sa.Column( sa.BOOLEAN, nullable=False, - default=False, + server_default='false', index=False, doc='Has the sky been subtracted from this image. ' ) @@ -434,6 +434,9 @@ def __init__(self, *args, **kwargs): self._instrument_object = None self._bitflag = 0 self.is_sub = False + self.is_coadd = False + self.astro_cal_done = False + self.photo_cal_done = False if 'header' in kwargs: kwargs['_header'] = kwargs.pop('header') @@ -508,31 +511,17 @@ def insert( self, verifyupstreams=False, session=None ): """ with SmartSession( session ) as sess: - try: - # Lock both the images and image_upstreams_association tables to avoid race - # conditions with another process trying to insert the same image. 
- self._get_table_lock( sess ) - if ( self._upstream_ids is not None ) and ( len(self._upstream_ids) > 0 ): - self._get_table_lock( sess, 'image_upstreams_association' ) - - # This will raise an exception if the image is already present; in that case, - # the upstream associations should also already be present (loaded by whoever - # loaded the image), so it's fine to just error out. - UUIDMixin.insert( self, session=sess ) - - if ( self._upstream_ids is not None ) and ( len(self._upstream_ids) > 0 ): - for ui in self._upstream_ids: - sess.execute( sa.text( "INSERT INTO " - "image_upstreams_association(upstream_id,downstream_id) " - "VALUES (:them,:me)" ), - { "them": ui, "me": self.id } ) - # SCLogger.debug( "Image.insert comitting" ) - sess.commit() - - finally: - # Make sure the table locks are released - # SCLogger.debug( "Image.insert rolling back" ) - sess.rollback() + # Insert the image. If this raises an exception (because the image already exists), + # then we won't futz with the image_upstreams_association table. + SeeChangeBase.insert( self, session=sess ) + + if ( self._upstream_ids is not None ) and ( len(self._upstream_ids) > 0 ): + for ui in self._upstream_ids: + sess.execute( sa.text( "INSERT INTO " + "image_upstreams_association(upstream_id,downstream_id) " + "VALUES (:them,:me)" ), + { "them": ui, "me": self.id } ) + sess.commit() def set_corners_from_header_wcs( self, wcs=None, setradec=False ): diff --git a/models/instrument.py b/models/instrument.py index a318f278..5ef80130 100644 --- a/models/instrument.py +++ b/models/instrument.py @@ -251,7 +251,7 @@ class SensorSection(Base, UUIDMixin): defective = sa.Column( sa.Boolean, nullable=False, - default=False, + server_default='false', index=True, doc='Whether this section is defective (i.e., if True, do not use it!). ' ) diff --git a/models/knownexposure.py b/models/knownexposure.py index 092aea21..34a5067b 100644 --- a/models/knownexposure.py +++ b/models/knownexposure.py @@ -87,6 +87,8 @@ class PipelineWorker(Base, UUIDMixin): cluster_id = sa.Column( sa.Text, nullable=False, doc="Cluster where the worker is running" ) node_id = sa.Column( sa.Text, nullable=True, doc="Node where the worker is running" ) - nexps = sa.Column( sa.SmallInteger, nullable=False, default=1, + nexps = sa.Column( sa.SmallInteger, + nullable=False, + server_default=sa.sql.elements.TextClause( '1' ), doc="How many exposures this worker can do at once" ) lastheartbeat = sa.Column( sa.DateTime, nullable=False, doc="Last time this pipeline worker checked in" ) diff --git a/models/measurements.py b/models/measurements.py index 466930a0..ca83565e 100644 --- a/models/measurements.py +++ b/models/measurements.py @@ -99,7 +99,7 @@ def __table_args__( cls ): best_aperture = sa.Column( sa.SMALLINT, nullable=False, - default=-1, + server_default=sa.sql.elements.TextClause( '-1' ), doc="The index of the aperture that was chosen as the best aperture for this measurement. " "Set to -1 to select the PSF flux instead of one of the apertures. " ) @@ -305,7 +305,7 @@ def magnitude_err(self): disqualifier_scores = sa.Column( JSONB, nullable=False, - default={}, + server_default='{}', index=True, doc="Values that may disqualify this object, and mark it as not a real source. " "This includes all sorts of analytical cuts defined by the provenance parameters. 
" diff --git a/models/object.py b/models/object.py index 40b8234f..2c061323 100644 --- a/models/object.py +++ b/models/object.py @@ -37,14 +37,14 @@ def __table_args__(cls): is_test = sa.Column( sa.Boolean, nullable=False, - default=False, + server_default='false', doc='Boolean flag to indicate if the object is a test object created during testing. ' ) is_fake = sa.Column( sa.Boolean, nullable=False, - default=False, + server_default='false', doc='Boolean flag to indicate if the object is a fake object that has been artificially injected. ' ) diff --git a/models/provenance.py b/models/provenance.py index 9ccece03..c7234d0a 100644 --- a/models/provenance.py +++ b/models/provenance.py @@ -1,4 +1,5 @@ import time +import re import json import base64 import hashlib @@ -78,13 +79,11 @@ def code_hashes( self ): self._code_hashes = self.get_code_hashes() return self._code_hashes - def update(self, commit=True, session=None): - """Create a new CodeHash object associated with this CodeVersion using the current git hash.""" + def update(self, session=None): + """Create a new CodeHash object associated with this CodeVersion using the current git hash. - # The debug comments in this function are for debugging database - # deadlocks. Uncomment them if you're trying to deal with - # that. Normally they're commented out because they make the - # debug output much more verbose. + Will do nothing if it already exists. + """ # NOTE: don't trust "commit"; if it fails to get_git_hash(), it # will just quietly return None, and not commit. @@ -93,27 +92,16 @@ def update(self, commit=True, session=None): if git_hash is None: return # quietly fail if we can't get the git hash - with SmartSession(session) as sess: - try: - # Lock the code_hashes table to avoid a race condition - self._get_table_lock( sess, "code_hashes" ) - hash_obj = sess.scalars( sa.select(CodeHash) - .where( CodeHash._id == git_hash ) - .where( CodeHash.code_version_id == self.id ) - ).first() - if hash_obj is None: - if commit: - cv = sess.scalars( sa.select(CodeVersion).where( CodeVersion._id==self.id ) ).first() - if cv is None: - raise RuntimeError( 'CodeVersion must be in the database before running update' ) - hash_obj = CodeHash( id=git_hash, code_version_id=self.id ) - sess.add( hash_obj ) - # SCLogger.debug( "CodeVersion.update committing" ) - sess.commit() - finally: - # If something went wrong, make sure the lock goes away - # SCLogger.debug( "CodeVersion.update rolling back" ) - sess.rollback() + + hash_obj = CodeHash( _id=git_hash, code_version_id=self.id ) + try: + hash_obj.insert( session=session ) + except IntegrityError as ex: + if 'duplicate key value violates unique constraint "code_hashes_pkey"' in str(ex): + # It's already there, so we don't care. + pass + else: + raise def get_code_hashes( self, session=None ): """Return all CodeHash objects associated with this codeversion""" @@ -214,14 +202,14 @@ def id( self, val ): parameters = sa.Column( JSONB, nullable=False, - default={}, + server_default='{}', doc="Critical parameters used to generate the underlying data. ", ) is_bad = sa.Column( sa.Boolean, nullable=False, - default=False, + server_default='false', doc="Flag to indicate if the provenance is bad and should not be used. ", ) @@ -234,7 +222,7 @@ def id( self, val ): is_outdated = sa.Column( sa.Boolean, nullable=False, - default=False, + server_default='false', doc="Flag to indicate if the provenance is outdated and should not be used. 
", ) @@ -249,7 +237,7 @@ def id( self, val ): is_testing = sa.Column( sa.Boolean, nullable=False, - default=False, + server_default='false', doc="Flag to indicate if the provenance is for testing purposes only. ", ) @@ -462,7 +450,7 @@ def get_code_version(cls, session=None): return Provenance._current_code_version - def insert( self, session=None ): + def insert( self, session=None, _exists_ok=False ): """Insert the provenance into the database. Will raise a constraint violation if the provenance ID already exists in the database. @@ -474,23 +462,29 @@ def insert( self, session=None ): """ - # This will raise a unique id constraint violation if the provenance id already exists - myid = self.id with SmartSession( session ) as sess: - if self._upstreams is None: - raise RuntimeError( "Can't save provenance, don't know upstreams. This usually happens " - "if you try to save one that you loaded from the database. Use " - "insert_if_needed() instead of insert()." ) - # See comment in models/base.py::UUIDMixin.insert - orm.make_transient( self ) - self._id = myid - sess.add( self ) - sess.commit() - for upstream in self._upstreams: - sess.execute( sa.text( "INSERT INTO provenance_upstreams(upstream_id,downstream_id) " - "VALUES (:upstream,:me)" ), - { 'me': self.id, 'upstream': upstream.id } ) - sess.commit() + try: + SeeChangeBase.insert( self, sess ) + + # Should be safe to go ahead and insert into the association table + # If the provenance already existed, we will have raised an exceptipn. + # If not, somebody else who might try to insert this provenance + # will get an exception on the insert() statement above, and so won't + # try the following association table inserts. + + upstreams = self._upstreams if self._upstreams is not None else self.get_upstreams( session=sess ) + if len(upstreams) > 0: + for upstream in upstreams: + sess.execute( sa.text( "INSERT INTO provenance_upstreams(upstream_id,downstream_id) " + "VALUES (:upstream,:me)" ), + { 'me': self.id, 'upstream': upstream.id } ) + sess.commit() + except IntegrityError as ex: + if _exists_ok and ( 'duplicate key value violates unique constraint "provenances_pkey"' in str(ex) ): + sess.rollback() + else: + raise + def insert_if_needed( self, session=None ): """Insert the provenance into the database if it's not already there. @@ -501,24 +495,9 @@ def insert_if_needed( self, session=None ): Usually you don't want to use this """ - # The debug comments in this function are for debugging database - # deadlocks. Uncomment them if you're trying to deal with - # that. Normally they're commented out because they make the - # debug output much more verbose. - with SmartSession( session ) as sess: - try: - self._get_table_lock( sess ) - provobj = sess.scalars( sa.select( Provenance ).where( Provenance._id == self.id ) ).first() - # There's no need to verify that the provenance is consistent, since Provenance.id is - # a hash of the contents of the provenance. Insofar as the hashing is unique, - # it *will* be consistent. 
- if provobj is None: - self.insert( session=sess ) - finally: - # Make sure lock is released - # SCLogger.debug( "Provenance.insert_if_needed rolling back" ) - sess.rollback() + self.insert( session=session, _exists_ok=True ) + def get_upstreams( self, session=None ): with SmartSession( session ) as sess: diff --git a/models/psf.py b/models/psf.py index 22f70c49..fcfc53f1 100644 --- a/models/psf.py +++ b/models/psf.py @@ -44,7 +44,7 @@ def __table_args__(cls): _format = sa.Column( sa.SMALLINT, nullable=False, - default=PSFFormatConverter.convert('psfex'), + server_default=sa.sql.elements.TextClause( str(PSFFormatConverter.convert('psfex')) ), doc='Format of the PSF file. Currently only supports psfex.' ) diff --git a/models/reference.py b/models/reference.py index fd94a37b..0ee35cc5 100644 --- a/models/reference.py +++ b/models/reference.py @@ -76,7 +76,7 @@ class Reference(Base, UUIDMixin): is_bad = sa.Column( sa.Boolean, nullable=False, - default=False, + server_default='false', doc="Whether this reference image is bad. " ) diff --git a/models/report.py b/models/report.py index 78194989..27d73d1d 100644 --- a/models/report.py +++ b/models/report.py @@ -67,7 +67,7 @@ class Report(Base, UUIDMixin): sa.Boolean, nullable=False, index=True, - default=False, + server_default='false', doc=( "Whether the processing of this section was successful. " ) @@ -76,7 +76,7 @@ class Report(Base, UUIDMixin): num_prev_reports = sa.Column( sa.Integer, nullable=False, - default=0, + server_default=sa.sql.elements.TextClause( '0' ), doc=( "Number of previous reports for this exposure, section, and provenance. " ) @@ -142,7 +142,7 @@ class Report(Base, UUIDMixin): process_memory = sa.Column( JSONB, nullable=False, - default={}, + server_default='{}', doc='Memory usage of the process during processing. ' 'Each key in the dictionary is for a processing step, ' 'and the value is the memory usage in megabytes. ' @@ -151,7 +151,7 @@ class Report(Base, UUIDMixin): process_runtime = sa.Column( JSONB, nullable=False, - default={}, + server_default='{}', doc='Runtime of the process during processing. ' 'Each key in the dictionary is for a processing step, ' 'and the value is the runtime in seconds. ' @@ -160,7 +160,7 @@ class Report(Base, UUIDMixin): progress_steps_bitflag = sa.Column( sa.BIGINT, nullable=False, - default=0, + server_default=sa.sql.elements.TextClause( '0' ), index=True, doc='Bitflag recording what processing steps have already been applied to this section. ' ) @@ -186,7 +186,7 @@ def append_progress(self, value): products_exist_bitflag = sa.Column( sa.BIGINT, nullable=False, - default=0, + server_default=sa.sql.elements.TextClause( '0' ), index=True, doc='Bitflag recording which pipeline products were not None when the pipeline finished. ' ) @@ -214,7 +214,7 @@ def append_products_exist(self, value): products_committed_bitflag = sa.Column( sa.BIGINT, nullable=False, - default=0, + server_default=sa.sql.elements.TextClause( '0' ), index=True, doc='Bitflag recording which pipeline products were not None when the pipeline finished. ' ) diff --git a/models/source_list.py b/models/source_list.py index 97170882..de388728 100644 --- a/models/source_list.py +++ b/models/source_list.py @@ -52,7 +52,7 @@ def __table_args__( cls ): _format = sa.Column( sa.SMALLINT, nullable=False, - default=SourceListFormatConverter.convert('sextrfits'), + server_default=sa.sql.elements.TextClause( str(SourceListFormatConverter.convert('sextrfits')) ), doc="Format of the file on disk. Should be sepnpy or sextrfits. 
" "Saved as integer but is converter to string when loaded. " ) @@ -80,7 +80,7 @@ def format(self, value): aper_rads = sa.Column( ARRAY( sa.REAL, zero_indexes=True ), nullable=True, - default=None, + server_default=None, index=False, doc="Radius of apertures used for aperture photometry in pixels." ) @@ -88,7 +88,7 @@ def format(self, value): inf_aper_num = sa.Column( sa.SMALLINT, nullable=True, - default=None, + server_default=None, index=False, doc="Which element of aper_rads to use as the 'infinite' aperture; -1 = last one. " ) @@ -96,7 +96,7 @@ def format(self, value): best_aper_num = sa.Column( sa.SMALLINT, nullable=True, - default=None, + server_default=None, index=False, doc="Which element of aper_rads to use as the 'best' aperture; -1 = use PSF photometry. " ) diff --git a/models/zero_point.py b/models/zero_point.py index 5c3853f0..85819dd9 100644 --- a/models/zero_point.py +++ b/models/zero_point.py @@ -40,7 +40,7 @@ class ZeroPoint(SourceListSibling, Base, UUIDMixin, HasBitFlagBadness): aper_cor_radii = sa.Column( ARRAY( sa.REAL, zero_indexes=True ), nullable=True, - default=None, + server_default=None, index=False, doc="Pixel radii of apertures whose aperture corrections are in aper_cors." ) @@ -48,7 +48,7 @@ class ZeroPoint(SourceListSibling, Base, UUIDMixin, HasBitFlagBadness): aper_cors = sa.Column( ARRAY( sa.REAL, zero_indexes=True ), nullable=True, - default=None, + server_default=None, index=False, doc=( "Aperture corrections for apertures with radii in aper_cor_radii. Defined so that " "mag = -2.5*log10(adu_aper) + zp + aper_cor, where adu_aper is the number of ADU " diff --git a/tests/conftest.py b/tests/conftest.py index a46377ed..5448cd15 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -309,20 +309,12 @@ def test_config(): @pytest.fixture(scope="session", autouse=True) def code_version(): - with SmartSession() as session: - cv = session.scalars(sa.select(CodeVersion).where(CodeVersion._id == 'test_v1.0.0')).first() - if cv is None: - cv = CodeVersion(id="test_v1.0.0") - session.add( cv ) - cv.update( session=session ) - session.commit() - # cv = session.scalars(sa.select(CodeVersion).where(CodeVersion._id == 'test_v1.0.0')).first() - - # HACK ALERT + cv = CodeVersion( id="test_v1.0.0" ) + cv.insert() + with SmartSession() as session: newcv = session.scalars( sa.select(CodeVersion ) ).first() assert newcv is not None - # HACK ALERT yield cv @@ -346,7 +338,7 @@ def provenance_base(code_version): yield p with SmartSession() as session: - session.delete(p) + session.execute( sa.delete( Provenance ).where( Provenance._id==p.id ) ) session.commit() @@ -364,7 +356,7 @@ def provenance_extra( provenance_base ): yield p with SmartSession() as session: - session.delete(p) + session.execute( sa.delete( Provenance ).where( Provenance._id==p.id ) ) session.commit() diff --git a/tests/models/test_base.py b/tests/models/test_base.py index ba68f593..87dad15a 100644 --- a/tests/models/test_base.py +++ b/tests/models/test_base.py @@ -98,30 +98,88 @@ def test_upsert( provenance_base ): uuidstodel = [ uuid.uuid4() ] try: - # Make sure we can insert something new - with SmartSession() as sess: - assert sess.query( DataFile ).filter( DataFile._id==uuidstodel[0] ).first() is None - df = DataFile( _id=uuidstodel[0], filepath="foo", md5sum=uuid.uuid4(), provenance_id=provenance_base.id ) - df.upsert() - founddf = DataFile.get_by_id( df.id ) - assert founddf is not None - assert founddf.filepath == df.filepath - assert founddf.md5sum == df.md5sum + assert Image.get_by_id( uuidstodel[0] 
) is None + + image = Image( _id = uuidstodel[0], + provenance_id = provenance_base.id, + mjd = 60575.474664, + end_mjd = 60575.4750116, + exp_time = 30., + # instrument = 'DemoInstrument', + telescope = 'DemoTelescope', + project = 'test', + target = 'nothing', + filepath = 'foo/bar.fits', + ra = '23.', + dec = '42.', + ra_corner_00 = 22.5, + ra_corner_01 = 22.5, + ra_corner_10 = 23.5, + ra_corner_11 = 23.5, + dec_corner_00 = 41.5, + dec_corner_01 = 42.5, + dec_corner_10 = 41.5, + dec_corner_11 = 42.5, + minra = 22.5, + maxra = 23.5, + mindec = 41.5, + maxdec = 42.5, + md5sum = uuid.uuid4() # spoof since we didn't save a file + ) + + # Make sure the database yells at us if a required column is missing + + with pytest.raises( IntegrityError, match='null value in column "instrument".*violates not-null' ): + image.upsert() + + # == Make sure we can insert a thing == + image.instrument = 'DemoInstrument' + image.upsert() + + # Object didn't get updated + assert image._format is None + assert image.preproc_bitflag is None + assert image.created_at is None + assert image.modified is None + + found = Image.get_by_id( image.id ) + assert found is not None + + # Check the server side defaults + assert found._format == 1 + assert found.preproc_bitflag == 0 + assert found.created_at is not None + assert found.modified == found.created_at + + # Change something, do an update + found.project = 'another_test' + found.upsert() + refound = Image.get_by_id( image.id ) + for col in sa.inspect( Image ).c: + if col.name == 'modified': + assert refound.modified > found.modified + elif col.name == 'project': + assert refound.project == 'another_test' + else: + assert getattr( found, col.name ) == getattr( refound, col.name ) - # Make sure we can update it - origmd5sum = df.md5sum - df.md5sum = uuid.uuid4() - df.upsert() - founddf = DataFile.get_by_id( df.id ) - assert founddf is not None - assert founddf.md5sum == df.md5sum - assert founddf.md5sum != origmd5sum - assert founddf.modified > founddf.created_at + # Verify that we get a new image and the id is generated if the id starts undefined + refound._id = None + refound.filepath = 'foo/bar_none.fits' + + refound.upsert() + assert refound._id is not None + uuidstodel.append( refound._id ) + + with SmartSession() as session: + multifound = session.query( Image ).filter( Image._id.in_( uuidstodel ) ).all() + assert len(multifound) == 2 + assert set( [ i.id for i in multifound ] ) == set( uuidstodel ) finally: # Clean up with SmartSession() as sess: - sess.execute( sa.delete( DataFile ).where( DataFile._id.in_( uuidstodel ) ) ) + sess.execute( sa.delete( Image ).where( Image._id.in_( uuidstodel ) ) ) sess.commit() # TODO : test test_upsert_list when one of the object properties is a SQL array.
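For reference, a minimal sketch of the SQL shape that upsert() renders and that this test exercises end to end; the column list is a made-up subset, not the full Image schema:

import sqlalchemy as sa

cols = [ '_id', 'filepath', 'project', 'modified' ]   # toy subset of the real columns
notmod = [ c for c in cols if c != 'modified' ]       # omitted on insert so now() applies
q = ( f"INSERT INTO images({','.join(notmod)}) VALUES ({','.join(':' + c for c in notmod)}) "
      f"ON CONFLICT (_id) DO UPDATE SET "
      f"{','.join( f'{c}=:{c}' for c in cols )}" )
print( q )
# INSERT INTO images(_id,filepath,project) VALUES (:_id,:filepath,:project)
# ON CONFLICT (_id) DO UPDATE SET _id=:_id,filepath=:filepath,project=:project,modified=:modified

On a fresh row the omitted modified column picks up its now() server default; on conflict it is rebound to a client-side timestamp, which is why the test asserts refound.modified > found.modified while every other column comes back unchanged.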
diff --git a/tests/models/test_provenance.py b/tests/models/test_provenance.py index 41d66ec6..9152be78 100644 --- a/tests/models/test_provenance.py +++ b/tests/models/test_provenance.py @@ -3,6 +3,7 @@ import sqlalchemy as sa from sqlalchemy.orm.exc import DetachedInstanceError +from sqlalchemy.exc import IntegrityError from models.base import SmartSession from models.provenance import CodeHash, CodeVersion, Provenance @@ -12,16 +13,26 @@ # Note: ProvenanceTag.newtag is tested as part of pipeline/test_pipeline.py::test_provenance_tree def test_code_versions( code_version ): - cv = CodeVersion( id="this_code_version_does_not_exist_v0.0.1" ) + cv = code_version git_hash = get_git_hash() - # The exception won't be raised if get_git_hash() returns None, because it won't - # have a hash to try to add. So, only run this test where it might actually pass. + # These things won't work if get_git_hash() returns None, because it won't + # have a hash to try to add. So, only run these tests where they might actually pass. if git_hash is not None: - with pytest.raises( RuntimeError, match='CodeVersion must be in the database before running update' ): - cv.update() - - cv = code_version + # Make sure we can't update a cv that's not yet in the database + newcv = CodeVersion( id="this_code_version_does_not_exist_v0.0.1" ) + with pytest.raises( IntegrityError, match='insert or update on table "code_hashes" violates foreign key' ): + newcv.update() + + # Make sure we have a code hash associated with code_version + cv.update() + with SmartSession() as sess: + n1 = sess.query( CodeHash ).count() + # Make sure that we can run it again + cv.update() + with SmartSession() as sess: + n2 = sess.query( CodeHash ).count() + assert n2 == n1 hashes = cv.get_code_hashes() assert set( [ i.id for i in cv.code_hashes ] ) == set( [ i.id for i in hashes ] ) @@ -131,30 +142,26 @@ def test_unique_provenance_hash(code_version): ) try: # cleanup - with SmartSession() as session: - session.add(p) - session.commit() - pid = p.id - assert pid is not None - assert len(p.id) == 20 - hash = p.id - - # start new session - with SmartSession() as session: - p2 = Provenance( - process='test_process', - code_version_id=code_version.id, - parameters={'test_parameter': parameter}, - upstreams=[], - is_testing=True, - ) - assert p2.id == hash + p.insert() + pid = p.id + assert pid is not None + assert len(p.id) == 20 + hash = p.id + + p2 = Provenance( + process='test_process', + code_version_id=code_version.id, + parameters={'test_parameter': parameter}, + upstreams=[], + is_testing=True, + ) + assert p2.id == hash + + with pytest.raises(sa.exc.IntegrityError) as e: + p2.insert() + assert 'duplicate key value violates unique constraint "provenances_pkey"' in str(e) - with pytest.raises(sa.exc.IntegrityError) as e: - session.add(p2) - session.commit() - assert 'duplicate key value violates unique constraint "provenances_pkey"' in str(e) - session.rollback() + p2.insert( _exists_ok=True ) finally: if 'pid' in locals():