Merge branch 'c3-time-domain:main' into 143-limiting-magnitude
dryczanowski authored Jul 10, 2024
2 parents 70270ad + 02db299 commit 626c819
Showing 86 changed files with 5,193 additions and 3,079 deletions.
8 changes: 8 additions & 0 deletions .github/workflows/run-improc-tests.yml
@@ -59,4 +59,12 @@ jobs:
- name: run test
run: |
# try to save HDD space on the runner by removing some unneeded stuff
# ref: https://github.com/actions/runner-images/issues/2840#issuecomment-790492173
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf "/usr/local/share/boost"
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
shopt -s nullglob
TEST_SUBFOLDER=tests/improc docker compose run runtests
7 changes: 7 additions & 0 deletions .github/workflows/run-model-tests-1.yml
@@ -59,5 +59,12 @@ jobs:
- name: run test
run: |
# try to save HDD space on the runner by removing some unneeded stuff
# ref: https://github.com/actions/runner-images/issues/2840#issuecomment-790492173
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf "/usr/local/share/boost"
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
shopt -s nullglob
TEST_SUBFOLDER=$(ls tests/models/test_{a..l}*.py) docker compose run runtests
7 changes: 7 additions & 0 deletions .github/workflows/run-model-tests-2.yml
@@ -59,5 +59,12 @@ jobs:
- name: run test
run: |
# try to save HDD space on the runner by removing some unneeded stuff
# ref: https://github.com/actions/runner-images/issues/2840#issuecomment-790492173
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf "/usr/local/share/boost"
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
shopt -s nullglob
TEST_SUBFOLDER=$(ls tests/models/test_{m..z}*.py) docker compose run runtests
7 changes: 7 additions & 0 deletions .github/workflows/run-pipeline-tests-1.yml
@@ -59,5 +59,12 @@ jobs:
- name: run test
run: |
# try to save HDD space on the runner by removing some unneeded stuff
# ref: https://github.com/actions/runner-images/issues/2840#issuecomment-790492173
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf "/usr/local/share/boost"
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
shopt -s nullglob
TEST_SUBFOLDER=$(ls tests/pipeline/test_{a..o}*.py) docker compose run runtests
8 changes: 8 additions & 0 deletions .github/workflows/run-pipeline-tests-2.yml
@@ -59,5 +59,13 @@ jobs:
- name: run test
run: |
# try to save HDD space on the runner by removing some unneeded stuff
# ref: https://github.com/actions/runner-images/issues/2840#issuecomment-790492173
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf "/usr/local/share/boost"
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
shopt -s nullglob
TEST_SUBFOLDER=$(ls tests/pipeline/test_{p..z}*.py) docker compose run runtests
7 changes: 7 additions & 0 deletions .github/workflows/run-util-tests.yml
@@ -59,4 +59,11 @@ jobs:
- name: run test
run: |
# try to save HDD space on the runner by removing some unneeded stuff
# ref: https://github.com/actions/runner-images/issues/2840#issuecomment-790492173
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf "/usr/local/share/boost"
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
TEST_SUBFOLDER=tests/util docker compose run runtests
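
The workflow changes above split the test suites across runner jobs by globbing test filenames with bash brace expansion (for example `ls tests/models/test_{a..l}*.py` versus `ls tests/models/test_{m..z}*.py`). As a rough illustration only — the actual split is done in bash inside the workflows, and the helper below (`partition_tests`) is hypothetical, not part of the repository — the same alphabetical partition can be reproduced in Python:

# Illustrative sketch only: reproduces the alphabetical partition that the
# workflows do in bash with brace expansion (e.g. `ls tests/models/test_{a..l}*.py`).
from pathlib import Path

def partition_tests(test_dir, first_letter, last_letter):
    """Return test files whose name after 'test_' starts within the letter range."""
    selected = []
    for path in sorted(Path(test_dir).glob('test_*.py')):
        initial = path.name[len('test_'):][:1].lower()
        if first_letter <= initial <= last_letter:
            selected.append(str(path))
    return selected

# Roughly mirrors run-model-tests-1.yml (a..l) and run-model-tests-2.yml (m..z).
print(partition_tests('tests/models', 'a', 'l'))
print(partition_tests('tests/models', 'm', 'z'))
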
@@ -0,0 +1,66 @@
"""rework cutouts and measurements
Revision ID: 7384c6d07485
Revises: a375526c8260
Create Date: 2024-06-28 17:57:44.173607
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '7384c6d07485'
down_revision = 'a375526c8260'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('_cutouts_index_sources_provenance_uc', 'cutouts', type_='unique')
    op.drop_index('ix_cutouts_ecllat', table_name='cutouts')
    op.drop_index('ix_cutouts_gallat', table_name='cutouts')
    op.drop_index('ix_cutouts_filepath', table_name='cutouts')
    op.create_index(op.f('ix_cutouts_filepath'), 'cutouts', ['filepath'], unique=True)
    op.create_unique_constraint('_cutouts_sources_provenance_uc', 'cutouts', ['sources_id', 'provenance_id'])
    op.drop_column('cutouts', 'ecllon')
    op.drop_column('cutouts', 'ra')
    op.drop_column('cutouts', 'gallat')
    op.drop_column('cutouts', 'index_in_sources')
    op.drop_column('cutouts', 'y')
    op.drop_column('cutouts', 'gallon')
    op.drop_column('cutouts', 'dec')
    op.drop_column('cutouts', 'x')
    op.drop_column('cutouts', 'ecllat')
    op.add_column('measurements', sa.Column('index_in_sources', sa.Integer(), nullable=False))
    op.add_column('measurements', sa.Column('center_x_pixel', sa.Integer(), nullable=False))
    op.add_column('measurements', sa.Column('center_y_pixel', sa.Integer(), nullable=False))
    op.drop_constraint('_measurements_cutouts_provenance_uc', 'measurements', type_='unique')
    op.create_unique_constraint('_measurements_cutouts_provenance_uc', 'measurements', ['cutouts_id', 'index_in_sources', 'provenance_id'])
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('_measurements_cutouts_provenance_uc', 'measurements', type_='unique')
    op.create_unique_constraint('_measurements_cutouts_provenance_uc', 'measurements', ['cutouts_id', 'provenance_id'])
    op.drop_column('measurements', 'center_y_pixel')
    op.drop_column('measurements', 'center_x_pixel')
    op.drop_column('measurements', 'index_in_sources')
    op.add_column('cutouts', sa.Column('ecllat', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True))
    op.add_column('cutouts', sa.Column('x', sa.INTEGER(), autoincrement=False, nullable=False))
    op.add_column('cutouts', sa.Column('dec', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=False))
    op.add_column('cutouts', sa.Column('gallon', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True))
    op.add_column('cutouts', sa.Column('y', sa.INTEGER(), autoincrement=False, nullable=False))
    op.add_column('cutouts', sa.Column('index_in_sources', sa.INTEGER(), autoincrement=False, nullable=False))
    op.add_column('cutouts', sa.Column('gallat', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True))
    op.add_column('cutouts', sa.Column('ra', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=False))
    op.add_column('cutouts', sa.Column('ecllon', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True))
    op.drop_constraint('_cutouts_sources_provenance_uc', 'cutouts', type_='unique')
    op.drop_index(op.f('ix_cutouts_filepath'), table_name='cutouts')
    op.create_index('ix_cutouts_filepath', 'cutouts', ['filepath'], unique=False)
    op.create_index('ix_cutouts_gallat', 'cutouts', ['gallat'], unique=False)
    op.create_index('ix_cutouts_ecllat', 'cutouts', ['ecllat'], unique=False)
    op.create_unique_constraint('_cutouts_index_sources_provenance_uc', 'cutouts', ['index_in_sources', 'sources_id', 'provenance_id'])
    # ### end Alembic commands ###
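
This migration moves the per-source bookkeeping from cutouts onto measurements: index_in_sources and the center_x_pixel / center_y_pixel columns now live on measurements, with uniqueness enforced on (cutouts_id, index_in_sources, provenance_id). A minimal sketch of what that implies for the ORM layer — the class name, base class, key types, and omitted columns are assumptions, not the project's actual measurements model:

# Sketch only: column names and the unique constraint come from the migration
# above; everything else (base class, id/foreign-key types) is assumed.
import sqlalchemy as sa
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Measurements(Base):
    __tablename__ = 'measurements'
    __table_args__ = (
        sa.UniqueConstraint('cutouts_id', 'index_in_sources', 'provenance_id',
                            name='_measurements_cutouts_provenance_uc'),
    )

    id = sa.Column(sa.BigInteger, primary_key=True, autoincrement=True)
    cutouts_id = sa.Column(sa.BigInteger, sa.ForeignKey('cutouts.id'), nullable=False)
    provenance_id = sa.Column(sa.Text, sa.ForeignKey('provenances.id'), nullable=False)
    # which entry in the detection source list this measurement corresponds to
    index_in_sources = sa.Column(sa.Integer, nullable=False)
    # pixel position of the cutout center in the parent image
    center_x_pixel = sa.Column(sa.Integer, nullable=False)
    center_y_pixel = sa.Column(sa.Integer, nullable=False)
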
69 changes: 69 additions & 0 deletions alembic/versions/2024_07_01_1135-370933973646_reference_sets.py
@@ -0,0 +1,69 @@
"""reference sets
Revision ID: 370933973646
Revises: a375526c8260
Create Date: 2024-06-23 11:35:43.941095
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '370933973646'
down_revision = '7384c6d07485'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('refsets',
    sa.Column('name', sa.Text(), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('upstream_hash', sa.Text(), nullable=False),
    sa.Column('created_at', sa.DateTime(), nullable=False),
    sa.Column('modified', sa.DateTime(), nullable=False),
    sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_refsets_created_at'), 'refsets', ['created_at'], unique=False)
    op.create_index(op.f('ix_refsets_id'), 'refsets', ['id'], unique=False)
    op.create_index(op.f('ix_refsets_name'), 'refsets', ['name'], unique=True)
    op.create_index(op.f('ix_refsets_upstream_hash'), 'refsets', ['upstream_hash'], unique=False)
    op.create_table('refset_provenance_association',
    sa.Column('provenance_id', sa.Text(), nullable=False),
    sa.Column('refset_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['provenance_id'], ['provenances.id'], name='refset_provenances_association_provenance_id_fkey', ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['refset_id'], ['refsets.id'], name='refsets_provenances_association_refset_id_fkey', ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('provenance_id', 'refset_id')
    )
    op.drop_index('ix_refs_validity_end', table_name='refs')
    op.drop_index('ix_refs_validity_start', table_name='refs')
    op.drop_column('refs', 'validity_start')
    op.drop_column('refs', 'validity_end')

    op.add_column('images', sa.Column('airmass', sa.REAL(), nullable=True))
    op.create_index(op.f('ix_images_airmass'), 'images', ['airmass'], unique=False)
    op.add_column('exposures', sa.Column('airmass', sa.REAL(), nullable=True))
    op.create_index(op.f('ix_exposures_airmass'), 'exposures', ['airmass'], unique=False)
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_images_airmass'), table_name='images')
    op.drop_column('images', 'airmass')
    op.drop_index(op.f('ix_exposures_airmass'), table_name='exposures')
    op.drop_column('exposures', 'airmass')
    op.add_column('refs', sa.Column('validity_end', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))
    op.add_column('refs', sa.Column('validity_start', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))
    op.create_index('ix_refs_validity_start', 'refs', ['validity_start'], unique=False)
    op.create_index('ix_refs_validity_end', 'refs', ['validity_end'], unique=False)
    op.drop_table('refset_provenance_association')
    op.drop_index(op.f('ix_refsets_upstream_hash'), table_name='refsets')
    op.drop_index(op.f('ix_refsets_name'), table_name='refsets')
    op.drop_index(op.f('ix_refsets_id'), table_name='refsets')
    op.drop_index(op.f('ix_refsets_created_at'), table_name='refsets')
    op.drop_table('refsets')
    # ### end Alembic commands ###
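
The reference-sets migration adds a refsets table and a many-to-many association with provenances, and drops the old validity window on refs. For orientation, here is the same schema expressed as SQLAlchemy Core metadata — a sketch only: table and column names follow the migration above, while the metadata object, comments, and anything the migration does not show are assumptions:

# Sketch of the tables created by this migration, as SQLAlchemy Core metadata.
import sqlalchemy as sa

metadata = sa.MetaData()

# Mirrors op.create_table('refsets', ...) above; index=True/unique=True stand in
# for the explicit op.create_index calls.
refsets = sa.Table(
    'refsets', metadata,
    sa.Column('id', sa.BigInteger, primary_key=True, autoincrement=True),
    sa.Column('name', sa.Text, nullable=False, unique=True, index=True),
    sa.Column('description', sa.Text, nullable=True),
    # presumably a hash of the upstream provenances the reference set was built from
    sa.Column('upstream_hash', sa.Text, nullable=False, index=True),
    sa.Column('created_at', sa.DateTime, nullable=False, index=True),
    sa.Column('modified', sa.DateTime, nullable=False),
)

# Many-to-many association between reference sets and provenances.
refset_provenance_association = sa.Table(
    'refset_provenance_association', metadata,
    sa.Column('provenance_id', sa.Text,
              sa.ForeignKey('provenances.id', ondelete='CASCADE'), primary_key=True),
    sa.Column('refset_id', sa.Integer,
              sa.ForeignKey('refsets.id', ondelete='CASCADE'), primary_key=True),
)
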