Skip to content

Commit

Permalink
Fixture cleanup (c3-time-domain#144)
Browse files Browse the repository at this point in the history
* carve off fixtures into files
* add caching for fixtures
* add fixtures for all pipeline objects
* change column names from source_list to sources
  • Loading branch information
guynir42 authored Jan 5, 2024
1 parent 4a2c416 commit 1341884
Show file tree
Hide file tree
Showing 74 changed files with 3,777 additions and 2,432 deletions.
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,14 @@ tests/local_config.yaml
tests/local_overrides.yaml
tests/local_augments.yaml
tests/improc/cache/*
data/cache/*
data/test_data/DECam_examples/c4d_221104_074232_ori.fits.fz
data/test_data/DECam_examples/*_cached
data/DECam_default_calibrators
data/GaiaDR3_excerpt
.pytest.ini
tests/plots/*
tests/plots
tests/temp_data

# Byte-compiled / optimized / DLL files
__pycache__/
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
"""source lists to sources
Revision ID: 4c2a7e8a525a
Revises: b53eed9ccb36
Create Date: 2023-12-04 13:07:43.519787
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '4c2a7e8a525a'        # unique id of this migration
down_revision = 'b53eed9ccb36'   # migration this one applies on top of
branch_labels = None
depends_on = None


def upgrade() -> None:
    """Apply the source_list -> sources column rename.

    For each of the cutouts, world_coordinates and zero_points tables:
    add a NOT NULL ``sources_id`` column (still referencing the
    ``source_lists`` table) with a matching index and foreign key, then
    drop the old ``source_list_id`` column, index and foreign key.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): the new sources_id columns are added NOT NULL with no
    # server default, which fails if these tables contain rows at upgrade
    # time -- presumably they are empty when this migration runs; confirm.
    op.add_column('cutouts', sa.Column('sources_id', sa.BigInteger(), nullable=False))
    op.drop_index('ix_cutouts_source_list_id', table_name='cutouts')
    op.create_index(op.f('ix_cutouts_sources_id'), 'cutouts', ['sources_id'], unique=False)
    op.drop_constraint('cutouts_source_list_id_fkey', 'cutouts', type_='foreignkey')
    # NOTE(review): the recreated constraints keep their old
    # *_source_list_id_fkey names even though they now cover sources_id;
    # downgrade() drops them by these names, so do not rename here.
    # Also note this cutouts FK has no ondelete='CASCADE', unlike the
    # world_coordinates and zero_points ones below -- confirm intentional.
    op.create_foreign_key('cutouts_source_list_id_fkey', 'cutouts', 'source_lists', ['sources_id'], ['id'])
    op.drop_column('cutouts', 'source_list_id')
    op.add_column('world_coordinates', sa.Column('sources_id', sa.BigInteger(), nullable=False))
    op.drop_index('ix_world_coordinates_source_list_id', table_name='world_coordinates')
    op.create_index(op.f('ix_world_coordinates_sources_id'), 'world_coordinates', ['sources_id'], unique=False)
    op.drop_constraint('world_coordinates_source_list_id_fkey', 'world_coordinates', type_='foreignkey')
    op.create_foreign_key('world_coordinates_source_list_id_fkey', 'world_coordinates', 'source_lists', ['sources_id'], ['id'], ondelete='CASCADE')
    op.drop_column('world_coordinates', 'source_list_id')
    op.add_column('zero_points', sa.Column('sources_id', sa.BigInteger(), nullable=False))
    op.drop_index('ix_zero_points_source_list_id', table_name='zero_points')
    op.create_index(op.f('ix_zero_points_sources_id'), 'zero_points', ['sources_id'], unique=False)
    op.drop_constraint('zero_points_source_list_id_fkey', 'zero_points', type_='foreignkey')
    op.create_foreign_key('zero_points_source_list_id_fkey', 'zero_points', 'source_lists', ['sources_id'], ['id'], ondelete='CASCADE')
    op.drop_column('zero_points', 'source_list_id')
    # ### end Alembic commands ###


def downgrade() -> None:
    """Revert the source_list -> sources column rename.

    Mirror image of upgrade(), in reverse table order: for zero_points,
    world_coordinates and cutouts, recreate the NOT NULL
    ``source_list_id`` column with its index and foreign key, then drop
    the ``sources_id`` column, index and foreign key.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): same caveat as upgrade() -- the re-added columns are
    # NOT NULL with no server default, so this assumes the tables are
    # empty when downgrading; confirm.
    op.add_column('zero_points', sa.Column('source_list_id', sa.BIGINT(), autoincrement=False, nullable=False))
    op.drop_constraint('zero_points_source_list_id_fkey', 'zero_points', type_='foreignkey')
    op.create_foreign_key('zero_points_source_list_id_fkey', 'zero_points', 'source_lists', ['source_list_id'], ['id'], ondelete='CASCADE')
    op.drop_index(op.f('ix_zero_points_sources_id'), table_name='zero_points')
    op.create_index('ix_zero_points_source_list_id', 'zero_points', ['source_list_id'], unique=False)
    op.drop_column('zero_points', 'sources_id')
    op.add_column('world_coordinates', sa.Column('source_list_id', sa.BIGINT(), autoincrement=False, nullable=False))
    op.drop_constraint('world_coordinates_source_list_id_fkey', 'world_coordinates', type_='foreignkey')
    op.create_foreign_key('world_coordinates_source_list_id_fkey', 'world_coordinates', 'source_lists', ['source_list_id'], ['id'], ondelete='CASCADE')
    op.drop_index(op.f('ix_world_coordinates_sources_id'), table_name='world_coordinates')
    op.create_index('ix_world_coordinates_source_list_id', 'world_coordinates', ['source_list_id'], unique=False)
    op.drop_column('world_coordinates', 'sources_id')
    op.add_column('cutouts', sa.Column('source_list_id', sa.BIGINT(), autoincrement=False, nullable=False))
    op.drop_constraint('cutouts_source_list_id_fkey', 'cutouts', type_='foreignkey')
    op.create_foreign_key('cutouts_source_list_id_fkey', 'cutouts', 'source_lists', ['source_list_id'], ['id'])
    op.drop_index(op.f('ix_cutouts_sources_id'), table_name='cutouts')
    op.create_index('ix_cutouts_source_list_id', 'cutouts', ['source_list_id'], unique=False)
    op.drop_column('cutouts', 'sources_id')
    # ### end Alembic commands ###
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
"""add bitflag to zeropoint
Revision ID: f831276c00e2
Revises: 4c2a7e8a525a
Create Date: 2023-12-06 12:03:29.031978
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'f831276c00e2'        # unique id of this migration
down_revision = '4c2a7e8a525a'   # migration this one applies on top of
branch_labels = None
depends_on = None


def upgrade() -> None:
    """Add bitflag bookkeeping columns to the zero_points table.

    Adds indexed NOT NULL ``_bitflag`` and ``_upstream_bitflag`` bigint
    columns plus a nullable ``description`` text column.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): the two NOT NULL bigint columns are added with no
    # server default, which fails if zero_points already contains rows --
    # presumably the table is empty at upgrade time; confirm.
    op.add_column('zero_points', sa.Column('_bitflag', sa.BIGINT(), nullable=False))
    op.add_column('zero_points', sa.Column('description', sa.Text(), nullable=True))
    op.add_column('zero_points', sa.Column('_upstream_bitflag', sa.BIGINT(), nullable=False))
    op.create_index(op.f('ix_zero_points__bitflag'), 'zero_points', ['_bitflag'], unique=False)
    op.create_index(op.f('ix_zero_points__upstream_bitflag'), 'zero_points', ['_upstream_bitflag'], unique=False)
    # ### end Alembic commands ###


def downgrade() -> None:
    """Remove the bitflag bookkeeping columns from zero_points.

    Drops the indexes first, then the ``_upstream_bitflag``,
    ``description`` and ``_bitflag`` columns (reverse of upgrade()).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_zero_points__upstream_bitflag'), table_name='zero_points')
    op.drop_index(op.f('ix_zero_points__bitflag'), table_name='zero_points')
    op.drop_column('zero_points', '_upstream_bitflag')
    op.drop_column('zero_points', 'description')
    op.drop_column('zero_points', '_bitflag')
    # ### end Alembic commands ###
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
"""cascade foreign key deletion on source list
Revision ID: 360a5ebe3848
Revises: f831276c00e2
Create Date: 2023-12-08 11:40:17.861309
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '360a5ebe3848'        # unique id of this migration
down_revision = 'f831276c00e2'   # migration this one applies on top of
branch_labels = None
depends_on = None


def upgrade() -> None:
    """Make source_lists.image_id cascade on image deletion.

    Recreates the source_lists -> images foreign key with
    ``ondelete='CASCADE'`` so source lists are deleted along with their
    parent image.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('source_lists_image_id_fkey', 'source_lists', type_='foreignkey')
    op.create_foreign_key('source_lists_image_id_fkey', 'source_lists', 'images', ['image_id'], ['id'], ondelete='CASCADE')
    # ### end Alembic commands ###


def downgrade() -> None:
    """Restore the non-cascading source_lists -> images foreign key.

    Recreates the foreign key without ``ondelete``, reverting upgrade().
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('source_lists_image_id_fkey', 'source_lists', type_='foreignkey')
    op.create_foreign_key('source_lists_image_id_fkey', 'source_lists', 'images', ['image_id'], ['id'])
    # ### end Alembic commands ###

This file was deleted.

This file was deleted.

This file was deleted.

File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
14 changes: 12 additions & 2 deletions default_config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -59,13 +59,23 @@ pipeline: {}
preprocessing:
use_sky_subtraction: False

extraction: {}
extraction:
measure_psf: true
threshold: 3.0
method: sextractor

astro_cal:
cross_match_catalog: GaiaDR3
solution_method: scamp
max_catalog_mag: [20.0]
mag_range_catalog: 4.0
min_catalog_stars: 50

photo_cal:
cross_match_catalog: GaiaDR3
max_catalog_mag: [20.0]
mag_range_catalog: 4.0
min_catalog_stars: 50

subtraction:
method: hotpants
Expand All @@ -82,7 +92,7 @@ measurement:
aperture_radius: 3.0
real_bogus_version: null

# Specific configuration for specific instruments
# Specific configuration for specific instruments.
# Instruments should override the two defaults from
# instrument_default; they may add additional
# configuration that their code needs.
Expand Down
5 changes: 2 additions & 3 deletions docs/setup.md
Original file line number Diff line number Diff line change
Expand Up @@ -267,7 +267,6 @@ submodules that have not been initialized yet.

Some environmental variables are used by SeeChange.
- `GITHUB_REPOSITORY_OWNER` is the name of your github user (used only for dockerized tests).
- `SEECHANGE_TEST_ARCHIVE_DIR` is used to set up a local directory for test data archive.
Usually this will point to a folder outside the SeeChange directory,
where data can be downloaded and stored.
- `SEECHANGE_CONFIG` can be used to specify the location of the main config file,
Expand All @@ -290,6 +289,7 @@ One way to set things up is to put the following into
```yaml
archive:
local_read_dir: /path/to/local/archive
local_write_dir: /path/to/local/archive
archive_url: null

db:
Expand All @@ -301,8 +301,7 @@ db:
database: seechange
```
Replace `/path/to/local/archive` with the path to the local archive directory,
which should also be defined as the environmental variable `SEECHANGE_TEST_ARCHIVE_DIR`.
Replace `/path/to/local/archive` with the path to the local archive directory.

The same files (`local_overrides.yaml` and `local_augments.yaml`) can be used
on the main SeeChange directory, where they have the same effect,
Expand Down
45 changes: 45 additions & 0 deletions docs/testing.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
## Testing

Tests are an integral part of the development process.
We run mostly unit tests that test specific parts of the code,
but a few integration tests are also included (end-to-end tests).
We plan to add some regression tests that verify the results
of the pipeline are consistent with previous code versions.

### Running tests

To run the tests, simply run the following command from the root directory of the project:

```bash
pytest
```

To run the tests in a dockerized environment, see the setup.md file, under "Running tests".

### Test caching and data folders

Some of our tests require large datasets (mostly images).
We include a few example images in the repo itself,
but most of the required data is lazy downloaded from
the appropriate servers (e.g., from Noirlab).

To avoid downloading the same data over and over again,
we cache the data in the `data/cache` folder.
To make sure the downloading process works as expected,
users can choose to delete this folder.
In the tests, the path to this folder is given by
the `cache_dir` fixture.

Note that the persistent data that comes with the
repo is everything else in the `data` folder,
which is pointed to by the `persistent_dir` fixture.

Finally, the working directory for local storage,
which is referenced by the `FileOnDiskMixin.local_path`
class variable, is defined in the test config YAML file,
and can be accessed using the `data_dir` fixture.
This folder is systematically wiped when the tests
are completed.



4 changes: 2 additions & 2 deletions improc/scamp.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,10 +48,10 @@ def _solve_wcs_scamp( sources, catalog, crossid_rad=2.,
scamp must be able to match at least this many objects
for the match to be considered good.
min_arcsec_residual: float, default 0.15
max_arcsec_residual: float, default 0.15
maximum residual in arcseconds, along both RA and Dec
(i.e. not a radial residual), for the WCS solution to be
considered successful.
considered successful.
magkey: str, default MAG
The keyword to use for magnitudes in the catalog file.
Expand Down
Loading

0 comments on commit 1341884

Please sign in to comment.