Skip to content

Commit

Permalink
still whackamole
Browse files Browse the repository at this point in the history
  • Loading branch information
rknop committed Jul 18, 2024
1 parent 5e75e26 commit 601e4b3
Show file tree
Hide file tree
Showing 2 changed files with 47 additions and 10 deletions.
53 changes: 45 additions & 8 deletions tests/fixtures/ptf.py
Original file line number Diff line number Diff line change
Expand Up @@ -287,9 +287,30 @@ def ptf_reference_images(ptf_images_factory):

yield images

for image in images:
image.exposure.delete_from_disk_and_database(commit=True)
image.delete_from_disk_and_database(commit=True, remove_downstreams=True)
# Not just using an SQLAlchemy merge on the objects here, because
# that was leading to MSEs (Mysterious SQLAlchemy Errors -- they
# happen often enough that we need a bloody acronym for them). So,
# even though we're using SQLAlchemy, figure out what needs to be
# deleted the "database" way rather than counting on opaque
# SA merges. (The images in the images variable created above
# won't have their database IDs yet, but may well have received them
# in something that uses this fixture, which is why we have to search
# the database for filepath.)

with SmartSession() as session:
imgs = session.query( Image ).filter( Image.filepath.in_( [ i.filepath for i in images ] ) ).all()
expsrs = session.query( Exposure ).filter(
Exposure.filepath.in_( [ i.exposure.filepath for i in images ] ) ).all()
# Deliberately do *not* pass the session on to
# delete_from_disk_and_database to avoid further SQLAlchemy
# automatic behavior-- though since in this case we just got these
# images, we *might* know what's been loaded with them and that
# will then be automatically refreshed at some point (But, with
# SA, you can never really be sure.)
for expsr in expsrs:
expsr.delete_from_disk_and_database( commit=True )
for image in imgs:
image.delete_from_disk_and_database( commit=True, remove_downstreams=True )

# ROB REMOVE THIS COMMENT
# with SmartSession() as session:
Expand All @@ -308,9 +329,16 @@ def ptf_supernova_images(ptf_images_factory):

yield images

for image in images:
image.delete_from_disk_and_database(commit=True, remove_downstreams=True)
image.exposure.delete_from_disk_and_database(session=session, commit=True)
# See comment in ptf_reference_images

with SmartSession() as session:
imgs = session.query( Image ).filter( Image.filepath.in_( [ i.filepath for i in images ] ) ).all()
expsrs = session.query( Exposure ).filter(
Exposure.filepath.in_( [ i.exposure.filepath for i in images ] ) ).all()
for expsr in expsrs:
expsr.delete_from_disk_and_database( commit=True )
for image in imgs:
image.delete_from_disk_and_database( commit=True, remove_downstreams=True )

# ROB REMOVE THIS COMMENT
# with SmartSession() as session:
Expand Down Expand Up @@ -409,8 +437,17 @@ def ptf_aligned_images(request, ptf_cache_dir, data_dir, code_version):
action='ignore',
message=r'.*DELETE statement on table .* expected to delete \d* row\(s\).*',
)
for image in ptf_reference_images:
image.exposure.delete_from_disk_and_database( commit=True, remove_downstreams=True )

# See comment in ptf_reference_images

with SmartSession() as session:
expsrs = session.query( Exposure ).filter(
Exposure.filepath.in_( [ i.exposure.filepath for i in images ] ) ).all()
for expsr in expsrs:
expsr.delete_from_disk_and_database( commit=True, remove_downstreams=True )

# for image in ptf_reference_images:
# image.exposure.delete_from_disk_and_database( commit=True, remove_downstreams=True )

# ROB REMOVE THIS COMMENT
# with SmartSession() as session, warnings.catch_warnings():
Expand Down
4 changes: 2 additions & 2 deletions tests/models/test_objects.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,10 +156,10 @@ def test_filtering_measurements_on_object(sim_lightcurves):
assert set([m.id for m in found]).issubset(set([m.id for m in new_measurements]))

# get measurements that are very close to the source
found = obj.get_measurements_list(radius=0.2) # should include only 1-3 measurements
found = obj.get_measurements_list(radius=0.1) # should include only 1-3 measurements
assert len(found) > 0
assert len(found) < len(new_measurements)
assert all(m.distance_to(obj) <= 0.2 for m in found)
assert all(m.distance_to(obj) <= 0.1 for m in found)
assert set([m.id for m in found]).issubset(set([m.id for m in new_measurements]))

# filter on all the offsets disqualifier score
Expand Down

0 comments on commit 601e4b3

Please sign in to comment.