From c08f8a27117d642520f8786c2970e2f04cf34acb Mon Sep 17 00:00:00 2001
From: Rob Knop
Date: Fri, 26 Jul 2024 10:14:44 -0700
Subject: [PATCH] Mostly comments and debug strings as I figured out an error I
 was getting (documented in Issue #339)

---
 models/image.py                     | 23 ++++++++++
 pipeline/data_store.py              | 15 +++++-
 tests/fixtures/datastore_factory.py | 71 ++++++++++++++++++++++-------
 tests/fixtures/ptf.py               | 38 +++++++++++----
 util/cache.py                       | 49 +++++++++++++++++++-
 5 files changed, 168 insertions(+), 28 deletions(-)

diff --git a/models/image.py b/models/image.py
index 94315753..e944bb08 100644
--- a/models/image.py
+++ b/models/image.py
@@ -562,8 +562,31 @@ def merge_all(self, session):
         Must provide a session to merge into. Need to commit at the end.
 
         Returns the merged image with all its products on the same session.
+
+        DEVELOPER NOTE: changing what gets merged in this function
+        requires a corresponding change in
+        pipeline/data_store.py::DataStore.save_and_commit
+
         """
         new_image = self.safe_merge(session=session)
+
+        import io
+        strio = io.StringIO()
+        strio.write( "In image.merge_all; objects in session:\n" )
+        if len( session.new ) > 0 :
+            strio.write( "  NEW:\n" )
+            for obj in session.new:
+                strio.write( f"    {obj}\n" )
+        if len( session.dirty ) > 0:
+            strio.write( "  DIRTY:\n" )
+            for obj in session.dirty:
+                strio.write( f"    {obj}\n" )
+        if len( session.deleted ) > 0:
+            strio.write( "  DELETED:\n" )
+            for obj in session.deleted:
+                strio.write( f"    {obj}\n" )
+        SCLogger.debug( strio.getvalue() )
+
         session.flush()  # make sure new_image gets an ID
 
         if self.sources is not None:
diff --git a/pipeline/data_store.py b/pipeline/data_store.py
index f5f83270..8efddf58 100644
--- a/pipeline/data_store.py
+++ b/pipeline/data_store.py
@@ -1,3 +1,4 @@
+import io
 import warnings
 import datetime
 import sqlalchemy as sa
@@ -1379,6 +1380,9 @@ def save_and_commit(self, exists_ok=False, overwrite=True, no_archive=False,
         True), as the image headers get "first-look" values, not
         necessarily the latest and greatest if we tune either process.
 
+        DEVELOPER NOTE: this code has to stay synced properly with
+        models/image.py::Image.merge_all
+
         Parameters
         ----------
         exists_ok: bool, default False
@@ -1431,8 +1435,15 @@ def save_and_commit(self, exists_ok=False, overwrite=True, no_archive=False,
             if obj is None:
                 continue
 
-            SCLogger.debug( f'save_and_commit considering a {obj.__class__.__name__} with filepath '
-                            f'{obj.filepath if isinstance(obj,FileOnDiskMixin) else ""}' )
+            strio = io.StringIO()
+            strio.write( f"save_and_commit of {att} considering a {obj.__class__.__name__}" )
+            if isinstance( obj, FileOnDiskMixin ):
+                strio.write( f" with filepath {obj.filepath}" )
+            elif isinstance( obj, list ):
+                strio.write( f" of types {[type(i) for i in obj]}" )
+            SCLogger.debug( strio.getvalue() )
+            # SCLogger.debug( f'save_and_commit of {att} considering a {obj.__class__.__name__} with filepath '
+            #                 f'{obj.filepath if isinstance(obj,FileOnDiskMixin) else ""}' )
 
             if isinstance(obj, FileOnDiskMixin):
                 mustsave = True
diff --git a/tests/fixtures/datastore_factory.py b/tests/fixtures/datastore_factory.py
index bd91118b..c2309b1c 100644
--- a/tests/fixtures/datastore_factory.py
+++ b/tests/fixtures/datastore_factory.py
@@ -61,6 +61,7 @@ def make_datastore(
             skip_sub=False
     ):
         code_version = args[0].provenance.code_version
+        SCLogger.debug( f"make_datastore called with args {args}, overrides={overrides}, augments={augments}" )
        ds = DataStore(*args)  # make a new datastore
 
        use_cache = cache_dir is not None and cache_base_name is not None and not env_as_bool( "LIMIT_CACHE_USAGE" )
@@ -83,6 +84,7 @@ def make_datastore(
            code_version = session.merge(code_version)
 
            if ds.image is not None:  # if starting from an externally provided Image, must merge it first
+                SCLogger.debug( "make_datastore was provided an external image; merging it" )
                ds.image = ds.image.merge_all(session)
 
            ############ load the reference set ############
@@ -97,14 +99,14 @@ def make_datastore(
 
                refset = session.scalars(sa.select(RefSet).where(RefSet.name == refset_name)).first()
                if refset is None:
-                    raise ValueError(f'No reference set found with name {refset_name}')
+                    raise ValueError(f'make_datastore found no reference set with name {refset_name}')
 
                ref_prov = refset.provenances[0]
 
            ############ preprocessing to create image ############
            if ds.image is None and use_cache:  # check if preprocessed image is in cache
                if os.path.isfile(image_cache_path):
-                    SCLogger.debug('loading image from cache. ')
+                    SCLogger.debug('make_datastore loading image from cache. ')
                    ds.image = copy_from_cache(Image, cache_dir, cache_name)
                    # assign the correct exposure to the object loaded from cache
                    if ds.exposure_id is not None:
@@ -133,6 +135,8 @@ def make_datastore(
                    # if Image already exists on the database, use that instead of this one
                    existing = session.scalars(sa.select(Image).where(Image.filepath == ds.image.filepath)).first()
                    if existing is not None:
+                        SCLogger.debug( f"make_datastore updating existing image {existing.id} "
+                                        f"({existing.filepath}) with image loaded from cache" )
                        # overwrite the existing row data using the JSON cache file
                        for key in sa.inspect(ds.image).mapper.columns.keys():
                            value = getattr(ds.image, key)
@@ -142,6 +146,9 @@ def make_datastore(
                            ):
                                setattr(existing, key, value)
                        ds.image = existing  # replace with the existing row
+                    else:
+                        SCLogger.debug( f"make_datastore did not find image with filepath "
+                                        f"{ds.image.filepath} in database" )
 
                    ds.image.provenance = prov
 
@@ -149,7 +156,7 @@ def make_datastore(
                        ds.image.save(verify_md5=False)
 
            if ds.image is None:  # make the preprocessed image
-                SCLogger.debug('making preprocessed image. ')
+                SCLogger.debug('make_datastore making preprocessed image. ')
                ds = p.preprocessor.run(ds, session)
                ds.image.provenance.is_testing = True
                if bad_pixel_map is not None:
@@ -202,15 +209,16 @@ def make_datastore(
            cache_name = f'{cache_base_name}.sources_{prov.id[:6]}.fits.json'
            sources_cache_path = os.path.join(cache_dir, cache_name)
            if os.path.isfile(sources_cache_path):
-                SCLogger.debug('loading source list from cache. ')
+                SCLogger.debug('make_datastore loading source list from cache. ')
                ds.sources = copy_from_cache(SourceList, cache_dir, cache_name)
-
                # if SourceList already exists on the database, use that instead of this one
                existing = session.scalars(
                    sa.select(SourceList).where(SourceList.filepath == ds.sources.filepath)
                ).first()
                if existing is not None:
                    # overwrite the existing row data using the JSON cache file
+                    SCLogger.debug( f"make_datastore updating existing source list {existing.id} "
+                                    f"({existing.filepath}) with source list loaded from cache" )
                    for key in sa.inspect(ds.sources).mapper.columns.keys():
                        value = getattr(ds.sources, key)
                        if (
@@ -219,6 +227,9 @@ def make_datastore(
                        ):
                            setattr(existing, key, value)
                    ds.sources = existing  # replace with the existing row
+                else:
+                    SCLogger.debug( f"make_datastore did not find source list with filepath "
+                                    f"{ds.sources.filepath} in the database" )
 
                ds.sources.provenance = prov
                ds.sources.image = ds.image
@@ -230,15 +241,16 @@ def make_datastore(
            cache_name = f'{cache_base_name}.psf_{prov.id[:6]}.fits.json'
            psf_cache_path = os.path.join(cache_dir, cache_name)
            if os.path.isfile(psf_cache_path):
-                SCLogger.debug('loading PSF from cache. ')
+                SCLogger.debug('make_datastore loading PSF from cache. ')
                ds.psf = copy_from_cache(PSF, cache_dir, cache_name)
-
                # if PSF already exists on the database, use that instead of this one
                existing = session.scalars(
                    sa.select(PSF).where(PSF.filepath == ds.psf.filepath)
                ).first()
                if existing is not None:
                    # overwrite the existing row data using the JSON cache file
+                    SCLogger.debug( f"make_datastore updating existing psf {existing.id} "
+                                    f"({existing.filepath}) with psf loaded from cache" )
                    for key in sa.inspect(ds.psf).mapper.columns.keys():
                        value = getattr(ds.psf, key)
                        if (
@@ -247,6 +259,9 @@ def make_datastore(
                        ):
                            setattr(existing, key, value)
                    ds.psf = existing  # replace with the existing row
+                else:
+                    SCLogger.debug( f"make_datastore did not find psf with filepath "
+                                    f"{ds.psf.filepath} in the database" )
 
                ds.psf.provenance = prov
                ds.psf.image = ds.image
@@ -258,15 +273,16 @@ def make_datastore(
            cache_name = f'{cache_base_name}.bg_{prov.id[:6]}.h5.json'
            bg_cache_path = os.path.join(cache_dir, cache_name)
            if os.path.isfile(bg_cache_path):
-                SCLogger.debug('loading background from cache. ')
+                SCLogger.debug('make_datastore loading background from cache. ')
                ds.bg = copy_from_cache(Background, cache_dir, cache_name)
-
                # if BG already exists on the database, use that instead of this one
                existing = session.scalars(
                    sa.select(Background).where(Background.filepath == ds.bg.filepath)
                ).first()
                if existing is not None:
                    # overwrite the existing row data using the JSON cache file
+                    SCLogger.debug( f"make_datastore updating existing background {existing.id} "
+                                    f"({existing.filepath}) with background loaded from cache" )
                    for key in sa.inspect(ds.bg).mapper.columns.keys():
                        value = getattr(ds.bg, key)
                        if (
@@ -275,6 +291,9 @@ def make_datastore(
                        ):
                            setattr(existing, key, value)
                    ds.bg = existing
+                else:
+                    SCLogger.debug( f"make_datastore did not find background with filepath "
+                                    f"{ds.bg.filepath} in the database" )
 
                ds.bg.provenance = prov
                ds.bg.image = ds.image
@@ -286,7 +305,7 @@ def make_datastore(
            cache_name = f'{cache_base_name}.wcs_{prov.id[:6]}.txt.json'
            wcs_cache_path = os.path.join(cache_dir, cache_name)
            if os.path.isfile(wcs_cache_path):
-                SCLogger.debug('loading WCS from cache. ')
+                SCLogger.debug('make_datastore loading WCS from cache. ')
                ds.wcs = copy_from_cache(WorldCoordinates, cache_dir, cache_name)
 
                prov = session.merge(prov)
@@ -303,6 +322,8 @@ def make_datastore(
 
                if existing is not None:
                    # overwrite the existing row data using the JSON cache file
+                    SCLogger.debug( f"make_datastore updating existing wcs {existing.id} "
+                                    f"with wcs loaded from cache" )
                    for key in sa.inspect(ds.wcs).mapper.columns.keys():
                        value = getattr(ds.wcs, key)
                        if (
@@ -311,6 +332,8 @@ def make_datastore(
                        ):
                            setattr(existing, key, value)
                    ds.wcs = existing  # replace with the existing row
+                else:
+                    SCLogger.debug( "make_datastore did not find existing wcs in database" )
 
                ds.wcs.provenance = prov
                ds.wcs.sources = ds.sources
@@ -321,7 +344,7 @@ def make_datastore(
            cache_name = cache_base_name + '.zp.json'
            zp_cache_path = os.path.join(cache_dir, cache_name)
            if os.path.isfile(zp_cache_path):
-                SCLogger.debug('loading zero point from cache. ')
+                SCLogger.debug('make_datastore loading zero point from cache. ')
                ds.zp = copy_from_cache(ZeroPoint, cache_dir, cache_name)
 
                # check if ZP already exists on the database
@@ -337,6 +360,8 @@ def make_datastore(
 
                if existing is not None:
                    # overwrite the existing row data using the JSON cache file
+                    SCLogger.debug( f"make_datastore updating existing zp {existing.id} "
+                                    f"with zp loaded from cache" )
                    for key in sa.inspect(ds.zp).mapper.columns.keys():
                        value = getattr(ds.zp, key)
                        if (
@@ -345,13 +370,15 @@ def make_datastore(
                        ):
                            setattr(existing, key, value)
                    ds.zp = existing  # replace with the existing row
+                else:
+                    SCLogger.debug( "make_datastore did not find existing zp in database" )
 
                ds.zp.provenance = prov
                ds.zp.sources = ds.sources
 
            # if any data product is missing, must redo the extraction step
            if ds.sources is None or ds.psf is None or ds.bg is None or ds.wcs is None or ds.zp is None:
-                SCLogger.debug('extracting sources. ')
+                SCLogger.debug('make_datastore extracting sources. ')
                ds = p.extractor.run(ds, session)
 
                ds.sources.save(overwrite=True)
@@ -391,6 +418,7 @@ def make_datastore(
                    if output_path != zp_cache_path:
                        warnings.warn(f'cache path {zp_cache_path} does not match output path {output_path}')
 
+            SCLogger.debug( "make_datastore running ds.save_and_commit on image (before subtraction)" )
            ds.save_and_commit(session=session)
 
            # make a new copy of the image to cache, including the estimates for lim_mag, fwhm, etc.
@@ -399,14 +427,17 @@ def make_datastore(
 
            # If we were told not to try to do a subtraction, then we're done
            if skip_sub:
+                SCLogger.debug( "make_datastore : skip_sub is True, returning" )
                return ds
 
            # must provide the reference provenance explicitly since we didn't build a prov_tree
            ref = ds.get_reference(ref_prov, session=session)
            if ref is None:
+                SCLogger.debug( "make_datastore : could not find a reference, returning" )
                return ds  # if no reference is found, simply return the datastore without the rest of the products
 
            if use_cache:  # try to find the subtraction image in the cache
+                SCLogger.debug( "make_datastore looking for subtraction image in cache..." )
                prov = Provenance(
                    code_version=code_version,
                    process='subtraction',
@@ -428,7 +459,7 @@ def make_datastore(
                cache_name = cache_sub_name + '.image.fits.json'
                sub_cache_path = os.path.join(cache_dir, cache_name)
                if os.path.isfile(sub_cache_path):
-                    SCLogger.debug('loading subtraction image from cache. ')
+                    SCLogger.debug( f'make_datastore loading subtraction image from cache: {sub_cache_path}' )
                    ds.sub_image = copy_from_cache(Image, cache_dir, cache_name)
 
                    ds.sub_image.provenance = prov
@@ -505,8 +536,11 @@ def make_datastore(
                        ds.sub_image._aligned_images = [image_aligned_ref, image_aligned_new]
                    else:
                        ds.sub_image._aligned_images = [image_aligned_new, image_aligned_ref]
+                else:
+                    SCLogger.debug( "make_datastore didn't find subtraction image in cache" )
 
            if ds.sub_image is None:  # no hit in the cache
+                SCLogger.debug( "make_datastore running subtractor to create subtraction image" )
                ds = p.subtractor.run(ds, session)
                ds.sub_image.save(verify_md5=False)  # make sure it is also saved to archive
                if use_cache:
@@ -515,6 +549,7 @@ def make_datastore(
                        warnings.warn(f'cache path {sub_cache_path} does not match output path {output_path}')
 
            if use_cache:  # save the aligned images to cache
+                SCLogger.debug( "make_datastore saving aligned images to cache" )
                for im in ds.sub_image.aligned_images:
                    im.save(no_archive=True)
                    copy_to_cache(im, cache_dir)
@@ -532,13 +567,14 @@ def make_datastore(
 
            cache_name = os.path.join(cache_dir, cache_sub_name + f'.sources_{prov.id[:6]}.npy.json')
            if use_cache and os.path.isfile(cache_name):
-                SCLogger.debug('loading detections from cache. ')
+                SCLogger.debug( "make_datastore loading detections from cache." )
                ds.detections = copy_from_cache(SourceList, cache_dir, cache_name)
                ds.detections.provenance = prov
                ds.detections.image = ds.sub_image
                ds.sub_image.sources = ds.detections
                ds.detections.save(verify_md5=False)
            else:  # cannot find detections on cache
+                SCLogger.debug( "make_datastore running detector to find detections" )
                ds = p.detector.run(ds, session)
                ds.detections.save(verify_md5=False)
                if use_cache:
@@ -557,13 +593,14 @@ def make_datastore(
 
            cache_name = os.path.join(cache_dir, cache_sub_name + f'.cutouts_{prov.id[:6]}.h5')
            if use_cache and ( os.path.isfile(cache_name) ):
-                SCLogger.debug('loading cutouts from cache. ')
+                SCLogger.debug( 'make_datastore loading cutouts from cache.' )
                ds.cutouts = copy_from_cache(Cutouts, cache_dir, cache_name)
                ds.cutouts.provenance = prov
                ds.cutouts.sources = ds.detections
                ds.cutouts.load_all_co_data()  # sources must be set first
                ds.cutouts.save()  # make sure to save to archive as well
            else:  # cannot find cutouts on cache
+                SCLogger.debug( "make_datastore running cutter to create cutouts" )
                ds = p.cutter.run(ds, session)
                ds.cutouts.save()
                if use_cache:
@@ -584,7 +621,7 @@ def make_datastore(
 
            if use_cache and ( os.path.isfile(cache_name) ):
                # note that the cache contains ALL the measurements, not only the good ones
-                SCLogger.debug('loading measurements from cache. ')
+                SCLogger.debug( 'make_datastore loading measurements from cache.' )
                ds.all_measurements = copy_list_from_cache(Measurements, cache_dir, cache_name)
                [setattr(m, 'provenance', prov) for m in ds.all_measurements]
                [setattr(m, 'cutouts', ds.cutouts) for m in ds.all_measurements]
@@ -599,10 +636,12 @@ def make_datastore(
                [m.associate_object(session) for m in ds.measurements]  # create or find an object for each measurement
                # no need to save list because Measurements is not a FileOnDiskMixin!
            else:  # cannot find measurements on cache
+                SCLogger.debug( "make_datastore running measurer to create measurements" )
                ds = p.measurer.run(ds, session)
                if use_cache:
                    copy_list_to_cache(ds.all_measurements, cache_dir, cache_name)  # must provide filepath!
 
+            SCLogger.debug( "make_datastore running ds.save_and_commit after subtraction/etc" )
            ds.save_and_commit(session=session)
 
            return ds
diff --git a/tests/fixtures/ptf.py b/tests/fixtures/ptf.py
index b4d03b84..37029866 100644
--- a/tests/fixtures/ptf.py
+++ b/tests/fixtures/ptf.py
@@ -29,7 +29,7 @@
 from util.retrydownload import retry_download
 from util.logger import SCLogger
 
-from util.cache import copy_to_cache, copy_list_to_cache, copy_from_cache, copy_list_from_cache
+from util.cache import copy_to_cache, copy_from_cache
 from util.util import env_as_bool
 
@@ -360,6 +360,14 @@ def ptf_supernova_images(ptf_images_factory):
 def ptf_aligned_images(request, ptf_cache_dir, data_dir, code_version):
     cache_dir = os.path.join(ptf_cache_dir, 'aligned_images')
 
+    prov = Provenance(
+        code_version=code_version,
+        parameters={'alignment': {'method': 'swarp', 'to_index': 'last'}, 'test_parameter': 'test_value'},
+        upstreams=[],
+        process='coaddition',
+        is_testing=True,
+    )
+
     # try to load from cache
     if ( ( not env_as_bool( "LIMIT_CACHE_USAGE" ) ) and
          ( os.path.isfile(os.path.join(cache_dir, 'manifest.txt')) )
@@ -370,20 +378,26 @@ def ptf_aligned_images(request, ptf_cache_dir, data_dir, code_version):
         for filename in filenames:
             imfile, psffile, bgfile = filename.split()
             output_images.append(copy_from_cache(Image, cache_dir, imfile + '.image.fits'))
+            output_images[-1].provenance = prov
+            # Associate other objects
+            # BROKEN -- we don't set the provenance properly below!
+            #   Set the provenance_id to None to explicitly indicate
+            #   that we're not depending on the proper provenance
+            #   to happen to have the same id this time around as it
+            #   did when the cache was written.
             output_images[-1].psf = copy_from_cache(PSF, cache_dir, psffile + '.fits')
+            output_images[-1].psf.image = output_images[-1]
+            output_images[-1].psf.provenance_id = None
             output_images[-1].bg = copy_from_cache(Background, cache_dir, bgfile)
+            output_images[-1].bg.image = output_images[-1]
+            output_images[-1].bg.provenance_id = None
             output_images[-1].zp = copy_from_cache(ZeroPoint, cache_dir, imfile + '.zp')
+            output_images[-1].zp.sources_id = None  # This isn't right, but we don't have what we need
+            output_images[-1].zp.provenance_id = None
 
     else:  # no cache available
         ptf_reference_images = request.getfixturevalue('ptf_reference_images')
         images_to_align = ptf_reference_images
-        prov = Provenance(
-            code_version=code_version,
-            parameters={'alignment': {'method': 'swarp', 'to_index': 'last'}, 'test_parameter': 'test_value'},
-            upstreams=[],
-            process='coaddition',
-            is_testing=True,
-        )
         coadd_image = Image.from_images(images_to_align, index=-1)
         coadd_image.provenance = prov
         coadd_image.provenance_id = prov.id
@@ -522,16 +536,21 @@ def ptf_ref(
         coadd_image.sources = copy_from_cache(
             SourceList, ptf_cache_dir, cache_base_name + f'.sources_{sources_prov.id[:6]}.fits'
         )
+        # Make sure that any automated fields set in the database don't have
+        # the values they happened to have when the cache was created
+        coadd_image.sources.image = coadd_image
         coadd_image.sources.provenance = sources_prov
         assert coadd_image.sources.provenance_id == coadd_image.sources.provenance.id
 
         # get the PSF:
         coadd_image.psf = copy_from_cache(PSF, ptf_cache_dir, cache_base_name + f'.psf_{sources_prov.id[:6]}.fits')
+        coadd_image.psf.image = coadd_image
         coadd_image.psf.provenance = sources_prov
         assert coadd_image.psf.provenance_id == coadd_image.psf.provenance.id
 
         # get the background:
         coadd_image.bg = copy_from_cache(Background, ptf_cache_dir, cache_base_name +
                                          f'.bg_{sources_prov.id[:6]}.h5')
+        coadd_image.bg.image = coadd_image
         coadd_image.bg.provenance = sources_prov
         assert coadd_image.bg.provenance_id == coadd_image.bg.provenance.id
 
@@ -539,12 +558,14 @@ def ptf_ref(
         coadd_image.wcs = copy_from_cache(
             WorldCoordinates, ptf_cache_dir, cache_base_name + f'.wcs_{sources_prov.id[:6]}.txt'
         )
+        coadd_image.wcs.sources = coadd_image.sources
         coadd_image.wcs.provenance = sources_prov
         coadd_image.sources.wcs = coadd_image.wcs
         assert coadd_image.wcs.provenance_id == coadd_image.wcs.provenance.id
 
         # get the zero point:
         coadd_image.zp = copy_from_cache(ZeroPoint, ptf_cache_dir, cache_base_name + '.zp')
+        coadd_image.zp.sources = coadd_image.sources
         coadd_image.zp.provenance = sources_prov
         coadd_image.sources.zp = coadd_image.zp
         assert coadd_image.zp.provenance_id == coadd_image.zp.provenance.id
@@ -664,7 +685,6 @@ def ptf_subtraction1(ptf_ref, ptf_supernova_images, subtractor, ptf_cache_dir):
         im = copy_from_cache(Image, ptf_cache_dir, cache_path)
         im.upstream_images = [ptf_ref.image, ptf_supernova_images[0]]
         im.ref_image_id = ptf_ref.image.id
-        im.provenance = prov
 
     else:  # cannot find it on cache, need to produce it, using other fixtures
 
diff --git a/util/cache.py b/util/cache.py
index d93de5be..82350462 100644
--- a/util/cache.py
+++ b/util/cache.py
@@ -1,6 +1,20 @@
+# DO NOT USE THESE OUTSIDE OF TESTS IN tests/
+#
+# (The cache has some scariness to it, and we don't want
+# it built into the mainstream pipeline.)
+#
+# What's more, because of how it functions, tests will probably fail if
+# you don't empty the cache every time you reinitialize the database.
+# See Issue #339.
+
+# (The cache is still not useless, because if you run multiple tests,
+# the cache will be used internally to avoid recalculating stuff for
+# different tests.)
+
 import os
 import shutil
 import json
+import datetime
 
 from models.base import FileOnDiskMixin
 from util.logger import SCLogger
@@ -153,6 +167,14 @@ def copy_from_cache(cls, cache_dir, filepath):
     with its files but will not necessarily have the correct
     relationships to other objects.
 
+    WARNING : this caches database records to json files, including
+    values of things that are determined automatically (e.g. ids set
+    from database sequences) or that are foreign ids whose values depend
+    on the history of what was built into the database, and which may
+    well not be the same when the object is restored from the cache.  As
+    such, anything that uses this cache needs to very carefully go
+    through all fields like that and make sure that they're wiped!
+
     Parameters
     ----------
     cls : Class that derives from FileOnDiskMixin, or that implements from_dict(dict)
@@ -181,6 +203,20 @@ def copy_from_cache(cls, cache_dir, filepath):
 
     output = cls.from_dict(json_dict)
 
+    # COMMENTED OUT THE BLOCK BELOW.
+    # It's the right thing to do -- automatically assigned
+    # database attributes should *not* be restored
+    # from whatever they happened to be when the cache
+    # was written -- but it was leading to mysterious
+    # sqlalchemy errors elsewhere.
+    # if hasattr( output, 'id' ):
+    #     output.id = None
+    # now = datetime.datetime.now( tz=datetime.timezone.utc )
+    # if hasattr( output, 'created_at' ):
+    #     output.created_at = now
+    # if hasattr( output, 'modified' ):
+    #     output.modified = now
+
     # copy any associated files
     if isinstance(output, FileOnDiskMixin):
         # if fullpath ends in filepath_extensions[0]
@@ -212,6 +248,8 @@ def copy_list_from_cache(cls, cache_dir, filepath):
     it will be able to figure out where all the associated files are
     saved based on the filepath and extensions in the JSON file.
 
+    See the WARNING in copy_from_cache docstring.
+
     Parameters
     ----------
     cls: Class that derives from FileOnDiskMixin, or that implements from_dict(dict)
@@ -225,6 +263,7 @@ def copy_list_from_cache(cls, cache_dir, filepath):
     -------
     output: list
         The list of reconstructed objects, of the same type as the class.
+
     """
     # allow user to give an absolute path, so long as it is in the cache dir
     if filepath.startswith(cache_dir):
@@ -239,8 +278,16 @@ def copy_list_from_cache(cls, cache_dir, filepath):
         json_list = json.load(fp)
 
     output = []
+    now = datetime.datetime.now( tz=datetime.timezone.utc )
     for obj_dict in json_list:
-        output.append(cls.from_dict(obj_dict))
+        newobj = cls.from_dict( obj_dict )
+        if hasattr( newobj, 'id' ):
+            newobj.id = None
+        if hasattr( newobj, 'created_at' ):
+            newobj.created_at = now
+        if hasattr( newobj, 'modified' ):
+            newobj.modified = now
+        output.append( newobj )
 
     if len(output) == 0:
         return []