Remove lephare keywords from stage params to unblock pipeline creation.
drewoldag committed May 14, 2024
1 parent 62fcb38 commit 2ca49aa
Showing 3 changed files with 34 additions and 30 deletions.
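The in-code comment ("We must make a string dictionary to allow pickling and saving") points at the underlying issue: lephare keyword objects held in stage params do not pickle, which blocked saving a RailPipeline to YAML. The fix keeps plain string values in the stage params and converts them to a lephare keymap only where the lephare library is actually called. A minimal sketch of that round trip (assuming lp.string_dict_to_keymap and lp.keymap_to_string_dict invert each other on plain string dicts, as this diff implies; the values are illustrative):

import lephare as lp

# Plain string dict: picklable, so it is safe to hold in stage params and pipeline YAML.
gal_overrides = {
    "LIB_ASCII": "YES",
    "EM_LINES": "EMP_UV",
}

# Convert to a lephare keymap only when calling into the library (e.g. lp.prepare / lp.process).
keymap = lp.string_dict_to_keymap(gal_overrides)

# ...and back to plain strings whenever the config needs to be pickled or written out.
as_strings = lp.keymap_to_string_dict(keymap)
print(as_strings)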
53 changes: 24 additions & 29 deletions src/rail/estimation/algos/lephare.py
@@ -29,38 +29,34 @@ class LephareInformer(CatInformer):
redshift_col=SHARED_PARAMS,
lephare_config=Param(
dict,
- lp.read_config(
+ lp.keymap_to_string_dict(lp.read_config(
"{}/{}".format(os.path.dirname(os.path.abspath(__file__)), "lsst.para")
- ),
+ )),
msg="The lephare config keymap.",
),
star_config=Param(
dict,
- dict(LIB_ASCII=lp.keyword("LIB_ASCII", "YES")),
+ dict(LIB_ASCII="YES"),
msg="Star config overrides.",
),
gal_config=Param(
dict,
dict(
- LIB_ASCII=lp.keyword("LIB_ASCII", "YES"),
- MOD_EXTINC=lp.keyword("MOD_EXTINC", "18,26,26,33,26,33,26,33"),
- EXTINC_LAW=lp.keyword(
-     "EXTINC_LAW",
-     "SMC_prevot.dat,SB_calzetti.dat,"
-     "SB_calzetti_bump1.dat,SB_calzetti_bump2.dat",
- ),
- EM_LINES=lp.keyword("EM_LINES", "EMP_UV"),
- EM_DISPERSION=lp.keyword("EM_DISPERSION", "0.5,0.75,1.,1.5,2."),
+ LIB_ASCII="YES",
+ MOD_EXTINC="18,26,26,33,26,33,26,33",
+ EXTINC_LAW="SMC_prevot.dat,SB_calzetti.dat,SB_calzetti_bump1.dat,SB_calzetti_bump2.dat",
+ EM_LINES="EMP_UV",
+ EM_DISPERSION="0.5,0.75,1.,1.5,2.",
),
msg="Galaxy config overrides.",
),
qso_config=Param(
dict,
dict(
- LIB_ASCII=lp.keyword("LIB_ASCII", "YES"),
- MOD_EXTINC=lp.keyword("MOD_EXTINC", "0,1000"),
- EB_V=lp.keyword("EB_V", "0.,0.1,0.2,0.3"),
- EXTINC_LAW=lp.keyword("EXTINC_LAW", "SB_calzetti.dat"),
+ LIB_ASCII="YES",
+ MOD_EXTINC="0,1000",
+ EB_V="0.,0.1,0.2,0.3",
+ EXTINC_LAW="SB_calzetti.dat",
),
msg="QSO config overrides.",
),
@@ -104,10 +100,10 @@ def run(self):

# The three main lephare specific inform tasks
lp.prepare(
- self.lephare_config,
- star_config=self.config["star_config"],
- gal_config=self.config["gal_config"],
- qso_config=self.config["qso_config"],
+ lp.string_dict_to_keymap(self.lephare_config),
+ star_config=lp.string_dict_to_keymap(self.config["star_config"]),
+ gal_config=lp.string_dict_to_keymap(self.config["gal_config"]),
+ qso_config=lp.string_dict_to_keymap(self.config["qso_config"]),
)

# Spectroscopic redshifts
@@ -117,15 +113,15 @@
input = _rail_to_lephare_input(
training_data, self.config.bands, self.config.err_bands
)
- if self.config["lephare_config"]["AUTO_ADAPT"].value == "YES":
+ if self.config["lephare_config"]["AUTO_ADAPT"] == "YES":
a0, a1 = lp.calculate_offsets(self.config["lephare_config"], input)
offsets = [a0, a1]
else:
offsets = None
# We must make a string dictionary to allow pickling and saving
config_text_dict = dict()
for k in self.config["lephare_config"]:
- config_text_dict[k] = self.config["lephare_config"][k].value
+ config_text_dict[k] = self.config["lephare_config"][k]
# Give principle inform config 'model' to instance.
self.model = dict(
lephare_config=config_text_dict, offsets=offsets, run_dir=self.run_dir
@@ -149,9 +145,9 @@ class LephareEstimator(CatEstimator):
redshift_col=SHARED_PARAMS,
lephare_config=Param(
dict,
- lp.read_config(
+ lp.keymap_to_string_dict(lp.read_config(
"{}/{}".format(os.path.dirname(os.path.abspath(__file__)), "lsst.para")
- ),
+ )),
msg="The lephare config keymap.",
),
output_keys=Param(
@@ -181,8 +177,7 @@ class LephareEstimator(CatEstimator):
def __init__(self, args, comm=None):
CatEstimator.__init__(self, args, comm=comm)
self.lephare_config = self.config["lephare_config"]
- self.photz = lp.PhotoZ(self.lephare_config)
- Z_STEP = self.lephare_config["Z_STEP"].value
+ Z_STEP = self.lephare_config["Z_STEP"]
self.zstep = float(Z_STEP.split(",")[0])
self.zmin = float(Z_STEP.split(",")[1])
self.zmax = float(Z_STEP.split(",")[2])
@@ -205,12 +200,12 @@ def _process_chunk(self, start, end, data, first):
# Set the desired offsets estimate config overide lephare config overide inform offsets
if self.config["offsets"]:
offsets = self.config["offsets"]
- elif self.config["lephare_config"]["AUTO_ADAPT"].value == "YES":
-     a0, a1 = lp.calculate_offsets(self.config["lephare_config"], input)
+ elif self.config["lephare_config"]["AUTO_ADAPT"] == "YES":
+     a0, a1 = lp.calculate_offsets(lp.string_dict_to_keymap(self.lephare_config), input)
offsets = [a0, a1]
elif not self.config["offsets"]:
offsets = self.model["offsets"]
- output, pdfs, zgrid = lp.process(self.lephare_config, input, offsets=offsets)
+ output, pdfs, zgrid = lp.process(lp.string_dict_to_keymap(self.lephare_config), input, offsets=offsets)
self.zgrid = zgrid

ng = data[self.config.bands[0]].shape[0]
2 changes: 1 addition & 1 deletion tests/lephare/test_algos.py
@@ -42,7 +42,7 @@ def test_informer_and_estimator(test_data_dir: str):
nondetect_val=np.nan,
model="lephare.pkl",
hdf5_groupname="",
- lephare_config=lephare_config,
+ lephare_config=lp.keymap_to_string_dict(lephare_config),
)

inform_lephare.inform(traindata_io)
9 changes: 9 additions & 0 deletions tests/lephare/test_lephare_pipeline.py
@@ -0,0 +1,9 @@
from rail.lephare import *
from rail.core.stage import RailPipeline

def test_lephare_pipeline():
"""Simple test to ensure that a lephare pipeline can be created and saved."""
lephare = LephareInformer.make_stage(name="lephare_inform")
pipe = RailPipeline()
pipe.add_stage(lephare)
pipe.save('dummy.yml')
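
With plain string params, per-stage config overrides can be passed directly and the resulting pipeline still serializes. A hedged sketch in the spirit of the test above (the stage name, override value, and output filename are illustrative, and it assumes make_stage forwards keyword arguments as config overrides):

from rail.lephare import LephareInformer
from rail.core.stage import RailPipeline

# Override a stage param with a plain string dict -- no lp.keyword objects required.
inform = LephareInformer.make_stage(
    name="lephare_inform_custom",
    star_config={"LIB_ASCII": "YES"},
)

pipe = RailPipeline()
pipe.add_stage(inform)
pipe.save("custom_lephare_pipeline.yml")  # the save step that unpicklable keywords previously blocked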
