From 90212d233384d938000d9b3ddb176a694006ce7f Mon Sep 17 00:00:00 2001 From: wanqqq31 Date: Fri, 23 Aug 2024 16:36:04 +0900 Subject: [PATCH 1/5] add pointing simulation status & show target list on admin --- src/pfs_target_uploader/pn_app.py | 44 +++++++++---------- src/pfs_target_uploader/utils/io.py | 26 +++++++++-- src/pfs_target_uploader/utils/ppp.py | 18 +++++--- .../widgets/PppResultWidgets.py | 10 ++++- 4 files changed, 65 insertions(+), 33 deletions(-) diff --git a/src/pfs_target_uploader/pn_app.py b/src/pfs_target_uploader/pn_app.py index 57c9810..c12780f 100644 --- a/src/pfs_target_uploader/pn_app.py +++ b/src/pfs_target_uploader/pn_app.py @@ -391,6 +391,7 @@ def cb_PPP(event): try: panel_ppp.origname = panel_input.file_input.filename + panel_ppp.origname_ppc = panel_ppcinput.file_input.filename panel_ppp.origdata = panel_input.file_input.value panel_ppp.df_summary = panel_status.df_summary @@ -496,14 +497,23 @@ def cb_submit(event): panel_ppp.df_input = df_validated panel_ppp.df_summary = panel_status.df_summary panel_ppp.origname = panel_input.file_input.filename + panel_ppp.origname_ppc = panel_ppcinput.file_input.filename panel_ppp.origdata = panel_input.file_input.value panel_ppp.upload_time = datetime.now(timezone.utc) panel_ppp.secret_token = panel_input.secret_token + if panel_ppp.status_ == 2: + ppc_status_ = "user" + elif panel_ppp.status_ == 0: + ppc_status_ = "skip" + else: + ppc_status_ = "auto" + outdir, outfile_zip, _ = panel_ppp.upload( outdir_prefix=config["OUTPUT_DIR"], single_exptime=panel_obs_type.single_exptime.value, observation_type=panel_obs_type.obs_type.value, + ppc_status=ppc_status_, ) try: @@ -578,6 +588,8 @@ def list_files_app(use_panel_cli=False): logger.info(f"config params from dotenv: {config}") + panel_targets = TargetWidgets() + if not os.path.exists(config["OUTPUT_DIR"]): logger.error(f"{config['OUTPUT_DIR']} not found") raise ValueError @@ -642,24 +654,7 @@ def list_files_app(use_panel_cli=False): step=1, ) - # setup panel components - # Target & psl summary table - - """def execute_javascript(script): - script = f'' - js_panel.object = script - js_panel.object = "" - - def open_panel_download(event): - if event.column == "download": - p_href = df_files_tgt["fullpath"][event.row].replace( - config["OUTPUT_DIR"], "data", 1 - ) - # c.f. 
https://www.w3schools.com/jsref/met_win_open.asp - script = f"window.open('{p_href}', '_blank')" - execute_javascript(script)#""" - def Table_files_tgt_psl(column_checkbox_): if psl_info_input.value is not None: df_psl_info = load_input( @@ -687,7 +682,10 @@ def Table_files_tgt_psl(column_checkbox_): frozen_columns=["index"], pagination="remote", header_filters=True, - buttons={"magnify": "", "download": ""}, + buttons={ + "magnify": "", + "download": "", + }, layout="fit_data_table", hidden_columns=_hidden_columns, disabled=True, @@ -732,7 +730,7 @@ def open_panel_magnify(event): table_ppc.clear() # move to "PPC details" tab - tab_panels.active = 1 + tab_panels.active = 2 u_id = _df_files_tgt_psl["Upload ID"][row_target] p_ppc = os.path.split(_df_files_tgt_psl["fullpath_psl"][row_target])[0] @@ -751,6 +749,8 @@ def open_panel_magnify(event): except FileNotFoundError: table_tac_t = Table() + panel_targets.show_results(Table.to_pandas(table_tgt_t)) + ( nppc_fin, p_result_fig_fin, @@ -772,7 +772,6 @@ def open_panel_magnify(event): raise ValueError path_t_server = path_t_all[0] - tac_ppc_list_file_server = f"{path_t_server}/TAC_ppc_{u_id}.ecsv" path_t = path_t_server.replace(config["OUTPUT_DIR"], "data", 1) tac_ppc_list_file = f"{path_t}/TAC_ppc_{u_id}.ecsv" @@ -941,8 +940,8 @@ def tab_ppc_save(event): "timestamp", "TAC_FH_L", "TAC_FH_M", - "TAC_nppc_L", - "TAC_nppc_M", + "observation_type", + "pointing_status", ], options=list(df_files_tgt_psl.columns) + ["proposal ID", "PI name", "rank", "grade"], @@ -992,6 +991,7 @@ def tab_ppc_save(event): js_panel, ), ), + ("Target list", panel_targets.pane), ("PPC details", table_ppc), ) diff --git a/src/pfs_target_uploader/utils/io.py b/src/pfs_target_uploader/utils/io.py index 8c249ed..ef185eb 100644 --- a/src/pfs_target_uploader/utils/io.py +++ b/src/pfs_target_uploader/utils/io.py @@ -129,6 +129,7 @@ def upload_file( ppp_fig, outdir_prefix=".", origname="example.csv", + origname_ppc=None, origdata=None, secret_token=None, upload_time=None, @@ -137,6 +138,7 @@ def upload_file( skip_subdirectories=False, single_exptime=900, observation_type="queue", + ppc_status="auto", ): # use the current UTC time and random hash string to construct an output filename if upload_time is None: @@ -238,11 +240,13 @@ def upload_file( # add metadata obj.meta["original_filename"] = origname if not export: + obj.meta["original_filename_ppc"] = origname_ppc obj.meta["upload_id"] = secret_token obj.meta["upload_at"] = upload_time obj.meta["ppp_status"] = ppp_status obj.meta["single_exptime"] = single_exptime obj.meta["observation_type"] = observation_type + obj.meta["ppc_status"] = ppc_status filename = f"{file_prefix}_{secret_token}.ecsv" elif type == "figure": filename = f"{file_prefix}_{secret_token}.html" @@ -329,6 +333,7 @@ def load_file_properties(datadir, ext="ecsv", n_uid=16): n_files = len(dirs) orignames = np.full(n_files, None, dtype=object) + orignames_ppc = np.full(n_files, None, dtype=object) upload_ids = np.full(n_files, None, dtype=object) timestamps = np.full(n_files, None, dtype="datetime64[s]") filesizes = np.zeros(n_files, dtype=float) @@ -353,6 +358,7 @@ def load_file_properties(datadir, ext="ecsv", n_uid=16): tac_rot_m = np.zeros(n_files, dtype=float) single_exptime = np.full(n_files, 900, dtype=int) observation_type = np.full(n_files, None, dtype=object) + ppc_status = np.full(n_files, None, dtype=object) for i, d in enumerate(dirs): uid = d[-n_uid:] @@ -388,14 +394,14 @@ def load_file_properties(datadir, ext="ecsv", n_uid=16): orignames[i] = None try: 
- upload_ids[i] = tb_target.meta["upload_id"] + orignames_ppc[i] = tb_target.meta["original_filename_ppc"] except KeyError: - upload_ids[i] = None + orignames_ppc[i] = None try: - observation_type[i] = tb_target.meta["observation_type"] + upload_ids[i] = tb_target.meta["upload_id"] except KeyError: - observation_type[i] = None + upload_ids[i] = None try: if isinstance(tb_target.meta["upload_at"], str): @@ -410,6 +416,16 @@ def load_file_properties(datadir, ext="ecsv", n_uid=16): except KeyError: pass + try: + observation_type[i] = tb_target.meta["observation_type"] + except KeyError: + observation_type[i] = None + + try: + ppc_status[i] = tb_target.meta["ppc_status"] + except KeyError: + ppc_status[i] = None + n_obj[i] = tb_target["ob_code"].size t_exp[i] = np.sum(tb_target["exptime"]) / 3600.0 @@ -474,6 +490,8 @@ def load_file_properties(datadir, ext="ecsv", n_uid=16): "fullpath_psl": fullpath_psl, "single_exptime": single_exptime, "observation_type": observation_type, + "pointing_status": ppc_status, + "Filename_pointing": orignames_ppc, } ) diff --git a/src/pfs_target_uploader/utils/ppp.py b/src/pfs_target_uploader/utils/ppp.py index 96ce61d..82a8624 100644 --- a/src/pfs_target_uploader/utils/ppp.py +++ b/src/pfs_target_uploader/utils/ppp.py @@ -895,8 +895,6 @@ def netflow_iter(uS, obj_allo, weight_para, starttime, exetime): # if total number of ppc > max_nppc (~5 nights), then directly stop """ - status = 999 - if sum(uS["exptime_assign"] == uS["exptime_PPP"]) == len(uS): # remove ppc with no fiber assignment obj_allo.remove_rows(np.where(obj_allo["tel_fiber_usage_frac"] == 0)[0]) @@ -924,7 +922,9 @@ def netflow_iter(uS, obj_allo, weight_para, starttime, exetime): uS_t1["exptime_PPP"] - uS_t1["exptime_assign"] ) # remained exposure time - uS_t2 = PPP_centers(uS_t1, [], True, weight_para, starttime, exetime)[0] + uS_t2, status = PPP_centers( + uS_t1, [], True, weight_para, starttime, exetime + ) obj_allo_t = netflowRun(uS_t2) @@ -1100,7 +1100,6 @@ def ppp_result( sub_m, obj_allo_m, uS_M2, - uPPC, single_exptime=900, d_pfi=1.38, box_width=1200.0, @@ -1130,7 +1129,6 @@ def ppp_result( ) def overheads(n_sci_frame): - t_night_hours: float = 10.0 # [h] total observing time per night # in seconds @@ -1199,7 +1197,7 @@ def ppp_plotFig(RESmode, cR, sub, obj_allo, uS): "Point_" + RESmode + "_" + str(count) for count in (np.arange(0, len(obj_allo), 1) + 1) ] - + obj_allo1 = obj_allo1.group_by("ppc_code") obj_allo1.rename_column("tel_fiber_usage_frac", "Fiber usage fraction (%)") obj_allo2 = Table.to_pandas(obj_allo1) @@ -1750,6 +1748,8 @@ def rot2nppc(rot): else: admin_slider_ini_value = nppc_usr.data[0] + legend_cols = 2 if len(sub) >= 6 else 1 + nppc = pn.widgets.EditableFloatSlider( name=(f"{RESmode.capitalize()}-resolution mode (ROT / hour)"), format="1[.]000", @@ -1913,6 +1913,7 @@ def update_ppp_figures(nppc_fin): show_grid=True, shared_axes=False, height=int(plot_height * 0.5), + legend_cols=legend_cols, ) else: p_ppc_tot = (p_tgt).opts( @@ -1926,6 +1927,7 @@ def update_ppp_figures(nppc_fin): show_grid=True, shared_axes=False, height=plot_height, + legend_cols=legend_cols, ) # update completion rates as a function of PPC ID @@ -1949,6 +1951,8 @@ def update_ppp_figures(nppc_fin): toolbar="left", active_tools=["box_zoom"], height=int(plot_height * 0.5), + legend_cols=legend_cols, + legend_offset=(10, -30), ) p_comp_tot_n = ( @@ -1961,6 +1965,8 @@ def update_ppp_figures(nppc_fin): toolbar="left", active_tools=["box_zoom"], height=int(plot_height * 0.5), + legend_cols=legend_cols, + 
legend_offset=(10, -30), ) """ diff --git a/src/pfs_target_uploader/widgets/PppResultWidgets.py b/src/pfs_target_uploader/widgets/PppResultWidgets.py index 6b6e251..be08704 100644 --- a/src/pfs_target_uploader/widgets/PppResultWidgets.py +++ b/src/pfs_target_uploader/widgets/PppResultWidgets.py @@ -30,11 +30,13 @@ def __init__( self.df_input = None self.df_summary = None self.origname = None + self.origname_ppc = None self.origdata = None self.upload_time = None self.secret_token = None self.exetime: int = exetime self.max_nppc = max_nppc + self.status_ = 0 self.ppp_title = pn.pane.Markdown( """# Results of PFS pointing simulation""", @@ -91,9 +93,11 @@ def reset(self): self.df_input = None self.df_summary = None self.origname = None + self.origname_ppc = None self.origdata = None self.upload_time = None self.secret_token = None + self.status_ = 0 def show_results(self): logger.info("showing PPP results") @@ -184,9 +188,11 @@ def stream_export_files(df_psl, df_ppc, p_fig): self.df_summary, p_fig, origname=self.origname, + origname_ppc=self.origname_ppc, origdata=self.origdata, export=True, ) + self.export_button.filename = outfile_zip return sio @@ -333,7 +339,6 @@ def run_ppp( sub_m, obj_allo_M_fin, uS_M2, - tb_ppc, single_exptime=self.single_exptime, box_width=self.box_width, ) @@ -346,6 +351,7 @@ def upload( export=False, single_exptime=None, observation_type="queue", + ppc_status="auto", ): if single_exptime is None: single_exptime = self.single_exptime @@ -369,12 +375,14 @@ def upload( ppp_fig, outdir_prefix=outdir_prefix, origname=self.origname, + origname_ppc=self.origname_ppc, origdata=self.origdata, secret_token=self.secret_token, upload_time=self.upload_time, ppp_status=self.ppp_status, single_exptime=single_exptime, observation_type=observation_type, + ppc_status=ppc_status, ) return outdir, outfile_zip, None From 54461295894088f63bbffd759ca282e0e8da70b3 Mon Sep 17 00:00:00 2001 From: wanqqq31 <74391454+wanqqq31@users.noreply.github.com> Date: Sat, 24 Aug 2024 02:23:12 +0900 Subject: [PATCH 2/5] re-scale cost function for netflow setting [related to pfs-obs-help 57] cost function in some cases is too small for netflow to assign fibers, resulting in lower fiber allocation efficiency. This can happen especially for sample lists including targets asking for long exptime, say >7200s. 
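For context, a back-of-the-envelope sketch of the failure mode (hypothetical numbers, not the actual netflow objective built in netflowRun_single): when the gain for assigning one more fiber is small compared to the slack allowed by the MIP gap, Gurobi can declare the solution close enough to optimal while fibers are still left unassigned. Zeroing the secondary weights and the cobra move cost, and tightening mipgap from 5.0e-2 to 1.0e-4, removes that slack.

    # Hypothetical scales for illustration only.
    n_targets = 1000
    gain_per_assignment = 2.0                            # order of the leading weight in weight_para
    objective_scale = n_targets * gain_per_assignment    # ~2000

    slack_old = 5.0e-2 * objective_scale  # mipgap=5e-2 -> ~100 of objective may stay unresolved
    slack_new = 1.0e-4 * objective_scale  # mipgap=1e-4 -> ~0.2, below a single assignment

    print(slack_old / gain_per_assignment)  # 50.0 -> tens of assignments can be dropped
    print(slack_new / gain_per_assignment)  # 0.1  -> every assignment has to be resolved
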
--- src/pfs_target_uploader/utils/ppp.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/pfs_target_uploader/utils/ppp.py b/src/pfs_target_uploader/utils/ppp.py index 82a8624..4950d9f 100644 --- a/src/pfs_target_uploader/utils/ppp.py +++ b/src/pfs_target_uploader/utils/ppp.py @@ -62,7 +62,7 @@ def PPPrunStart( ppp_quiet = quiet if weight_para is None: - weight_para = [2.02, 0.01, 0.01] + weight_para = [2.0, 0.0, 0.0] is_exetime = (exetime is not None) and (exetime > 0) is_nppc = (max_nppc is not None) and (max_nppc > 0) @@ -551,7 +551,7 @@ class of targets with costs def cobraMoveCost(dist): """optional: penalize assignments where the cobra has to move far out""" - return 0.1 * dist + return 0.0 * dist def netflowRun_single(Tel, sample, otime="2024-05-20T08:00:00Z"): """run netflow (without iteration) @@ -591,7 +591,7 @@ def netflowRun_single(Tel, sample, otime="2024-05-20T08:00:00Z"): degenmoves=0, heuristics=0.8, mipfocus=0, - mipgap=5.0e-2, + mipgap=1.0e-4, LogToConsole=0, ) From 1dd2cbfaa2c7fc45fce9542081197c0c68c4b6f8 Mon Sep 17 00:00:00 2001 From: wanqqq31 <74391454+wanqqq31@users.noreply.github.com> Date: Sat, 24 Aug 2024 02:24:44 +0900 Subject: [PATCH 3/5] re-scale cost for netflow setting [related to pfs-obs-help 57] --- src/pfs_target_uploader/widgets/PppResultWidgets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pfs_target_uploader/widgets/PppResultWidgets.py b/src/pfs_target_uploader/widgets/PppResultWidgets.py index be08704..4711833 100644 --- a/src/pfs_target_uploader/widgets/PppResultWidgets.py +++ b/src/pfs_target_uploader/widgets/PppResultWidgets.py @@ -286,7 +286,7 @@ def run_ppp( quiet=True, ): if weights is None: - weights = [2.02, 0.01, 0.01] + weights = [2.0, 0.0, 0.0] self.df_input = df self.df_input["single_exptime"] = single_exptime From 4648288a611ce5eca70d3dee0e54d41678c18ddd Mon Sep 17 00:00:00 2001 From: Masato Onodera Date: Fri, 23 Aug 2024 08:44:23 -1000 Subject: [PATCH 4/5] Save the original ppc list in the archive --- src/pfs_target_uploader/pn_app.py | 2 + src/pfs_target_uploader/utils/io.py | 51 ++++++++++++++----- .../widgets/PppResultWidgets.py | 4 ++ 3 files changed, 43 insertions(+), 14 deletions(-) diff --git a/src/pfs_target_uploader/pn_app.py b/src/pfs_target_uploader/pn_app.py index c12780f..87c85da 100644 --- a/src/pfs_target_uploader/pn_app.py +++ b/src/pfs_target_uploader/pn_app.py @@ -393,6 +393,7 @@ def cb_PPP(event): panel_ppp.origname = panel_input.file_input.filename panel_ppp.origname_ppc = panel_ppcinput.file_input.filename panel_ppp.origdata = panel_input.file_input.value + panel_ppp.origdata_ppc = panel_input.file_input.value panel_ppp.df_summary = panel_status.df_summary if not validation_status["status"]: @@ -499,6 +500,7 @@ def cb_submit(event): panel_ppp.origname = panel_input.file_input.filename panel_ppp.origname_ppc = panel_ppcinput.file_input.filename panel_ppp.origdata = panel_input.file_input.value + panel_ppp.origdata_ppc = panel_input.file_input.value panel_ppp.upload_time = datetime.now(timezone.utc) panel_ppp.secret_token = panel_input.secret_token diff --git a/src/pfs_target_uploader/utils/io.py b/src/pfs_target_uploader/utils/io.py index ef185eb..86a4b1a 100644 --- a/src/pfs_target_uploader/utils/io.py +++ b/src/pfs_target_uploader/utils/io.py @@ -131,6 +131,7 @@ def upload_file( origname="example.csv", origname_ppc=None, origdata=None, + origdata_ppc=None, secret_token=None, upload_time=None, ppp_status=True, @@ -223,7 +224,7 @@ def upload_file( } for file_prefix, 
obj, type in zip( - ["target", "target_summary", "psl", "ppc", "ppp_figure", "", ""], + ["target", "target_summary", "psl", "ppc", "ppp_figure", "", "", ""], [ tb_target, tb_target_summary, @@ -231,35 +232,53 @@ def upload_file( tb_ppc, ppp_fig, origdata, + origdata_ppc, generate_readme_text(), ], - ["table", "table", "table", "table", "figure", "original", "readme"], + [ + "table", + "table", + "table", + "table", + "figure", + "original", + "original_ppc", + "readme", + ], ): logger.info(f"Adding metadata to {file_prefix} file") if type == "table": # add metadata obj.meta["original_filename"] = origname + obj.meta["original_filename_ppc"] = origname_ppc + obj.meta["ppp_status"] = ppp_status + obj.meta["single_exptime"] = single_exptime + obj.meta["observation_type"] = observation_type + obj.meta["ppc_status"] = ppc_status if not export: - obj.meta["original_filename_ppc"] = origname_ppc + # add upload-related metadata obj.meta["upload_id"] = secret_token obj.meta["upload_at"] = upload_time - obj.meta["ppp_status"] = ppp_status - obj.meta["single_exptime"] = single_exptime - obj.meta["observation_type"] = observation_type - obj.meta["ppc_status"] = ppc_status filename = f"{file_prefix}_{secret_token}.ecsv" elif type == "figure": filename = f"{file_prefix}_{secret_token}.html" elif type == "original": filename = origname + elif type == "original_ppc": + filename = origname_ppc elif type == "readme": filename = "README.txt" outfiles_dict["filename"].append(filename) outfiles_dict["object"].append(obj) outfiles_dict["type"].append(type) - outfiles_dict["absname"].append(os.path.join(outdir, filename)) - outfiles_dict["arcname"].append(os.path.join(outfile_zip_prefix, filename)) + + outfiles_dict["absname"].append( + os.path.join(outdir, filename) if filename is not None else None + ) + outfiles_dict["arcname"].append( + os.path.join(outfile_zip_prefix, filename) if filename is not None else None + ) outdir, outfile_zip, sio = upload_write( outfiles_dict, outfile_zip_prefix, outdir, export=export @@ -279,6 +298,8 @@ def upload_write(outfiles_dict, outfile_zip_prefix, outdir, export=False): if export: dest = StringIO() else: + if outfiles_dict["filename"][i] is None: + continue dest = os.path.join(outdir, outfiles_dict["filename"][i]) if outfiles_dict["type"][i] == "table": @@ -308,14 +329,16 @@ def upload_write(outfiles_dict, outfile_zip_prefix, outdir, export=False): arcname = outfiles_dict["arcname"][i] if export: - if outfiles_dict["type"][i] == "original": - zipfile.writestr(arcname, outfiles_dict["object"][i]) + if outfiles_dict["type"][i] in ["original", "original_ppc"]: + if outfiles_dict["filename"][i] is not None: + zipfile.writestr(arcname, outfiles_dict["object"][i]) else: zipfile.writestr(arcname, dest.getvalue()) else: - if outfiles_dict["type"][i] == "original": - with open(dest, "wb") as f: - f.write(outfiles_dict["object"][i]) + if outfiles_dict["type"][i] in ["original", "original_ppc"]: + if outfiles_dict["filename"][i] is not None: + with open(dest, "wb") as f: + f.write(outfiles_dict["object"][i]) zipfile.write(absname, arcname=outfiles_dict["arcname"][i]) logger.info(f"File {outfiles_dict['filename'][i]} is saved under {outdir}.") diff --git a/src/pfs_target_uploader/widgets/PppResultWidgets.py b/src/pfs_target_uploader/widgets/PppResultWidgets.py index 4711833..54bb9a3 100644 --- a/src/pfs_target_uploader/widgets/PppResultWidgets.py +++ b/src/pfs_target_uploader/widgets/PppResultWidgets.py @@ -32,6 +32,7 @@ def __init__( self.origname = None self.origname_ppc = None 
self.origdata = None + self.origdata_ppc = None self.upload_time = None self.secret_token = None self.exetime: int = exetime @@ -95,6 +96,7 @@ def reset(self): self.origname = None self.origname_ppc = None self.origdata = None + self.origdata_ppc = None self.upload_time = None self.secret_token = None self.status_ = 0 @@ -190,6 +192,7 @@ def stream_export_files(df_psl, df_ppc, p_fig): origname=self.origname, origname_ppc=self.origname_ppc, origdata=self.origdata, + origdata_ppc=self.origdata_ppc, export=True, ) @@ -377,6 +380,7 @@ def upload( origname=self.origname, origname_ppc=self.origname_ppc, origdata=self.origdata, + origdata_ppc=self.origdata_ppc, secret_token=self.secret_token, upload_time=self.upload_time, ppp_status=self.ppp_status, From 382c50ccbbed8d78b82769fd947a7925af680cbb Mon Sep 17 00:00:00 2001 From: Masato Onodera Date: Fri, 23 Aug 2024 09:00:42 -1000 Subject: [PATCH 5/5] wip: fix target file download in admin page --- src/pfs_target_uploader/pn_app.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/pfs_target_uploader/pn_app.py b/src/pfs_target_uploader/pn_app.py index 87c85da..572371a 100644 --- a/src/pfs_target_uploader/pn_app.py +++ b/src/pfs_target_uploader/pn_app.py @@ -722,8 +722,12 @@ def execute_javascript(script): def open_panel_download(event): if event.column == "download": href = df_files_tgt_psl["fullpath_tgt"][event.row] + href_mod = href.replace(config["OUTPUT_DIR"], "data", 1) + logger.info(f"{href=}") + logger.info(f"{href_mod=}") + # c.f. https://www.w3schools.com/jsref/met_win_open.asp - script = f"window.open('{href}', '_blank')" + script = f"window.open('{href_mod}', '_blank')" execute_javascript(script) def open_panel_magnify(event):
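
As a reading aid for the last patch, a minimal sketch with made-up paths (the real OUTPUT_DIR comes from the dotenv config loaded in list_files_app): the admin table stores the absolute server-side path of the target file, while the browser reaches it under the "data" prefix, so the directory prefix has to be swapped before the URL is handed to window.open.

    # Hypothetical values for illustration; not taken from the repository's config.
    config = {"OUTPUT_DIR": "/work/pfs/uploads"}
    href = "/work/pfs/uploads/20240823-123456-abcdef01/target_abcdef01.ecsv"

    # Same prefix substitution as in open_panel_download(); only the first match is replaced.
    href_mod = href.replace(config["OUTPUT_DIR"], "data", 1)
    script = f"window.open('{href_mod}', '_blank')"

    print(href_mod)  # data/20240823-123456-abcdef01/target_abcdef01.ecsv
    print(script)    # window.open('data/20240823-123456-abcdef01/target_abcdef01.ecsv', '_blank')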