diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ea83283..94c65a7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ repos: - id: trailing-whitespace - id: check-toml - repo: https://github.com/psf/black - rev: 23.11.0 + rev: 23.12.0 hooks: - id: black # It is recommended to specify the latest version of Python @@ -18,12 +18,16 @@ repos: # https://pre-commit.com/#top_level-default_language_version language_version: python3.11 - repo: https://github.com/pycqa/isort - rev: 5.12.0 + rev: 5.13.2 hooks: - id: isort name: isort (python) - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.1.6 + rev: v0.1.8 hooks: - id: ruff + - repo: https://github.com/numpy/numpydoc + rev: "v1.6.0" + hooks: + - id: numpydoc-validation diff --git a/pyproject.toml b/pyproject.toml index a84b993..f009fe9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -150,3 +150,21 @@ max-doc-length = 79 [tool.ruff.pydocstyle] convention = "numpy" + +[tool.numpydoc_validation] +checks = [ + "all", # All except the rules listed below. + "SA01", # See Also section. + "EX01", # Example section. + "SS06", # Summary can go into second line. + "GL01", # Summary text can start on same line as """ + "GL08", # Do not require docstring. + "ES01", # No extended summary required. + "RT01", # Unfortunately our @property trigger this. + "RT02", # Does not want named return value. DM style says we do. + "SS05", # pydocstyle is better at finding infinitive verb. +] +exclude = [ + '^__init__$', + '\._[a-zA-Z_]+$', # Private methods. 
+] diff --git a/python/lsst/ctrl/bps/htcondor/htcondor_service.py b/python/lsst/ctrl/bps/htcondor/htcondor_service.py index 4ecb626..826b1e2 100644 --- a/python/lsst/ctrl/bps/htcondor/htcondor_service.py +++ b/python/lsst/ctrl/bps/htcondor/htcondor_service.py @@ -647,9 +647,9 @@ def _translate_job_cmds(cached_vals, generic_workflow, gwjob): cached_vals : `dict` [`str`, `Any`] Config values common to jobs with same label. generic_workflow : `lsst.ctrl.bps.GenericWorkflow` - Generic workflow that contains job to being converted. + Generic workflow that contains the job to be converted. gwjob : `lsst.ctrl.bps.GenericWorkflowJob` - Generic workflow job to be converted. + Generic workflow job to be converted. Returns ------- @@ -1071,7 +1071,7 @@ def _get_info_from_schedd(wms_workflow_id, hist, schedds): ---------- wms_workflow_id : `str` Limit to specific run based on id. - hist : + hist : `int` Limit history search to this many days. schedds : `dict` [ `str`, `htcondor.Schedd` ], optional HTCondor schedulers which to query for job information. If None diff --git a/python/lsst/ctrl/bps/htcondor/lssthtc.py b/python/lsst/ctrl/bps/htcondor/lssthtc.py index 9a5f7a5..31a4925 100644 --- a/python/lsst/ctrl/bps/htcondor/lssthtc.py +++ b/python/lsst/ctrl/bps/htcondor/lssthtc.py @@ -456,8 +456,8 @@ def htc_tune_schedd_args(**kwargs): Returns ------- kwargs : `dict` [`str`, Any] - Keywords arguments that are guaranteed to work with the Python - HTCondor API. + Keyword arguments that are guaranteed to work with the Python + HTCondor API. Notes ----- @@ -494,7 +494,7 @@ Returns ------- kwargs : `dict` [`str`, Any] - Keywords arguments that were passed to the function. + Keyword arguments that were passed to the function. """ return kwargs @@ -752,7 +752,7 @@ class HTCJob: Parameters ---------- name : `str` - Name of the job + Name of the job. label : `str` Label that can used for grouping or lookup.
initcmds : `RestrictedDict` @@ -790,7 +790,7 @@ def add_dag_cmds(self, new_commands): Parameters ---------- new_commands : `dict` - DAG file commands to be added to Job + DAG file commands to be added to Job. """ self.dagcmds.update(new_commands) @@ -800,7 +800,7 @@ def add_job_attrs(self, new_attrs): Parameters ---------- new_attrs : `dict` - Attributes to be added to Job + Attributes to be added to Job. """ if self.attrs is None: self.attrs = {} @@ -830,7 +830,7 @@ def write_dag_commands(self, stream): Parameters ---------- stream : `IO` or `str` - Output Stream + Output Stream. """ print(f"JOB {self.name} {self.subfile}", file=stream) _htc_write_job_commands(stream, self.name, self.dagcmds) @@ -841,7 +841,7 @@ def dump(self, fh): Parameters ---------- fh : `~io.TextIOBase` - Output stream + Output stream. """ printer = pprint.PrettyPrinter(indent=4, stream=fh) printer.pprint(self.name) @@ -874,7 +874,7 @@ def __str__(self): Returns ------- info : `str` - String containing basic DAG info. + String containing basic DAG info. """ return f"{self.graph['name']} {len(self)}" @@ -884,7 +884,7 @@ def add_attribs(self, attribs=None): Parameters ---------- attribs : `dict` - DAG attributes + DAG attributes. """ if attribs is not None: self.graph["attr"].update(attribs) @@ -895,11 +895,11 @@ def add_job(self, job, parent_names=None, child_names=None): Parameters ---------- job : `HTCJob` - HTCJob to add to the HTCDag + HTCJob to add to the HTCDag. parent_names : `~collections.abc.Iterable` [`str`], optional - Names of parent jobs + Names of parent jobs. child_names : `~collections.abc.Iterable` [`str`], optional - Names of child jobs + Names of child jobs. """ assert isinstance(job, HTCJob) @@ -945,7 +945,7 @@ def del_job(self, job_name): Parameters ---------- job_name : `str` - Name of job in DAG to delete + Name of job in DAG to delete. 
""" # Reconnect edges around node to delete parents = self.predecessors(job_name) @@ -1016,7 +1016,7 @@ def write_dot(self, filename): Parameters ---------- filename : `str` - dot filename + Name of the dot file. """ pos = networkx.nx_agraph.graphviz_layout(self) networkx.draw(self, pos=pos) @@ -1033,7 +1033,7 @@ def condor_q(constraint=None, schedds=None, **kwargs): schedds : `dict` [`str`, `htcondor.Schedd`], optional HTCondor schedulers which to query for job information. If None (default), the query will be run against local scheduler only. - **kwargs: + **kwargs : `~typing.Any` Additional keyword arguments that need to be passed to the internal query method. @@ -1058,7 +1058,7 @@ def condor_history(constraint=None, schedds=None, **kwargs): HTCondor schedulers which to query for job information. If None (default), the query will be run against the history file of the local scheduler only. - **kwargs: + **kwargs : `~typing.Any` Additional keyword arguments that need to be passed to the internal query method. @@ -1089,7 +1089,7 @@ def condor_query(constraint=None, schedds=None, query_func=htc_query_present, ** - ``schedds``: Schedulers to query (`list` [`htcondor.Schedd`]). - ``**kwargs``: Keyword arguments that will be passed to the query function. - **kwargs: + **kwargs : `~typing.Any` Additional keyword arguments that need to be passed to the query method. @@ -1236,9 +1236,9 @@ def summary_from_dag(dir_name): ------- summary : `str` Semi-colon separated list of job labels and counts. - (Same format as saved in dag classad.) + (Same format as saved in dag classad). job_name_to_pipetask : `dict` [`str`, `str`] - Mapping of job names to job labels + Mapping of job names to job labels. """ dag = next(Path(dir_name).glob("*.dag")) @@ -1306,7 +1306,7 @@ def pegasus_name_to_label(name): def read_dag_status(wms_path): - """Read the node status file for DAG summary information + """Read the node status file for DAG summary information. Parameters ----------