From aaafdc6b021ff1b88e9f61b7cc19db657932dd99 Mon Sep 17 00:00:00 2001 From: Hilary James Oliver Date: Thu, 23 Nov 2023 13:29:19 +1300 Subject: [PATCH] cylc-set: glob in pool for now. --- cylc/flow/prerequisite.py | 4 + cylc/flow/scheduler.py | 3 + cylc/flow/task_outputs.py | 20 +- cylc/flow/task_pool.py | 249 +++++++++++------- cylc/flow/task_proxy.py | 24 +- cylc/flow/task_state.py | 8 +- .../cylc-poll/03-poll-all/flow.cylc | 2 +- .../hold-release/14-hold-kill/flow.cylc | 2 +- .../hold-release/15-hold-after/flow.cylc | 2 +- tests/functional/cylc-kill/03-simulation.t | 4 +- tests/functional/cylc-message/02-multi.t | 14 +- .../05-poll-multi-messages/flow.cylc | 6 +- .../cylc-remove/00-simple/flow.cylc | 2 +- .../cylc-remove/02-cycling/flow.cylc | 4 +- .../cylc-trigger/02-filter-failed/flow.cylc | 6 +- .../cylc-trigger/04-filter-names/flow.cylc | 10 +- .../cylc-trigger/06-already-active/flow.cylc | 6 +- .../04-polling-intervals.t | 10 +- .../ext-trigger/01-no-nudge/flow.cylc | 2 +- .../flow-triggers/00-new-future/flow.cylc | 2 +- .../flow-triggers/03-new-past/flow.cylc | 2 +- .../flow-triggers/04-all-past/flow.cylc | 2 +- .../flow-triggers/05-none-past/flow.cylc | 2 +- .../06-new-past-switch/flow.cylc | 2 +- .../07-all-past-switch/flow.cylc | 2 +- .../08-none-past-switch/flow.cylc | 2 +- .../flow-triggers/09-retrigger/flow.cylc | 4 +- .../flow-triggers/10-specific-flow/flow.cylc | 2 +- .../flow-triggers/11-wait-merge/flow.cylc | 5 +- .../flow-triggers/13-noflow-nomerge.t | 2 +- tests/functional/hold-release/05-release.t | 2 +- tests/functional/hold-release/08-hold.t | 16 +- .../hold-release/11-retrying/flow.cylc | 12 +- .../18-hold-cycle-globs/flow.cylc | 6 +- .../19-no-reset-prereq-on-waiting/flow.cylc | 2 +- .../05-from-platform-group.t | 2 +- .../12-pause-then-retry/flow.cylc | 8 +- tests/functional/reload/11-retrying/flow.cylc | 2 +- tests/functional/reload/14-waiting/flow.cylc | 2 +- .../reload/19-remote-kill/flow.cylc | 2 +- tests/functional/reload/25-xtriggers.t | 3 +- tests/functional/reload/runahead/flow.cylc | 2 +- .../remote/09-restart-running-file-install.t | 2 +- tests/functional/restart/22-hold/flow.cylc | 2 +- .../functional/restart/50-two-flows/flow.cylc | 4 +- tests/functional/restart/58-removed-task.t | 6 +- .../restart/58-removed-task/flow.cylc | 8 +- .../restart/58-waiting-manual-triggered.t | 2 +- .../spawn-on-demand/06-stop-flow-2/flow.cylc | 2 +- .../spawn-on-demand/07-abs-triggers/flow.cylc | 2 +- .../spawn-on-demand/09-set-outputs/flow.cylc | 4 +- .../spawn-on-demand/10-retrigger/flow.cylc | 2 +- .../11-hold-not-spawned/flow.cylc | 2 +- .../14-trigger-flow-blocker/flow.cylc | 2 +- .../functional/spawn-on-demand/18-submitted.t | 2 +- .../spawn-on-demand/19-submitted-compat.t | 2 +- .../special/08-clock-trigger-retry.t | 2 +- .../triggering/19-and-suicide/flow.cylc | 2 +- tests/integration/test_reload.py | 4 +- tests/integration/test_task_pool.py | 10 +- 60 files changed, 294 insertions(+), 230 deletions(-) diff --git a/cylc/flow/prerequisite.py b/cylc/flow/prerequisite.py index 0b5ac6a18d5..444cbd06722 100644 --- a/cylc/flow/prerequisite.py +++ b/cylc/flow/prerequisite.py @@ -75,6 +75,10 @@ def __init__(self, point): # * `False` (prerequisite unsatisfied). self._all_satisfied = None + def __str__(self): + # TODO make this more useful + return f"{self.point}: {self.satisfied}, {self.conditional_expression}" + def instantaneous_hash(self): """Generate a hash of this prerequisite in its current state. 
diff --git a/cylc/flow/scheduler.py b/cylc/flow/scheduler.py index 27cfc1dc545..2e14b42e860 100644 --- a/cylc/flow/scheduler.py +++ b/cylc/flow/scheduler.py @@ -590,6 +590,9 @@ def log_start(self) -> None: # Note that the following lines must be present at the top of # the workflow log file for use in reference test runs. + LOG.info( + "Task log key: [//():]" + ) LOG.info( f'Run mode: {self.config.run_mode()}', extra=RotatingLogFileHandler.header_extra diff --git a/cylc/flow/task_outputs.py b/cylc/flow/task_outputs.py index 6a5949b2f3d..f5f9ec6e176 100644 --- a/cylc/flow/task_outputs.py +++ b/cylc/flow/task_outputs.py @@ -15,7 +15,7 @@ # along with this program. If not, see . """Task output message manager and constants.""" -from typing import List +from typing import Set # Standard task output strings, used for triggering. TASK_OUTPUT_EXPIRED = "expired" @@ -271,8 +271,8 @@ def _get_item(self, message, trigger): else: return self._by_message[message] - def add_implied_outputs(self, output: str) -> List[str]: - """Return a list with implied outputs prepended. + def add_implied_outputs(self, output: str) -> Set[str]: + """Return a set with implied outputs prepended. - started implies submitted - any custom output implies started @@ -281,25 +281,25 @@ def add_implied_outputs(self, output: str) -> List[str]: """ if output == TASK_OUTPUT_STARTED: - return [TASK_OUTPUT_SUBMITTED, output] + return {TASK_OUTPUT_SUBMITTED, output} elif output in self._get_custom_triggers(): - return [TASK_OUTPUT_SUBMITTED, TASK_OUTPUT_STARTED, output] + return {TASK_OUTPUT_SUBMITTED, TASK_OUTPUT_STARTED, output} elif output in [TASK_OUTPUT_SUCCEEDED, TASK_OUTPUT_FAILED]: - required_custom = [ + required_custom = { msg for msg in self._get_custom_triggers() if msg in self._required.values() - ] - return [ + } + return { TASK_OUTPUT_SUBMITTED, TASK_OUTPUT_STARTED, *required_custom, output - ] + } else: - return [output] + return {output} @staticmethod def is_valid_std_name(name): diff --git a/cylc/flow/task_pool.py b/cylc/flow/task_pool.py index ccc4bde7bf9..c528ad9375e 100644 --- a/cylc/flow/task_pool.py +++ b/cylc/flow/task_pool.py @@ -1426,7 +1426,7 @@ def spawn_on_all_outputs( self.rh_release_and_queue(c_task) def can_be_spawned(self, name: str, point: 'PointBase') -> bool: - """Return True if a requested task is spawnable, else False.""" + """Return True if a point/name is within graph bounds.""" if name not in self.config.taskdefs: LOG.debug('No task definition %s', name) @@ -1576,10 +1576,21 @@ def set( # noqa: A003 ): """Set prerequisites or outputs of target tasks. - By default, set all required outputs. + Default: set all required outputs. + + Setting prerequisites spawns the target task. + Setting outputs spawns children of the target task. + + Use a transient task proxy to spawn children. (Even if the parent was + previously spawned in this flow its children might not have been). + + Task matching: + - globs (cycle and name) only match in the pool + - family names are not expanded + - future tasks must be specified individually Args: - items: identifiers for matching task definitions + items: task ID match patterns prerequisites: prerequisites to set outputs: outputs to set and spawn children of flow: Flow numbers for spawned or merged tasks @@ -1592,53 +1603,60 @@ def set( # noqa: A003 # Illegal flow command opts return - # Note this filters out invalid cycle point for target task. 
- _, task_items = self.match_taskdefs(items) + itasks, future_tasks, unmatched = self.filter_task_proxies( + items, + future=True, + warn=False, + ) + + # pool tasks + for itask in itasks: + self.merge_flows(itask, flow_nums) + if not outputs and not prerequisites: + # Default: set required outputs. + outputs = itask.tdef.get_required_outputs() + if outputs: + self._set_outputs_itask(itask, outputs) + if prerequisites: + self._set_prereqs_itask( + itask, prerequisites, flow_nums, flow_wait) - for (_, point), taskdef in sorted(task_items.items()): + # future task definitions + for name, point in future_tasks: + taskdef = self.config.get_taskdef(name) if not outputs and not prerequisites: # Default: set required outputs. outputs = taskdef.get_required_outputs() if outputs: - self._set_outputs( - point, taskdef, outputs, flow_nums, flow_wait) + trans = self._spawn_transient_task( + point, taskdef, outputs, flow_nums, flow_wait + ) + if trans is not None: + self._set_outputs_itask(trans, outputs) if prerequisites: - self._set_prereqs( + self._set_prereqs_tdef( point, taskdef, prerequisites, flow_nums, flow_wait) - def _set_outputs(self, point, taskdef, outputs, flow_nums, flow_wait): - """Set given outputs of a task, and spawn associated children. + def _spawn_transient_task( + self, + point: 'PointBase', + taskdef: 'TaskDef', + outputs: List[str], + flow_nums: 'FlowNums', + flow_wait: bool + ) -> Optional['TaskProxy']: + """Spawn a transient task proxy and update its outputs from the DB.""" - Do not spawn the target task if it is not already in the pool, but - update the DB to reflect the set outputs, and spawn the children. - """ - itask = self._get_task_by_id( - Tokens( - cycle=str(point), - task=taskdef.name - ).relative_id + itask = self.spawn_task( + taskdef.name, + point, + flow_nums, + flow_wait=flow_wait, + force=True, + transient=True ) if itask is not None: - # The parent task already exists in the pool. - transient = False - self.merge_flows(itask, flow_nums) - else: - # Spawn a transient task instance to use for spawning children. - transient = True - itask = self.spawn_task( - taskdef.name, - point, - flow_nums, - flow_wait=flow_wait, - force=True, - transient=True - ) - # force=True: spawn it even if previously spawned in this flow, - # because even if it was, its children might not have been. It - # is transient and won't be added to the pool, but its outputs - # will be updated in the DB, and any event handler activity - # will be recorded in the previous-submit log directory. - + # Update outputs that were already completed. 
for outputs_str, fnums in ( self.workflow_db_mgr.pri_dao.select_task_outputs( itask.tdef.name, str(itask.point)) @@ -1648,83 +1666,110 @@ def _set_outputs(self, point, taskdef, outputs, flow_nums, flow_wait): itask.state.outputs.set_completed_by_msg(msg) break - for output in outputs: - msg = itask.state.outputs.get_msg(output) - if msg is None: + return itask + + def _set_outputs_itask( + self, + itask: 'TaskProxy', + req_outputs: List[str], + ) -> None: + """Set requested outputs on a task and spawn their children.""" + + # TODO TIDIER "set:"" LOG MESSAGES + + # convert from labels to messages + requested = [] + allout: Set[str] = set() # requested plus implied outputs + for output in req_outputs: + o_msg = itask.state.outputs.get_msg(output) + if o_msg is None: LOG.warning( f"Output not found: {itask.identity}:{output}") continue + requested.append(o_msg) + allout = allout.union( + itask.state.outputs.add_implied_outputs(o_msg) + ) - for out in itask.state.outputs.add_implied_outputs(msg): - # Handle outputs as if completed naturally. - if itask.state.outputs.is_completed(out): - # already completed - continue - info = " " - if out != msg: - info = " implied " - LOG.info( - f"Completing{info}output: {itask.identity}:{out}" - f" ({itask.flows_str()})") - self.task_events_mgr.process_message(itask, logging.INFO, out) + for out in allout: + info = f'output "{out}" of {itask.identity}' + if out not in requested: + info = f"implied {info}" + if itask.state.outputs.is_completed(out): + LOG.info(f"set: {info} already completed") + continue + LOG.info(f"set: completing {info}") - if transient: - # tasks states table gets updated from the task pool - self.workflow_db_mgr.put_update_task_state(itask) + self.task_events_mgr.process_message(itask, logging.INFO, out) - def _set_prereqs(self, point, taskdef, prereqs, flow_nums, flow_wait): - """Set given prerequisites of a target task. + if itask.transient: + # (note tasks states table gets updated from the task pool) + LOG.warning(f"TWAT {itask}") + self.workflow_db_mgr.put_update_task_state(itask) - Spawn the task first if not already in the pool. + def _get_valid_prereqs(self, prereqs, taskdef, point): + """Get valid prerequisites for a task. + + Spawn a transient task proxy without incrementing submit + number or checking the flow. """ + available = set() + itask = TaskProxy(self.tokens, taskdef, point, transient=True) + for p in itask.state.prerequisites: + for pp in p.satisfied.keys(): + available.add(pp) + + requested = set() + for p in prereqs: + t = Tokens(p, relative=True) + # Default to :succeeded + t['task_sel'] = t['task_sel'] or TASK_OUTPUT_SUCCEEDED + requested.add((t['cycle'], t['task'], t['task_sel'])) + + good = available & requested + bad = requested - available + if bad: + for b in bad: + LOG.warning( + f"{point}/{taskdef.name} does not depend on" + f" {b[0]}/{b[1]}:{b[2]}" + ) + + return good + + def _set_prereqs_itask(self, itask, prereqs, flow_nums, flow_wait): + """Set prerequisites of a task in the pool.""" + if prereqs == ["all"]: - itask = self.get_or_spawn_task(point, taskdef.name, flow_nums) - if itask is None: - # E.g. already spawned in flow. - return itask.state.set_all_satisfied() else: - # Check if the given prerequisites are valid for the task. - # Spawn a transient task proxy to get the available prerequisites - # without incrementing submit number or checking the flow. 
- available = set() - for p in TaskProxy( # transient task - self.tokens, taskdef, point, transient=True - ).state.prerequisites: - for pp in p.satisfied.keys(): - available.add(pp) - - requested = set() - for p in prereqs: - t = Tokens(p, relative=True) - # Default to :succeeded - t['task_sel'] = t['task_sel'] or TASK_OUTPUT_SUCCEEDED - requested.add((t['cycle'], t['task'], t['task_sel'])) - - good = available & requested - bad = requested - available - if bad: - for b in bad: - LOG.warning( - f"{point}/{taskdef.name} does not depend on" - f" {b[0]}/{b[1]}:{b[2]}" - ) - if not good: - # No valid prerequisites requested. - return + itask.satisfy_me( + # TODO: IS THIS NEEDED? (JUST LOG BAD ONES FROM SATISFY_ME?) + self._get_valid_prereqs(prereqs, itask.tdef, itask.point) + ) - # Now spawn that sucker for real. - itask = self.get_or_spawn_task( - point, - taskdef.name, - flow_nums, - flow_wait=flow_wait + self.data_store_mgr.delta_task_prerequisite(itask) + + # if ( + # self.runahead_limit_point is not None + # and itask.point <= self.runahead_limit_point + # ): + # self.rh_release_and_queue(itask) + + def _set_prereqs_tdef(self, point, taskdef, prereqs, flow_nums, flow_wait): + """Set given prerequisites of a future task.""" + + itask = self.spawn_task(taskdef.name, point, flow_nums, flow_wait) + if itask is None: + # E.g. already spawned in flow. + return + if prereqs == ["all"]: + itask.state.set_all_satisfied() + else: + itask.satisfy_me( + self._get_valid_prereqs(prereqs, taskdef, point) ) - if itask is None: - # E.g. already spawned in flow. - return - itask.satisfy_me(good) self.data_store_mgr.delta_task_prerequisite(itask) self.add_to_pool(itask) diff --git a/cylc/flow/task_proxy.py b/cylc/flow/task_proxy.py index a927728de9e..c87a82aa615 100644 --- a/cylc/flow/task_proxy.py +++ b/cylc/flow/task_proxy.py @@ -280,18 +280,30 @@ def __init__( def flows_str(self) -> str: """Return string representation of my flow numbers.""" - return f"flows:{','.join(str(i) for i in self.flow_nums) or 'none'}" + return ( + "{" + f"{','.join(str(i) for i in self.flow_nums) or 'none'}" + "}" + ) def __repr__(self) -> str: return f"<{self.__class__.__name__} '{self.tokens}'>" def __str__(self) -> str: - """Stringify with tokens, state, submit_num, and flow_nums.""" + """Stringify with tokens, state, submit_num, and flow_nums. + + Format: "//{} status". + Omit flows if only the default (flow 1). 
+ + """ + if self.flow_nums == {1}: + flows = "" + else: + flows = ( + "(" + f"{','.join(str(i) for i in self.flow_nums) or 'none'}" + ")" + ) return ( - f"{self.identity} " - f"{self.state} " - f"job:{self.submit_num:02d} " - f"{self.flows_str()}" + f"{self.identity}/{self.submit_num:02d}{flows}:{self.state}" ) def copy_to_reload_successor(self, reload_successor, check_output): diff --git a/cylc/flow/task_state.py b/cylc/flow/task_state.py index 38064f1947a..959782fcf94 100644 --- a/cylc/flow/task_state.py +++ b/cylc/flow/task_state.py @@ -307,19 +307,19 @@ def __call__( ) ) - def satisfy_me(self, all_task_outputs): + def satisfy_me(self, outputs): """Attempt to get my prerequisites satisfied.""" goodies = set() for prereqs in [self.prerequisites, self.suicide_prerequisites]: for prereq in prereqs: - satisfied = prereq.satisfy_me(all_task_outputs) + satisfied = prereq.satisfy_me(outputs) if satisfied: self._is_satisfied = None self._suicide_is_satisfied = None - for out in all_task_outputs: + for out in outputs: if out in prereq.satisfied: goodies.add(out) - return all_task_outputs - goodies + return set(outputs) - goodies def xtriggers_all_satisfied(self): """Return True if all xtriggers are satisfied.""" diff --git a/tests/flakyfunctional/cylc-poll/03-poll-all/flow.cylc b/tests/flakyfunctional/cylc-poll/03-poll-all/flow.cylc index 5e21a978fea..709e3bb16e6 100644 --- a/tests/flakyfunctional/cylc-poll/03-poll-all/flow.cylc +++ b/tests/flakyfunctional/cylc-poll/03-poll-all/flow.cylc @@ -41,7 +41,7 @@ cylc poll "${CYLC_WORKFLOW_ID}//*" cylc__job__poll_grep_workflow_log \ - "${CYLC_TASK_CYCLE_POINT}/submit_hold preparing .* => submitted" + "${CYLC_TASK_CYCLE_POINT}/submit_hold/01:preparing.* => submitted" st_file="${CYLC_WORKFLOW_RUN_DIR}/log/job/${CYLC_TASK_CYCLE_POINT}/submit_hold/NN/job.status" pkill -g "$(awk -F= '$1 == "CYLC_JOB_ID" {print $2}' "${st_file}")" diff --git a/tests/flakyfunctional/hold-release/14-hold-kill/flow.cylc b/tests/flakyfunctional/hold-release/14-hold-kill/flow.cylc index 7b82bd3e15c..26637541125 100644 --- a/tests/flakyfunctional/hold-release/14-hold-kill/flow.cylc +++ b/tests/flakyfunctional/hold-release/14-hold-kill/flow.cylc @@ -9,7 +9,7 @@ echo '# killing "sleeper"' cylc kill "${CYLC_WORKFLOW_ID}//1/sleeper" cylc__job__poll_grep_workflow_log -E \ - '1/sleeper\ waiting\(held\) .* job killed' + '1/sleeper/01:waiting\(held\).* job killed' sleep 10 # sleep, should still be held after 10 seconds cylc dump -s -t "${CYLC_WORKFLOW_ID}" >'cylc-dump.out' diff --git a/tests/flakyfunctional/hold-release/15-hold-after/flow.cylc b/tests/flakyfunctional/hold-release/15-hold-after/flow.cylc index 71cb8d14b25..fe9e2067a34 100644 --- a/tests/flakyfunctional/hold-release/15-hold-after/flow.cylc +++ b/tests/flakyfunctional/hold-release/15-hold-after/flow.cylc @@ -23,7 +23,7 @@ script = cylc hold --after '20140101T12' "${CYLC_WORKFLOW_ID}" [[stopper]] script = """ - cylc__job__poll_grep_workflow_log -E '20140101T1200Z/bar .* \(received\)succeeded' + cylc__job__poll_grep_workflow_log -E '20140101T1200Z/bar/01.* \(received\)succeeded' cylc stop "${CYLC_WORKFLOW_ID}" """ [[[job]]] diff --git a/tests/functional/cylc-kill/03-simulation.t b/tests/functional/cylc-kill/03-simulation.t index f9152a84a1f..dadfeede123 100755 --- a/tests/functional/cylc-kill/03-simulation.t +++ b/tests/functional/cylc-kill/03-simulation.t @@ -28,7 +28,7 @@ run_ok "${TEST_NAME_BASE}-validate" cylc validate "${WORKFLOW_NAME}" cylc play --debug -m simulation "${WORKFLOW_NAME}" >/dev/null 2>&1 # wait for 
simulated job start -poll_grep_workflow_log "1/foo .* running" -E +poll_grep_workflow_log "1/foo.* running" -E # kill it run_ok killer cylc kill "${WORKFLOW_NAME}//1/foo" @@ -37,6 +37,6 @@ run_ok killer cylc kill "${WORKFLOW_NAME}//1/foo" poll_grep_workflow_log "INFO - DONE" # check the sim job was kiled -grep_workflow_log_ok killed "1/foo .* failed" -E +grep_workflow_log_ok killed "1/foo.* failed" -E purge diff --git a/tests/functional/cylc-message/02-multi.t b/tests/functional/cylc-message/02-multi.t index eab3148cde4..17a0e3b4189 100755 --- a/tests/functional/cylc-message/02-multi.t +++ b/tests/functional/cylc-message/02-multi.t @@ -57,19 +57,19 @@ sed -i 's/\(^.*\) at .*$/\1/;' 'sed.out' # Note: the continuation bit gets printed twice, because the message gets a # warning as being unhandled. cmp_ok 'sed.out' <<__LOG__ -DEBUG - [1/foo submitted job:01 flows:1] (received)started -WARNING - [1/foo running job:01 flows:1] (received)Warn this -INFO - [1/foo running job:01 flows:1] (received)Greeting -WARNING - [1/foo running job:01 flows:1] (received)Warn that -DEBUG - [1/foo running job:01 flows:1] (received)Remove stuffs such as +DEBUG - [1/foo/01:submitted] (received)started +WARNING - [1/foo/01:running] (received)Warn this +INFO - [1/foo/01:running] (received)Greeting +WARNING - [1/foo/01:running] (received)Warn that +DEBUG - [1/foo/01:running] (received)Remove stuffs such as ${LOG_INDENT}badness ${LOG_INDENT}slowness ${LOG_INDENT}and other incorrectness. ${LOG_INDENT}badness ${LOG_INDENT}slowness ${LOG_INDENT}and other incorrectness. -INFO - [1/foo running job:01 flows:1] (received)whatever -DEBUG - [1/foo running job:01 flows:1] (received)succeeded +INFO - [1/foo/01:running] (received)whatever +DEBUG - [1/foo/01:running] (received)succeeded __LOG__ purge diff --git a/tests/functional/cylc-poll/05-poll-multi-messages/flow.cylc b/tests/functional/cylc-poll/05-poll-multi-messages/flow.cylc index 9adf11d9107..18fd4112e51 100644 --- a/tests/functional/cylc-poll/05-poll-multi-messages/flow.cylc +++ b/tests/functional/cylc-poll/05-poll-multi-messages/flow.cylc @@ -20,8 +20,8 @@ echo "CYLC_MESSAGE=$(date +%FT%H:%M:%SZ)|INFO|hello1" echo "CYLC_MESSAGE=$(date +%FT%H:%M:%SZ)|INFO|hello2" } >>"${CYLC_TASK_LOG_ROOT}.status" - cylc__job__poll_grep_workflow_log -E '1/speaker1 running .* \(polled\)hello1' - cylc__job__poll_grep_workflow_log -E '1/speaker1 running .* \(polled\)hello2' + cylc__job__poll_grep_workflow_log -E '1/speaker1/01:running.* \(polled\)hello1' + cylc__job__poll_grep_workflow_log -E '1/speaker1/01:running.* \(polled\)hello2' """ [[[outputs]]] hello1 = "hello1" @@ -34,7 +34,7 @@ # get sent back to the workflow echo "CYLC_MESSAGE=$(date +%FT%H:%M:%SZ)|INFO|greet" \ >>"${CYLC_TASK_LOG_ROOT}.status" - cylc__job__poll_grep_workflow_log -E '1/speaker2 running .* \(polled\)greet' + cylc__job__poll_grep_workflow_log -E '1/speaker2/01:running.* \(polled\)greet' """ [[[outputs]]] greet = "greet" diff --git a/tests/functional/cylc-remove/00-simple/flow.cylc b/tests/functional/cylc-remove/00-simple/flow.cylc index 15c0b09dc64..84c740ad421 100644 --- a/tests/functional/cylc-remove/00-simple/flow.cylc +++ b/tests/functional/cylc-remove/00-simple/flow.cylc @@ -15,7 +15,7 @@ script = false [[cleaner]] script = """ -cylc__job__poll_grep_workflow_log -E '1/b\ running .* \(received\)failed' +cylc__job__poll_grep_workflow_log -E '1/b/01:running.* \(received\)failed' # Remove the unhandled failed task cylc remove "$CYLC_WORKFLOW_ID//1/b" # Remove waiting 1/c diff --git 
a/tests/functional/cylc-remove/02-cycling/flow.cylc b/tests/functional/cylc-remove/02-cycling/flow.cylc index a2a236eb004..3b6c1051493 100644 --- a/tests/functional/cylc-remove/02-cycling/flow.cylc +++ b/tests/functional/cylc-remove/02-cycling/flow.cylc @@ -17,8 +17,8 @@ [runtime] [[remover]] script = """ - cylc__job__poll_grep_workflow_log -E '2020/bar running .* \(received\)failed' - cylc__job__poll_grep_workflow_log -E '2021/baz running .* \(received\)failed' + cylc__job__poll_grep_workflow_log -E '2020/bar/01:running.* \(received\)failed' + cylc__job__poll_grep_workflow_log -E '2021/baz/01:running.* \(received\)failed' # Remove the two unhandled failed tasks. cylc remove "$CYLC_WORKFLOW_ID//*/ba*:failed" # Remove the two unsatisfied waiting tasks. diff --git a/tests/functional/cylc-trigger/02-filter-failed/flow.cylc b/tests/functional/cylc-trigger/02-filter-failed/flow.cylc index e5a3065782e..7416cf5790d 100644 --- a/tests/functional/cylc-trigger/02-filter-failed/flow.cylc +++ b/tests/functional/cylc-trigger/02-filter-failed/flow.cylc @@ -18,9 +18,9 @@ [[fixer]] script = """ cylc__job__wait_cylc_message_started - cylc__job__poll_grep_workflow_log -E '1/fixable1 running .* \(received\)failed' - cylc__job__poll_grep_workflow_log -E '1/fixable2 running .* \(received\)failed' - cylc__job__poll_grep_workflow_log -E '1/fixable3 running .* \(received\)failed' + cylc__job__poll_grep_workflow_log -E '1/fixable1/01:running.* \(received\)failed' + cylc__job__poll_grep_workflow_log -E '1/fixable2/01:running.* \(received\)failed' + cylc__job__poll_grep_workflow_log -E '1/fixable3/01:running.* \(received\)failed' cylc trigger "${CYLC_WORKFLOW_ID}//1/fixable*" """ [[Z]] diff --git a/tests/functional/cylc-trigger/04-filter-names/flow.cylc b/tests/functional/cylc-trigger/04-filter-names/flow.cylc index 5997dcaa201..31839c1b77f 100644 --- a/tests/functional/cylc-trigger/04-filter-names/flow.cylc +++ b/tests/functional/cylc-trigger/04-filter-names/flow.cylc @@ -22,11 +22,11 @@ [[fixer]] script = """ cylc__job__wait_cylc_message_started - cylc__job__poll_grep_workflow_log -E '1/fixable-1a .* \(received\)failed' - cylc__job__poll_grep_workflow_log -E '1/fixable-1b .* \(received\)failed' - cylc__job__poll_grep_workflow_log -E '1/fixable-2a .* \(received\)failed' - cylc__job__poll_grep_workflow_log -E '1/fixable-2b .* \(received\)failed' - cylc__job__poll_grep_workflow_log -E '1/fixable-3 .* \(received\)failed' + cylc__job__poll_grep_workflow_log -E '1/fixable-1a/01.* \(received\)failed' + cylc__job__poll_grep_workflow_log -E '1/fixable-1b/01.* \(received\)failed' + cylc__job__poll_grep_workflow_log -E '1/fixable-2a/01.* \(received\)failed' + cylc__job__poll_grep_workflow_log -E '1/fixable-2b/01.* \(received\)failed' + cylc__job__poll_grep_workflow_log -E '1/fixable-3/01.* \(received\)failed' cylc trigger "${CYLC_WORKFLOW_ID}//" \ '//1/FIXABLE-1' '//1/fixable-2*' '//1/fixable-3' """ diff --git a/tests/functional/cylc-trigger/06-already-active/flow.cylc b/tests/functional/cylc-trigger/06-already-active/flow.cylc index b939aa2d290..c7d99f6a6a8 100644 --- a/tests/functional/cylc-trigger/06-already-active/flow.cylc +++ b/tests/functional/cylc-trigger/06-already-active/flow.cylc @@ -9,14 +9,14 @@ [runtime] [[triggerer]] script = """ - cylc__job__poll_grep_workflow_log "1/triggeree .* running" -E + cylc__job__poll_grep_workflow_log "1/triggeree/01:running" cylc trigger "$CYLC_WORKFLOW_ID//1/triggeree" cylc__job__poll_grep_workflow_log \ - "1/triggeree .* ignoring trigger - already active" -E + "1/triggeree.* 
ignoring trigger - already active" -E """ [[triggeree]] script = """ cylc__job__poll_grep_workflow_log \ - "1/triggeree .* ignoring trigger - already active" -E + "1/triggeree.* ignoring trigger - already active" -E """ diff --git a/tests/functional/execution-time-limit/04-polling-intervals.t b/tests/functional/execution-time-limit/04-polling-intervals.t index 4b213c70546..e1df403f155 100644 --- a/tests/functional/execution-time-limit/04-polling-intervals.t +++ b/tests/functional/execution-time-limit/04-polling-intervals.t @@ -47,13 +47,13 @@ poll_grep_workflow_log "INFO - DONE" # NOTE: execution timeout polling is delayed by PT1M to let things settle # PT10M = (3*PT3S + PT9M30S) - PT1M -grep_workflow_log_ok grep-limit10M "\[1/limit10M running job:01 flows:1\] health: execution timeout=None, polling intervals=3\*PT30S,PT9M30S,PT2M,PT7M,..." +grep_workflow_log_ok grep-limit10M "\[1/limit10M/01:running\] health: execution timeout=None, polling intervals=3\*PT30S,PT9M30S,PT2M,PT7M,..." # PT60M = (3*PT3S + PT10M + PT49M30S) - PT1M -grep_workflow_log_ok grep-limit1H "\[1/limit1H running job:01 flows:1\] health: execution timeout=None, polling intervals=3\*PT30S,PT10M,PT49M30S,PT2M,PT7M,..." +grep_workflow_log_ok grep-limit1H "\[1/limit1H/01:running\] health: execution timeout=None, polling intervals=3\*PT30S,PT10M,PT49M30S,PT2M,PT7M,..." # PT70S = (2*PT30S + PT1M10S) - PT1M -grep_workflow_log_ok grep-limit70S "\[1/limit70S running job:01 flows:1\] health: execution timeout=None, polling intervals=2\*PT30S,PT1M10S,PT2M,PT7M,..." +grep_workflow_log_ok grep-limit70S "\[1/limit70S/01:running\] health: execution timeout=None, polling intervals=2\*PT30S,PT1M10S,PT2M,PT7M,..." # PT95M = (3*PT3S + PT10M + PT1H + PT24M30S) - PT1M -grep_workflow_log_ok grep-limit95M "\[1/limit95M running job:01 flows:1\] health: execution timeout=None, polling intervals=3\*PT30S,PT10M,PT1H,PT24M30S,PT2M,PT7M,..." -grep_workflow_log_ok grep-no-limit "\[1/nolimit running job:01 flows:1\] health: execution timeout=None, polling intervals=3\*PT30S,PT10M,PT1H,..." +grep_workflow_log_ok grep-limit95M "\[1/limit95M/01:running\] health: execution timeout=None, polling intervals=3\*PT30S,PT10M,PT1H,PT24M30S,PT2M,PT7M,..." +grep_workflow_log_ok grep-no-limit "\[1/nolimit/01:running\] health: execution timeout=None, polling intervals=3\*PT30S,PT10M,PT1H,..." 
purge diff --git a/tests/functional/ext-trigger/01-no-nudge/flow.cylc b/tests/functional/ext-trigger/01-no-nudge/flow.cylc index f5569601950..1bf744e8ac6 100644 --- a/tests/functional/ext-trigger/01-no-nudge/flow.cylc +++ b/tests/functional/ext-trigger/01-no-nudge/flow.cylc @@ -31,7 +31,7 @@ [[foo]] script = """ cylc kill "$CYLC_WORKFLOW_ID//1/bar" - cylc__job__poll_grep_workflow_log -E '1/bar .* \(internal\)failed' + cylc__job__poll_grep_workflow_log -E '1/bar.* \(internal\)failed' cylc release "$CYLC_WORKFLOW_ID//1/bar" """ [[bar]] diff --git a/tests/functional/flow-triggers/00-new-future/flow.cylc b/tests/functional/flow-triggers/00-new-future/flow.cylc index c67fd0fdd19..94add5a60d0 100644 --- a/tests/functional/flow-triggers/00-new-future/flow.cylc +++ b/tests/functional/flow-triggers/00-new-future/flow.cylc @@ -34,5 +34,5 @@ [[a]] script = """ cylc trigger --flow=new ${CYLC_WORKFLOW_ID}//1/d - cylc__job__poll_grep_workflow_log -E '1/d.*started' + cylc__job__poll_grep_workflow_log -E '1/d/01.*started' """ diff --git a/tests/functional/flow-triggers/03-new-past/flow.cylc b/tests/functional/flow-triggers/03-new-past/flow.cylc index 9c77123d40c..2bc3505022e 100644 --- a/tests/functional/flow-triggers/03-new-past/flow.cylc +++ b/tests/functional/flow-triggers/03-new-past/flow.cylc @@ -37,6 +37,6 @@ script = """ if (( $CYLC_TASK_SUBMIT_NUMBER == 1 )); then cylc trigger --flow=new ${CYLC_WORKFLOW_ID}//1/a - cylc__job__poll_grep_workflow_log -E '1/a submitted job:02 .*started' + cylc__job__poll_grep_workflow_log -E '1/a/02\(2\):submitted.*started' fi """ diff --git a/tests/functional/flow-triggers/04-all-past/flow.cylc b/tests/functional/flow-triggers/04-all-past/flow.cylc index 85721f2b5e5..11b6ac7e5d9 100644 --- a/tests/functional/flow-triggers/04-all-past/flow.cylc +++ b/tests/functional/flow-triggers/04-all-past/flow.cylc @@ -41,6 +41,6 @@ script = """ if (( $CYLC_TASK_SUBMIT_NUMBER == 1 )); then cylc trigger {{OPT}} ${CYLC_WORKFLOW_ID}//1/a - cylc__job__poll_grep_workflow_log -E '1/a running job:02 .*succeeded' + cylc__job__poll_grep_workflow_log -E '1/a/02:running.*succeeded' fi """ diff --git a/tests/functional/flow-triggers/05-none-past/flow.cylc b/tests/functional/flow-triggers/05-none-past/flow.cylc index 6709ebf0d3c..e6f03dc2b7a 100644 --- a/tests/functional/flow-triggers/05-none-past/flow.cylc +++ b/tests/functional/flow-triggers/05-none-past/flow.cylc @@ -34,6 +34,6 @@ script = """ if (( $CYLC_TASK_SUBMIT_NUMBER == 1 )); then cylc trigger --flow=none ${CYLC_WORKFLOW_ID}//1/a - cylc__job__poll_grep_workflow_log -E '1/a submitted job:02 .*started' + cylc__job__poll_grep_workflow_log -E '1/a/02\(none\):submitted.*started' fi """ diff --git a/tests/functional/flow-triggers/06-new-past-switch/flow.cylc b/tests/functional/flow-triggers/06-new-past-switch/flow.cylc index b33855b74ca..1b648319bfd 100644 --- a/tests/functional/flow-triggers/06-new-past-switch/flow.cylc +++ b/tests/functional/flow-triggers/06-new-past-switch/flow.cylc @@ -55,5 +55,5 @@ [[c]] script = """ cylc trigger --flow=new ${CYLC_WORKFLOW_ID}//1/a - cylc__job__poll_grep_workflow_log -E '1/a submitted job:02 .*started' + cylc__job__poll_grep_workflow_log -E '1/a/02\(2\):submitted.*started' """ diff --git a/tests/functional/flow-triggers/07-all-past-switch/flow.cylc b/tests/functional/flow-triggers/07-all-past-switch/flow.cylc index 87ea0445a2d..4965bc77886 100644 --- a/tests/functional/flow-triggers/07-all-past-switch/flow.cylc +++ b/tests/functional/flow-triggers/07-all-past-switch/flow.cylc @@ -64,6 +64,6 @@ script 
= """ if (( CYLC_TASK_SUBMIT_NUMBER == 1 )); then cylc trigger {{OPT}} ${CYLC_WORKFLOW_ID}//1/a - cylc__job__poll_grep_workflow_log -E '1/a running job:02 .*succeeded' + cylc__job__poll_grep_workflow_log -E '1/a/02:running.*succeeded' fi """ diff --git a/tests/functional/flow-triggers/08-none-past-switch/flow.cylc b/tests/functional/flow-triggers/08-none-past-switch/flow.cylc index 419bf72d67a..2f33482edb1 100644 --- a/tests/functional/flow-triggers/08-none-past-switch/flow.cylc +++ b/tests/functional/flow-triggers/08-none-past-switch/flow.cylc @@ -53,5 +53,5 @@ [[c]] script = """ cylc trigger --flow=none ${CYLC_WORKFLOW_ID}//1/a - cylc__job__poll_grep_workflow_log -E '1/a submitted job:02 .*started' + cylc__job__poll_grep_workflow_log -E '1/a/02\(none\):submitted.*started' """ diff --git a/tests/functional/flow-triggers/09-retrigger/flow.cylc b/tests/functional/flow-triggers/09-retrigger/flow.cylc index a8bdb524870..8a728e1408a 100644 --- a/tests/functional/flow-triggers/09-retrigger/flow.cylc +++ b/tests/functional/flow-triggers/09-retrigger/flow.cylc @@ -15,9 +15,9 @@ [[foo]] script = """ cylc trigger --wait ${CYLC_WORKFLOW_ID}//1/baz - cylc__job__poll_grep_workflow_log "1/baz running job:01 .* succeeded" + cylc__job__poll_grep_workflow_log "1/baz/01:running.*succeeded" cylc trigger --wait ${CYLC_WORKFLOW_ID}//1/baz - cylc__job__poll_grep_workflow_log "1/baz running job:02 .* succeeded" + cylc__job__poll_grep_workflow_log "1/baz/02:running.*succeeded" """ [[baz]] script = """ diff --git a/tests/functional/flow-triggers/10-specific-flow/flow.cylc b/tests/functional/flow-triggers/10-specific-flow/flow.cylc index 3d648ddcac4..c1f51fa4798 100644 --- a/tests/functional/flow-triggers/10-specific-flow/flow.cylc +++ b/tests/functional/flow-triggers/10-specific-flow/flow.cylc @@ -17,6 +17,6 @@ [[trigger-happy]] script = """ cylc trigger --flow=2 --wait ${CYLC_WORKFLOW_ID}//1/f - cylc__job__poll_grep_workflow_log "1/d submitted job:01 .*started" + cylc__job__poll_grep_workflow_log "1/d/01:submitted.*started" cylc trigger --flow=new ${CYLC_WORKFLOW_ID}//1/b """ diff --git a/tests/functional/flow-triggers/11-wait-merge/flow.cylc b/tests/functional/flow-triggers/11-wait-merge/flow.cylc index 9e398c7bf74..b956b45ddf0 100644 --- a/tests/functional/flow-triggers/11-wait-merge/flow.cylc +++ b/tests/functional/flow-triggers/11-wait-merge/flow.cylc @@ -16,7 +16,7 @@ [[a]] script = """ if ((CYLC_TASK_SUBMIT_NUMBER == 2)); then - cylc__job__poll_grep_workflow_log "1/d .*(received)started" + cylc__job__poll_grep_workflow_log "\[1/d/01(1,2):submitted] (received)started" fi """ [[b]] @@ -24,11 +24,12 @@ if ((CYLC_TASK_SUBMIT_NUMBER == 1)); then cylc trigger --flow=new ${CYLC_WORKFLOW_ID}//1/a cylc trigger --flow=2 --wait ${CYLC_WORKFLOW_ID}//1/c - cylc__job__poll_grep_workflow_log "1/c .*(received)x" + cylc__job__poll_grep_workflow_log "\[1/c/01(2):running] (received)x" fi """ [[c]] script = """ + cylc__job__wait_cylc_message_started cylc message x if ((CYLC_TASK_SUBMIT_NUMBER == 1)); then cylc__job__poll_grep_workflow_log "merged" diff --git a/tests/functional/flow-triggers/13-noflow-nomerge.t b/tests/functional/flow-triggers/13-noflow-nomerge.t index c8b4528a2f9..d380268e449 100644 --- a/tests/functional/flow-triggers/13-noflow-nomerge.t +++ b/tests/functional/flow-triggers/13-noflow-nomerge.t @@ -27,7 +27,7 @@ run_ok "${TEST_NAME_BASE}-run" cylc play "${WORKFLOW_NAME}" poll_grep_workflow_log "Workflow stalled" run_ok "${TEST_NAME_BASE}-trigger" cylc trigger --flow=none "${WORKFLOW_NAME}//1/a" 
-poll_grep_workflow_log -E "1/a running job:02 flows:none.*=> succeeded" +poll_grep_workflow_log -E "1/a/02\(none\):running.*=> succeeded" cylc stop --now --now --max-polls=5 --interval=2 "$WORKFLOW_NAME" diff --git a/tests/functional/hold-release/05-release.t b/tests/functional/hold-release/05-release.t index 0ba676c8e86..2823492bc09 100755 --- a/tests/functional/hold-release/05-release.t +++ b/tests/functional/hold-release/05-release.t @@ -65,7 +65,7 @@ init_workflow "${TEST_NAME_BASE}" <<'__FLOW_CONFIG__' inherit = STOP script = """ cylc__job__poll_grep_workflow_log -E \ - '1/dog1 succeeded .* task proxy removed \(completed\)' + '1/dog1/01:succeeded.* task proxy removed \(completed\)' cylc stop "${CYLC_WORKFLOW_ID}" """ __FLOW_CONFIG__ diff --git a/tests/functional/hold-release/08-hold.t b/tests/functional/hold-release/08-hold.t index 206abc1efdb..be227c8c404 100755 --- a/tests/functional/hold-release/08-hold.t +++ b/tests/functional/hold-release/08-hold.t @@ -33,14 +33,14 @@ init_workflow "${TEST_NAME_BASE}" <<'__FLOW_CONFIG__' [[holdrelease]] script = """ cylc__job__wait_cylc_message_started - cylc__job__poll_grep_workflow_log -E '1/foo .* spawned' - cylc__job__poll_grep_workflow_log -E '1/bar .* spawned' - cylc__job__poll_grep_workflow_log -E '1/cheese .* spawned' - cylc__job__poll_grep_workflow_log -E '1/jam .* spawned' - cylc__job__poll_grep_workflow_log -E '1/cat1 .* spawned' - cylc__job__poll_grep_workflow_log -E '1/cat2 .* spawned' - cylc__job__poll_grep_workflow_log -E '1/dog1 .* spawned' - cylc__job__poll_grep_workflow_log -E '1/dog2 .* spawned' + cylc__job__poll_grep_workflow_log -E '1/foo.* spawned' + cylc__job__poll_grep_workflow_log -E '1/bar.* spawned' + cylc__job__poll_grep_workflow_log -E '1/cheese.* spawned' + cylc__job__poll_grep_workflow_log -E '1/jam.* spawned' + cylc__job__poll_grep_workflow_log -E '1/cat1.* spawned' + cylc__job__poll_grep_workflow_log -E '1/cat2.* spawned' + cylc__job__poll_grep_workflow_log -E '1/dog1.* spawned' + cylc__job__poll_grep_workflow_log -E '1/dog2.* spawned' cylc hold "${CYLC_WORKFLOW_ID}//1/*FF" # inexact fam cylc hold "${CYLC_WORKFLOW_ID}//1/TOAST" # exact fam cylc hold "${CYLC_WORKFLOW_ID}//1/cat*" # inexact tasks diff --git a/tests/functional/hold-release/11-retrying/flow.cylc b/tests/functional/hold-release/11-retrying/flow.cylc index 03cd0f6b039..0fe7d68eee7 100644 --- a/tests/functional/hold-release/11-retrying/flow.cylc +++ b/tests/functional/hold-release/11-retrying/flow.cylc @@ -18,26 +18,26 @@ t-retry-able => t-analyse [[t-hold-release]] script = """ cylc__job__poll_grep_workflow_log -E \ - '1/t-retry-able running job:01.* \(received\)failed' + '1/t-retry-able/01:running.* \(received\)failed' cylc__job__poll_grep_workflow_log -E \ - '1/t-retry-able running job:01.* => waiting' + '1/t-retry-able/01:running.* => waiting' cylc__job__poll_grep_workflow_log -E \ - '1/t-retry-able waiting job:01.* retrying in PT15S' + '1/t-retry-able/01:waiting.* retrying in PT15S' cylc hold "${CYLC_WORKFLOW_ID}//1/t-retry-able" cylc__job__poll_grep_workflow_log -E \ - '1/t-retry-able waiting job:01.* => waiting\(held\)' + '1/t-retry-able/01:waiting.* => waiting\(held\)' cylc release "${CYLC_WORKFLOW_ID}//1/t-retry-able" cylc__job__poll_grep_workflow_log -E \ - '1/t-retry-able waiting\(held\) job:01.* => waiting' + '1/t-retry-able/01:waiting\(held\).* => waiting' cylc__job__poll_grep_workflow_log -E \ - '1/t-retry-able waiting job:01.* => waiting\(queued\)' + '1/t-retry-able/01:waiting.* => waiting\(queued\)' """ [[t-analyse]] script = """ diff 
--git a/tests/functional/hold-release/18-hold-cycle-globs/flow.cylc b/tests/functional/hold-release/18-hold-cycle-globs/flow.cylc index a74c7a20c3f..3ced961afa3 100644 --- a/tests/functional/hold-release/18-hold-cycle-globs/flow.cylc +++ b/tests/functional/hold-release/18-hold-cycle-globs/flow.cylc @@ -23,9 +23,9 @@ [runtime] [[holder]] script = """ - cylc__job__poll_grep_workflow_log -E '19900101T0000Z/t1 .* spawned' - cylc__job__poll_grep_workflow_log -E '20100101T0000Z/t2 .* spawned' - cylc__job__poll_grep_workflow_log -E '20300101T0000Z/t3 .* spawned' + cylc__job__poll_grep_workflow_log -E '19900101T0000Z/t1.* spawned' + cylc__job__poll_grep_workflow_log -E '20100101T0000Z/t2.* spawned' + cylc__job__poll_grep_workflow_log -E '20300101T0000Z/t3.* spawned' cylc hold "${CYLC_WORKFLOW_ID}//*/t*" """ [[releaser]] diff --git a/tests/functional/hold-release/19-no-reset-prereq-on-waiting/flow.cylc b/tests/functional/hold-release/19-no-reset-prereq-on-waiting/flow.cylc index 9ad270b4e84..e17037372e3 100644 --- a/tests/functional/hold-release/19-no-reset-prereq-on-waiting/flow.cylc +++ b/tests/functional/hold-release/19-no-reset-prereq-on-waiting/flow.cylc @@ -16,7 +16,7 @@ script = true [[holder]] script = """ - cylc__job__poll_grep_workflow_log -E '1/t1 .* spawned' + cylc__job__poll_grep_workflow_log -E '1/t1.* spawned' cylc hold "${CYLC_WORKFLOW_ID}//1/t1" """ [[releaser]] diff --git a/tests/functional/intelligent-host-selection/05-from-platform-group.t b/tests/functional/intelligent-host-selection/05-from-platform-group.t index 527cfaeba43..801372922fb 100644 --- a/tests/functional/intelligent-host-selection/05-from-platform-group.t +++ b/tests/functional/intelligent-host-selection/05-from-platform-group.t @@ -83,7 +83,7 @@ log_scan \ "platform: ${CYLC_TEST_PLATFORM} - Could not connect to unreachable_host." 
\ "platform: ${CYLC_TEST_PLATFORM} - remote init (on ${CYLC_TEST_HOST})" \ "platform: ${CYLC_TEST_PLATFORM} - remote file install (on ${CYLC_TEST_HOST})" \ - "\[1/ugly preparing job:01 flows:1\] => submitted" + "\[1/ugly/01:preparing\] => submitted" purge exit 0 diff --git a/tests/functional/pause-resume/12-pause-then-retry/flow.cylc b/tests/functional/pause-resume/12-pause-then-retry/flow.cylc index 91719dcb4e9..00d778890a1 100644 --- a/tests/functional/pause-resume/12-pause-then-retry/flow.cylc +++ b/tests/functional/pause-resume/12-pause-then-retry/flow.cylc @@ -27,19 +27,19 @@ rm -f "${CYLC_WORKFLOW_RUN_DIR}/file" cylc__job__poll_grep_workflow_log -E \ - '1/t-retry-able running .* => waiting' + '1/t-retry-able/01:running.* => waiting' cylc__job__poll_grep_workflow_log -E \ - '1/t-submit-retry-able submitted .* => waiting' + '1/t-submit-retry-able/01:submitted.* => waiting' # Resume the workflow cylc play "${CYLC_WORKFLOW_ID}" cylc__job__poll_grep_workflow_log -E \ - '1/t-retry-able waiting .* => waiting\(queued\)' + '1/t-retry-able/01:waiting.* => waiting\(queued\)' cylc__job__poll_grep_workflow_log -E \ - '1/t-submit-retry-able waiting .* => waiting\(queued\)' + '1/t-submit-retry-able/01:waiting.* => waiting\(queued\)' """ [[t-retry-able]] script = """ diff --git a/tests/functional/reload/11-retrying/flow.cylc b/tests/functional/reload/11-retrying/flow.cylc index d5b278b2798..ed4694a5294 100644 --- a/tests/functional/reload/11-retrying/flow.cylc +++ b/tests/functional/reload/11-retrying/flow.cylc @@ -22,7 +22,7 @@ execution retry delays = PT0S [[reloader]] script = """ - cylc__job__poll_grep_workflow_log -E '1/retrier running\(held\) .* => waiting\(held\)' + cylc__job__poll_grep_workflow_log -E '1/retrier/01:running\(held\).* => waiting\(held\)' cylc reload "${CYLC_WORKFLOW_ID}" cylc reload "${CYLC_WORKFLOW_ID}" cylc__job__poll_grep_workflow_log -F 'Reload completed' diff --git a/tests/functional/reload/14-waiting/flow.cylc b/tests/functional/reload/14-waiting/flow.cylc index f81ac3533b0..29b5e0fa031 100644 --- a/tests/functional/reload/14-waiting/flow.cylc +++ b/tests/functional/reload/14-waiting/flow.cylc @@ -24,7 +24,7 @@ done [[reloader]] script = """ cylc reload "${CYLC_WORKFLOW_ID}" -cylc__job__poll_grep_workflow_log -E '1/waiter .* reloaded task definition' +cylc__job__poll_grep_workflow_log -E '1/waiter/01.* reloaded task definition' rm -f "${CYLC_WORKFLOW_WORK_DIR}/1/sleeping-waiter/file" rm -f "${CYLC_WORKFLOW_WORK_DIR}/1/starter/file" """ diff --git a/tests/functional/reload/19-remote-kill/flow.cylc b/tests/functional/reload/19-remote-kill/flow.cylc index 2bd548b15f7..4a9e965a73c 100644 --- a/tests/functional/reload/19-remote-kill/flow.cylc +++ b/tests/functional/reload/19-remote-kill/flow.cylc @@ -17,7 +17,7 @@ cylc reload "${CYLC_WORKFLOW_ID}" cylc__job__poll_grep_workflow_log -F 'Reload completed' cylc kill "${CYLC_WORKFLOW_ID}//1/foo" - cylc__job__poll_grep_workflow_log -E '1/foo failed\(held\) job:01.* job killed' + cylc__job__poll_grep_workflow_log -E '1/foo/01:failed\(held\).* job killed' """ [[[job]]] execution time limit = PT1M diff --git a/tests/functional/reload/25-xtriggers.t b/tests/functional/reload/25-xtriggers.t index 8e979832a2d..51848726e00 100644 --- a/tests/functional/reload/25-xtriggers.t +++ b/tests/functional/reload/25-xtriggers.t @@ -42,8 +42,7 @@ init_workflow "${TEST_NAME_BASE}" <<'__FLOW_CONFIG__' [[reload]] script = """ # wait for "broken" to fail - cylc__job__poll_grep_workflow_log \ - '1/broken .* (received)failed/ERR' + 
cylc__job__poll_grep_workflow_log '1/broken/01.* (received)failed/ERR' # fix "broken" to allow it to pass sed -i 's/false/true/' "${CYLC_WORKFLOW_RUN_DIR}/flow.cylc" # reload the workflow diff --git a/tests/functional/reload/runahead/flow.cylc b/tests/functional/reload/runahead/flow.cylc index 60d11e6477b..c65b5e11d6d 100644 --- a/tests/functional/reload/runahead/flow.cylc +++ b/tests/functional/reload/runahead/flow.cylc @@ -20,7 +20,7 @@ script = true [[reloader]] script = """ - cylc__job__poll_grep_workflow_log -E "${CYLC_TASK_CYCLE_POINT}/foo running .*\(received\)failed" + cylc__job__poll_grep_workflow_log -E "${CYLC_TASK_CYCLE_POINT}/foo/01:running.*\(received\)failed" perl -pi -e 's/(runahead limit = )P1( # marker)/\1 P3\2/' $CYLC_WORKFLOW_RUN_DIR/flow.cylc cylc reload $CYLC_WORKFLOW_ID """ diff --git a/tests/functional/remote/09-restart-running-file-install.t b/tests/functional/remote/09-restart-running-file-install.t index deb9cf72b4d..c3249b2f5db 100644 --- a/tests/functional/remote/09-restart-running-file-install.t +++ b/tests/functional/remote/09-restart-running-file-install.t @@ -68,7 +68,7 @@ workflow_run_ok "${TEST_NAME_BASE}-restart" \ cylc play --debug --no-detach "${WORKFLOW_NAME}" LOG="${WORKFLOW_RUN_DIR}/log/scheduler/log" grep_ok "remote file install complete" "${LOG}" -grep_ok "\[1/starter running job:01 flows:1\] (received)succeeded" "${LOG}" +grep_ok "\[1/starter/01:running\] (received)succeeded" "${LOG}" ls "${WORKFLOW_RUN_DIR}/log/remote-install" > 'ls.out' cmp_ok ls.out <<__RLOGS__ 01-start-${CYLC_TEST_INSTALL_TARGET}.log diff --git a/tests/functional/restart/22-hold/flow.cylc b/tests/functional/restart/22-hold/flow.cylc index 4f2b44bdde8..0729f7eec24 100644 --- a/tests/functional/restart/22-hold/flow.cylc +++ b/tests/functional/restart/22-hold/flow.cylc @@ -17,7 +17,7 @@ [[t1]] script = """ if [[ "${CYLC_TASK_CYCLE_POINT}" == '2016' ]]; then - cylc__job__poll_grep_workflow_log -E '2016/t2 .* spawned' + cylc__job__poll_grep_workflow_log -E '2016/t2.* spawned' cylc hold "${CYLC_WORKFLOW_ID}//" //2016/t2 //2017/t2 cylc stop "${CYLC_WORKFLOW_ID}" else diff --git a/tests/functional/restart/50-two-flows/flow.cylc b/tests/functional/restart/50-two-flows/flow.cylc index 8837e8835f4..7fbeffec312 100644 --- a/tests/functional/restart/50-two-flows/flow.cylc +++ b/tests/functional/restart/50-two-flows/flow.cylc @@ -14,7 +14,7 @@ [[a]] script = """ if ((CYLC_TASK_FLOW_NUMBERS == 2)); then - cylc__job__poll_grep_workflow_log "\[1/c .* succeeded" + cylc__job__poll_grep_workflow_log "\[1/c.* succeeded" fi """ [[b, d]] @@ -23,7 +23,7 @@ if ((CYLC_TASK_FLOW_NUMBERS == 1)); then cylc trigger --flow=new --meta="cheese wizard" \ "$CYLC_WORKFLOW_ID//1/a" - cylc__job__poll_grep_workflow_log "\[1/a submitted job:02 flows:2\] => running" + cylc__job__poll_grep_workflow_log "\[1/a/02(2)submitted\] => running" cylc stop $CYLC_WORKFLOW_ID fi """ diff --git a/tests/functional/restart/58-removed-task.t b/tests/functional/restart/58-removed-task.t index 17dc19f626e..1c3b79efe05 100755 --- a/tests/functional/restart/58-removed-task.t +++ b/tests/functional/restart/58-removed-task.t @@ -39,10 +39,10 @@ workflow_run_ok "${TEST_NAME}" cylc play --no-detach "${WORKFLOW_NAME}" TEST_NAME="${TEST_NAME_BASE}-restart" workflow_run_ok "${TEST_NAME}" cylc play --set="INCL_B_C=False" --no-detach "${WORKFLOW_NAME}" -grep_workflow_log_ok "grep-3" "\[1/a running job:01 flows:1\] (polled)started" -grep_workflow_log_ok "grep-4" "\[1/b failed job:01 flows:1\] (polled)failed" +grep_workflow_log_ok "grep-3" 
"\[1/a/01:running\] (polled)started" +grep_workflow_log_ok "grep-4" "\[1/b/01:failed\] (polled)failed" # Failed (but not incomplete) task c should not have been polled. -grep_fail "\[1/c failed job:01 flows:1\] (polled)failed" "${WORKFLOW_RUN_DIR}/log/scheduler/log" +grep_fail "\[1/c/01:failed\] (polled)failed" "${WORKFLOW_RUN_DIR}/log/scheduler/log" purge diff --git a/tests/functional/restart/58-removed-task/flow.cylc b/tests/functional/restart/58-removed-task/flow.cylc index 94c5cf27b24..0584d4b54fc 100644 --- a/tests/functional/restart/58-removed-task/flow.cylc +++ b/tests/functional/restart/58-removed-task/flow.cylc @@ -22,11 +22,11 @@ [runtime] [[a]] script = """ - cylc__job__poll_grep_workflow_log "1/b .*failed" - cylc__job__poll_grep_workflow_log "1/c .*failed" + cylc__job__poll_grep_workflow_log "1/b.*failed" + cylc__job__poll_grep_workflow_log "1/c.*failed" cylc stop --now $CYLC_WORKFLOW_ID - cylc__job__poll_grep_workflow_log "1/a .*(polled)started" - cylc__job__poll_grep_workflow_log "1/b .*(polled)failed" + cylc__job__poll_grep_workflow_log "1/a.*(polled)started" + cylc__job__poll_grep_workflow_log "1/b.*(polled)failed" """ [[b, c]] script = "false" diff --git a/tests/functional/restart/58-waiting-manual-triggered.t b/tests/functional/restart/58-waiting-manual-triggered.t index efba9f42b70..455cb289592 100644 --- a/tests/functional/restart/58-waiting-manual-triggered.t +++ b/tests/functional/restart/58-waiting-manual-triggered.t @@ -41,7 +41,7 @@ __EOF__ # It should restart and shut down normally, not stall with 2/foo waiting on 1/foo. workflow_run_ok "${TEST_NAME_BASE}-restart" cylc play --no-detach "${WORKFLOW_NAME}" # Check that 2/foo job 02 did run before shutdown. -grep_workflow_log_ok "${TEST_NAME_BASE}-grep" "\[2\/foo running job:02 flows:1\] => succeeded" +grep_workflow_log_ok "${TEST_NAME_BASE}-grep" "\[2\/foo\/02:running\] => succeeded" purge exit diff --git a/tests/functional/spawn-on-demand/06-stop-flow-2/flow.cylc b/tests/functional/spawn-on-demand/06-stop-flow-2/flow.cylc index 72f50fe18ba..8aefe4c5a44 100644 --- a/tests/functional/spawn-on-demand/06-stop-flow-2/flow.cylc +++ b/tests/functional/spawn-on-demand/06-stop-flow-2/flow.cylc @@ -21,6 +21,6 @@ fi script = """ if (( CYLC_TASK_SUBMIT_NUMBER == 1 )); then cylc trigger --flow=new --meta=other "${CYLC_WORKFLOW_ID}//1/foo" - cylc__job__poll_grep_workflow_log -E "1/bar running job:02.* => succeeded" + cylc__job__poll_grep_workflow_log -E "1/bar/02\(2\):running.* => succeeded" fi """ diff --git a/tests/functional/spawn-on-demand/07-abs-triggers/flow.cylc b/tests/functional/spawn-on-demand/07-abs-triggers/flow.cylc index 8084e5c0abe..4ea90d3667a 100644 --- a/tests/functional/spawn-on-demand/07-abs-triggers/flow.cylc +++ b/tests/functional/spawn-on-demand/07-abs-triggers/flow.cylc @@ -16,7 +16,7 @@ script = """ # Ensure that 1,2/bar are spawned by 1,2/foo and not by 2/start # (so the scheduler must update their prereqs when 2/start finishes). - cylc__job__poll_grep_workflow_log -E "2/bar .* spawned" + cylc__job__poll_grep_workflow_log -E "2/bar.* spawned" """ [[foo]] [[bar]] diff --git a/tests/functional/spawn-on-demand/09-set-outputs/flow.cylc b/tests/functional/spawn-on-demand/09-set-outputs/flow.cylc index 775bb3d602f..8254a912206 100644 --- a/tests/functional/spawn-on-demand/09-set-outputs/flow.cylc +++ b/tests/functional/spawn-on-demand/09-set-outputs/flow.cylc @@ -35,7 +35,7 @@ [[foo]] # Hang about until setter is finished. 
script = """ - cylc__job__poll_grep_workflow_log -E "1/setter .* => succeeded" + cylc__job__poll_grep_workflow_log -E "1/setter.* => succeeded" """ [[bar]] script = true @@ -46,7 +46,7 @@ cylc set --flow=2 --output=out1 --output=out2 "${CYLC_WORKFLOW_ID}//1/foo" # Set bar outputs after it is gone from the pool. - cylc__job__poll_grep_workflow_log -E "1/bar .*task proxy removed" + cylc__job__poll_grep_workflow_log -E "1/bar.*task proxy removed" cylc set --flow=2 --output=out1 --output=out2 "${CYLC_WORKFLOW_ID}//1/bar" """ [[qux, quw, fux, fuw]] diff --git a/tests/functional/spawn-on-demand/10-retrigger/flow.cylc b/tests/functional/spawn-on-demand/10-retrigger/flow.cylc index 2bdd4365a07..7e9149ce3c9 100644 --- a/tests/functional/spawn-on-demand/10-retrigger/flow.cylc +++ b/tests/functional/spawn-on-demand/10-retrigger/flow.cylc @@ -18,7 +18,7 @@ """ [[triggerer]] script = """ - cylc__job__poll_grep_workflow_log -E '1/oops running .* \(received\)failed' + cylc__job__poll_grep_workflow_log -E '1/oops/01:running.* \(received\)failed' cylc trigger "${CYLC_WORKFLOW_ID}//1/oops" """ [[foo, bar]] diff --git a/tests/functional/spawn-on-demand/11-hold-not-spawned/flow.cylc b/tests/functional/spawn-on-demand/11-hold-not-spawned/flow.cylc index 890d73f78be..c47ca3c93c4 100644 --- a/tests/functional/spawn-on-demand/11-hold-not-spawned/flow.cylc +++ b/tests/functional/spawn-on-demand/11-hold-not-spawned/flow.cylc @@ -15,6 +15,6 @@ script = true [[stopper]] script = """ - cylc__job__poll_grep_workflow_log "\[1/holdee .* holding \(as requested earlier\)" -E + cylc__job__poll_grep_workflow_log "\[1/holdee.* holding \(as requested earlier\)" -E cylc stop $CYLC_WORKFLOW_ID """ diff --git a/tests/functional/spawn-on-demand/14-trigger-flow-blocker/flow.cylc b/tests/functional/spawn-on-demand/14-trigger-flow-blocker/flow.cylc index 0849b69bb75..9c205e301c5 100644 --- a/tests/functional/spawn-on-demand/14-trigger-flow-blocker/flow.cylc +++ b/tests/functional/spawn-on-demand/14-trigger-flow-blocker/flow.cylc @@ -23,7 +23,7 @@ cylc trigger --flow=none $CYLC_WORKFLOW_ID//3/foo elif ((CYLC_TASK_CYCLE_POINT == 3)); then # Run until I get merged. 
- cylc__job__poll_grep_workflow_log -E "3/foo .* merged in flow\(s\) 1" + cylc__job__poll_grep_workflow_log -E "3/foo.* merged in flow\(s\) 1" fi """ [[bar]] diff --git a/tests/functional/spawn-on-demand/18-submitted.t b/tests/functional/spawn-on-demand/18-submitted.t index de5041f4ca1..30f022ebafd 100644 --- a/tests/functional/spawn-on-demand/18-submitted.t +++ b/tests/functional/spawn-on-demand/18-submitted.t @@ -40,7 +40,7 @@ reftest_run for number in 1 2 3; do grep_workflow_log_ok \ "${TEST_NAME_BASE}-a${number}" \ - "${number}/a${number} .* did not complete required outputs: \['submitted'\]" + "${number}/a${number}.* did not complete required outputs: \['submitted'\]" done purge diff --git a/tests/functional/spawn-on-demand/19-submitted-compat.t b/tests/functional/spawn-on-demand/19-submitted-compat.t index d529dfb4183..98c603d55a7 100644 --- a/tests/functional/spawn-on-demand/19-submitted-compat.t +++ b/tests/functional/spawn-on-demand/19-submitted-compat.t @@ -51,7 +51,7 @@ grep_workflow_log_ok \ 'Backward compatibility mode ON' grep_workflow_log_ok \ "${TEST_NAME_BASE}-a-complete" \ - '\[1/a running job:01 flows:1\] => succeeded' + '\[1/a/01:running\] => succeeded' grep_workflow_log_ok \ "${TEST_NAME_BASE}-b-incomplete" \ "1/b did not complete required outputs: \['submitted', 'succeeded'\]" diff --git a/tests/functional/special/08-clock-trigger-retry.t b/tests/functional/special/08-clock-trigger-retry.t index d4f591d4870..d04d9fd4623 100644 --- a/tests/functional/special/08-clock-trigger-retry.t +++ b/tests/functional/special/08-clock-trigger-retry.t @@ -42,7 +42,7 @@ workflow_run_ok "${TEST_NAME_BASE}-run" cylc play --no-detach "$WORKFLOW_NAME" log_scan "${TEST_NAME_BASE}-log-scan" \ "${WORKFLOW_RUN_DIR}/log/scheduler/log" 2 1 \ - "\[20150101.*/foo .* job:01 .* retrying in PT5S" \ + "\[20150101.*/foo/01.* retrying in PT5S" \ "xtrigger satisfied: _cylc_retry_20150101" # (if task resubmits immediately instead of waiting PT5S, xtrigger msg will not appear) diff --git a/tests/functional/triggering/19-and-suicide/flow.cylc b/tests/functional/triggering/19-and-suicide/flow.cylc index cf7ae49d129..670c361fc96 100644 --- a/tests/functional/triggering/19-and-suicide/flow.cylc +++ b/tests/functional/triggering/19-and-suicide/flow.cylc @@ -16,7 +16,7 @@ [[t0]] # https://github.com/cylc/cylc-flow/issues/2655 # "1/t2" should not suicide on "1/t1:failed" - script = cylc__job__poll_grep_workflow_log -E '1/t1 .* \(received\)failed' + script = cylc__job__poll_grep_workflow_log -E '1/t1.* \(received\)failed' [[t1]] script = false [[t2]] diff --git a/tests/integration/test_reload.py b/tests/integration/test_reload.py index 9ee592e57c7..6f3c0a7155d 100644 --- a/tests/integration/test_reload.py +++ b/tests/integration/test_reload.py @@ -99,11 +99,11 @@ def change_state(_=0): [ # the task should have entered the preparing state before the # reload was requested - '[1/foo waiting(queued) job:00 flows:1] => preparing(queued)', + '[1/foo/00:waiting(queued)] => preparing(queued)', # the reload should have put the workflow into the paused state 'Pausing the workflow: Reloading workflow', # reload should have waited for the task to submit - '[1/foo preparing(queued) job:00 flows:1]' + '[1/foo/00:preparing(queued)]' ' => submitted(queued)', # before then reloading the workflow config 'Reloading the workflow definition.', diff --git a/tests/integration/test_task_pool.py b/tests/integration/test_task_pool.py index 3d48ad8ddc8..adad15ebf4f 100644 --- a/tests/integration/test_task_pool.py +++ 
b/tests/integration/test_task_pool.py @@ -1301,17 +1301,17 @@ async def test_set_failed_complete( schd.pool.task_events_mgr.process_message(foo, 1, "failed") assert log_filter( - log, regex="1/foo .* handling missed event: submitted") + log, regex="1/foo.* handling missed event: submitted") assert log_filter( - log, regex="1/foo .* handling missed event: started") + log, regex="1/foo.* handling missed event: started") assert log_filter( - log, regex="1/foo failed.* did not complete required outputs") + log, regex="failed.* did not complete required outputs") # Set failed task complete via default "set" args. schd.pool.set([foo.identity], None, None, ['all']) assert log_filter( - log, contains="Completing output: 1/foo:succeeded") + log, contains='completing output "succeeded" of 1/foo') db_outputs = db_select( schd, True, 'task_outputs', 'outputs', @@ -1434,7 +1434,7 @@ async def test_set_outputs_live( # it should complete the implied output y too. assert log_filter( - log, contains="Completing implied output: 1/foo:y") + log, contains='completing implied output "y" of 1/foo') async def test_set_outputs_future(
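Note on the updated log patterns: the test-regex changes throughout this patch all follow the new task identity format introduced in TaskProxy.__str__ above, i.e. point/name/submit_num(flows):state, with the flow list omitted for the default flow {1}. A minimal standalone sketch of that formatting, for reference only (the helper name task_log_key is illustrative and not part of the patch):

    # Sketch of the task log key format assumed by the updated log greps:
    #   <point>/<name>/<submit_num>[(<flows>)]:<state>
    # The "(<flows>)" part is dropped when flow_nums is just the default {1}.
    def task_log_key(point, name, submit_num, flow_nums, state):
        flows = ""
        if flow_nums != {1}:
            flows = "(" + (",".join(str(i) for i in flow_nums) or "none") + ")"
        return f"{point}/{name}/{submit_num:02d}{flows}:{state}"

    # Examples matching patterns grepped for in the tests above:
    #   task_log_key("1", "foo", 1, {1}, "running")   -> "1/foo/01:running"
    #   task_log_key("1", "a", 2, {2}, "submitted")   -> "1/a/02(2):submitted"
    #   task_log_key("1", "a", 2, set(), "running")   -> "1/a/02(none):running"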