From 13448362f48f754035af4a90bd4aa474f1e24386 Mon Sep 17 00:00:00 2001 From: WXTIM <26465611+wxtim@users.noreply.github.com> Date: Thu, 25 Apr 2024 13:56:33 +0100 Subject: [PATCH 01/29] Implement Skip Mode * Add `[runtime][]run mode` and `[runtime][][skip]`. * Spin run mode functionality into separate modules. * Run sim mode check with every main loop - we don't know if any tasks are in sim mode from the scheduler, but it doesn't cost much to check if none are. * Implemented separate job "submission" pathway switching. * Implemented skip mode, including output control logic. * Add a linter and a validation check for tasks in nonlive modes, and for combinations of outputs * Enabled setting outputs as if task ran in skip mode using `cylc set --out skip`. * Testing for the above. Schema: use `Enum` for task run mode instead of `String` (#61) * Schema: use `Enum` for task run mode instead of `String` * Tidy fixup merge fix broken functional test Improve cylc set --out skip * Improve documentation of feature in cylc set --help * Allow cylc set --out=skip,optional_output * Test the above Remove test: We don't want users opting out of validating [runtime][ns][simulation/skip] because we can now changes these in a running workflow. stop users opting out of validating workflows without validating ski/simulation taskdef sections added tests for db entries in nonlive mode ensure db entries for all four modes are correct. move the change file toi the correct name get localhost of platforms 'simulation' 'skip' or 'dummy' not defined. (They probably shouldn't be, but that's a site specific choice...) 
fix tests with extra messages surfaces by using log_filter make cylc clean and remote tidy not try to clean or tidy platforms stop dummy mode appearing to submit twice Prevent cleanup from attempting to remote clean platforms skip and simulation Update cylc/flow/run_modes/skip.py Co-authored-by: Oliver Sanders fix small issues from OS review response to review: Make satisfaction method correct according the proposal Response to review * Allow only run modes skip and live to be selectable in the config. * Disallow workflow run modes sim and dummy from being overridden. * Attach Run mode to task_proxy rather than the task def. Response to review * Allow only run modes skip and live to be selectable in the config. * Disallow workflow run modes sim and dummy from being overridden. * Attach Run mode to task_proxy rather than the task def. don't run sim time check unless workflow in sim mode test that skip mode is only applied to a single task. remove now illegal items from test Response to review: - Remove Workflow Mode for tasks and make them default to live. - Ensure that we are checking (assert) log_filters. - Remove need for polling in functional test. 
:) usin enums Apply suggestions from code review Co-authored-by: Oliver Sanders --- changes.d/6039.feat.md | 1 + cylc/flow/cfgspec/workflow.py | 59 +++ cylc/flow/commands.py | 9 +- cylc/flow/config.py | 10 +- cylc/flow/data_messages.proto | 1 + cylc/flow/data_messages_pb2.py | 100 ++-- cylc/flow/data_messages_pb2.pyi | 68 +-- cylc/flow/data_store_mgr.py | 1 + cylc/flow/network/schema.py | 42 +- cylc/flow/platforms.py | 15 +- cylc/flow/prerequisite.py | 21 +- cylc/flow/run_modes/dummy.py | 125 +++++ cylc/flow/run_modes/nonlive.py | 55 +++ cylc/flow/{ => run_modes}/simulation.py | 203 +++++--- cylc/flow/run_modes/skip.py | 161 ++++++ cylc/flow/scheduler.py | 28 +- cylc/flow/scheduler_cli.py | 12 +- cylc/flow/scripts/lint.py | 46 +- cylc/flow/scripts/set.py | 9 +- cylc/flow/scripts/validate.py | 8 +- cylc/flow/task_events_mgr.py | 28 +- cylc/flow/task_job_mgr.py | 120 +++-- cylc/flow/task_outputs.py | 26 +- cylc/flow/task_pool.py | 28 +- cylc/flow/task_proxy.py | 7 +- cylc/flow/task_state.py | 74 ++- cylc/flow/unicode_rules.py | 4 +- cylc/flow/workflow_status.py | 20 - .../cylc-config/00-simple/section2.stdout | 52 ++ tests/functional/cylc-set/09-set-skip.t | 28 ++ .../functional/cylc-set/09-set-skip/flow.cylc | 50 ++ .../cylc-set/09-set-skip/reference.log | 8 + .../{modes => run_modes}/01-dummy.t | 0 .../{modes => run_modes}/01-dummy/flow.cylc | 0 .../01-dummy/reference.log | 0 .../02-dummy-message-outputs.t | 0 .../02-dummy-message-outputs/flow.cylc | 0 .../02-dummy-message-outputs/reference.log | 0 .../{modes => run_modes}/03-simulation.t | 0 .../03-simulation/flow.cylc | 0 .../03-simulation/reference.log | 0 .../04-simulation-runtime.t | 0 .../04-simulation-runtime/flow.cylc | 0 .../04-simulation-runtime/reference.log | 0 .../{modes => run_modes}/05-sim-trigger.t | 0 .../05-sim-trigger/flow.cylc | 0 .../05-sim-trigger/reference.log | 0 .../run_modes/06-run-mode-overrides.t | 66 +++ .../run_modes/06-run-mode-overrides/flow.cylc | 28 ++ .../{modes => 
run_modes}/test_header | 0 .../run_modes/test_mode_overrides.py | 152 ++++++ tests/integration/run_modes/test_nonlive.py | 120 +++++ .../integration/run_modes/test_simulation.py | 437 ++++++++++++++++- tests/integration/run_modes/test_skip.py | 249 ++++++++++ .../scripts/test_validate_integration.py | 17 +- tests/integration/test_config.py | 52 +- tests/integration/test_simulation.py | 459 ------------------ tests/integration/test_task_events_mgr.py | 2 +- tests/integration/test_task_pool.py | 85 +++- tests/integration/utils/flow_tools.py | 8 + tests/unit/run_modes/test_dummy.py | 40 ++ tests/unit/run_modes/test_nonlive.py | 51 ++ tests/unit/{ => run_modes}/test_simulation.py | 26 +- tests/unit/run_modes/test_skip.py | 101 ++++ tests/unit/scripts/test_lint.py | 3 + tests/unit/test_config.py | 36 +- tests/unit/test_platforms.py | 3 +- tests/unit/test_task_outputs.py | 28 +- tests/unit/test_task_remote_mgr.py | 4 + tests/unit/test_task_state.py | 30 ++ 70 files changed, 2586 insertions(+), 830 deletions(-) create mode 100644 changes.d/6039.feat.md create mode 100644 cylc/flow/run_modes/dummy.py create mode 100644 cylc/flow/run_modes/nonlive.py rename cylc/flow/{ => run_modes}/simulation.py (62%) create mode 100644 cylc/flow/run_modes/skip.py create mode 100644 tests/functional/cylc-set/09-set-skip.t create mode 100644 tests/functional/cylc-set/09-set-skip/flow.cylc create mode 100644 tests/functional/cylc-set/09-set-skip/reference.log rename tests/functional/{modes => run_modes}/01-dummy.t (100%) rename tests/functional/{modes => run_modes}/01-dummy/flow.cylc (100%) rename tests/functional/{modes => run_modes}/01-dummy/reference.log (100%) rename tests/functional/{modes => run_modes}/02-dummy-message-outputs.t (100%) rename tests/functional/{modes => run_modes}/02-dummy-message-outputs/flow.cylc (100%) rename tests/functional/{modes => run_modes}/02-dummy-message-outputs/reference.log (100%) rename tests/functional/{modes => run_modes}/03-simulation.t (100%) rename 
tests/functional/{modes => run_modes}/03-simulation/flow.cylc (100%) rename tests/functional/{modes => run_modes}/03-simulation/reference.log (100%) rename tests/functional/{modes => run_modes}/04-simulation-runtime.t (100%) rename tests/functional/{modes => run_modes}/04-simulation-runtime/flow.cylc (100%) rename tests/functional/{modes => run_modes}/04-simulation-runtime/reference.log (100%) rename tests/functional/{modes => run_modes}/05-sim-trigger.t (100%) rename tests/functional/{modes => run_modes}/05-sim-trigger/flow.cylc (100%) rename tests/functional/{modes => run_modes}/05-sim-trigger/reference.log (100%) create mode 100644 tests/functional/run_modes/06-run-mode-overrides.t create mode 100644 tests/functional/run_modes/06-run-mode-overrides/flow.cylc rename tests/functional/{modes => run_modes}/test_header (100%) create mode 100644 tests/integration/run_modes/test_mode_overrides.py create mode 100644 tests/integration/run_modes/test_nonlive.py create mode 100644 tests/integration/run_modes/test_skip.py delete mode 100644 tests/integration/test_simulation.py create mode 100644 tests/unit/run_modes/test_dummy.py create mode 100644 tests/unit/run_modes/test_nonlive.py rename tests/unit/{ => run_modes}/test_simulation.py (86%) create mode 100644 tests/unit/run_modes/test_skip.py diff --git a/changes.d/6039.feat.md b/changes.d/6039.feat.md new file mode 100644 index 00000000000..d3a39814e2c --- /dev/null +++ b/changes.d/6039.feat.md @@ -0,0 +1 @@ +Add a new mode task run mode "skip" which overrides workflow live mode task submission. 
\ No newline at end of file diff --git a/cylc/flow/cfgspec/workflow.py b/cylc/flow/cfgspec/workflow.py index 934897bdbb4..dce1b0316a0 100644 --- a/cylc/flow/cfgspec/workflow.py +++ b/cylc/flow/cfgspec/workflow.py @@ -57,6 +57,8 @@ fail_if_platform_and_host_conflict, get_platform_deprecated_settings, is_platform_definition_subshell) from cylc.flow.task_events_mgr import EventData +from cylc.flow.task_state import RunMode + # Regex to check whether a string is a command REC_COMMAND = re.compile(r'(`|\$\()\s*(.*)\s*([`)])$') @@ -1334,6 +1336,27 @@ def get_script_common_text(this: str, example: Optional[str] = None): "[platforms][]submission retry delays" ) ) + Conf( + 'run mode', VDR.V_STRING, + options=list(RunMode.OVERRIDING_MODES.value) + [''], + default='', + desc=f''' + For a workflow run in live mode run this task in skip + mode. + + {RunMode.LIVE.value}: + {RunMode.LIVE.describe()} + {RunMode.SKIP.value}: + {RunMode.SKIP.describe()} + + + .. seealso:: + + :ref:`task-run-modes` + + .. versionadded:: 8.4.0 + + ''') with Conf('meta', desc=r''' Metadata for the task or task family. @@ -1406,7 +1429,43 @@ def get_script_common_text(this: str, example: Optional[str] = None): determine how an event handler responds to task failure events. ''') + with Conf('skip', desc=''' + Task configuration for task :ref:`task-run-modes.skip`. + For a full description of skip run mode see + :ref:`task-run-modes.skip`. + + .. versionadded:: 8.4.0 + '''): + Conf( + 'outputs', + VDR.V_STRING_LIST, + desc=''' + Outputs to be emitted by a task in skip mode. + + * By default, all required outputs will be generated + plus succeeded if success is optional. + * If skip-mode outputs is specified and does not + include either succeeded or failed then succeeded + will be produced. + * The outputs submitted and started are always + produced and do not need to be defined in outputs. + + .. 
versionadded:: 8.4.0 + ''' + ) + Conf( + 'disable task event handlers', + VDR.V_BOOLEAN, + default=True, + desc=''' + Task event handlers are turned off by default for + skip mode tasks. Changing this setting to ``False`` + will re-enable task event handlers. + + .. versionadded:: 8.4.0 + ''' + ) with Conf('simulation', desc=''' Task configuration for workflow *simulation* and *dummy* run modes. diff --git a/cylc/flow/commands.py b/cylc/flow/commands.py index b8b777e0957..d7f3ffc5b4e 100644 --- a/cylc/flow/commands.py +++ b/cylc/flow/commands.py @@ -77,8 +77,9 @@ from cylc.flow.network.schema import WorkflowStopMode from cylc.flow.parsec.exceptions import ParsecError from cylc.flow.task_id import TaskID -from cylc.flow.task_state import TASK_STATUSES_ACTIVE, TASK_STATUS_FAILED -from cylc.flow.workflow_status import RunMode, StopMode +from cylc.flow.task_state import ( + TASK_STATUSES_ACTIVE, TASK_STATUS_FAILED, RunMode) +from cylc.flow.workflow_status import StopMode from metomi.isodatetime.parsers import TimePointParser @@ -247,7 +248,7 @@ async def poll_tasks(schd: 'Scheduler', tasks: Iterable[str]): """Poll pollable tasks or a task or family if options are provided.""" validate.is_tasks(tasks) yield - if schd.get_run_mode() == RunMode.SIMULATION: + if schd.get_run_mode() == RunMode.SIMULATION.value: yield 0 itasks, _, bad_items = schd.pool.filter_task_proxies(tasks) schd.task_job_mgr.poll_task_jobs(schd.workflow, itasks) @@ -260,7 +261,7 @@ async def kill_tasks(schd: 'Scheduler', tasks: Iterable[str]): validate.is_tasks(tasks) yield itasks, _, bad_items = schd.pool.filter_task_proxies(tasks) - if schd.get_run_mode() == RunMode.SIMULATION: + if schd.get_run_mode() == RunMode.SIMULATION.value: for itask in itasks: if itask.state(*TASK_STATUSES_ACTIVE): itask.state_reset(TASK_STATUS_FAILED) diff --git a/cylc/flow/config.py b/cylc/flow/config.py index 786095a215d..ef6300b7d43 100644 --- a/cylc/flow/config.py +++ b/cylc/flow/config.py @@ -82,7 +82,7 @@ ) from 
cylc.flow.print_tree import print_tree from cylc.flow.task_qualifiers import ALT_QUALIFIERS -from cylc.flow.simulation import configure_sim_modes +from cylc.flow.run_modes.nonlive import run_mode_validate_checks from cylc.flow.subprocctx import SubFuncContext from cylc.flow.task_events_mgr import ( EventData, @@ -99,6 +99,7 @@ get_trigger_completion_variable_maps, trigger_to_completion_variable, ) +from cylc.flow.task_state import RunMode from cylc.flow.task_trigger import TaskTrigger, Dependency from cylc.flow.taskdef import TaskDef from cylc.flow.unicode_rules import ( @@ -114,7 +115,6 @@ WorkflowFiles, check_deprecation, ) -from cylc.flow.workflow_status import RunMode from cylc.flow.xtrigger_mgr import XtriggerCollator if TYPE_CHECKING: @@ -513,10 +513,6 @@ def __init__( self.process_runahead_limit() - run_mode = self.run_mode() - if run_mode in {RunMode.SIMULATION, RunMode.DUMMY}: - configure_sim_modes(self.taskdefs.values(), run_mode) - self.configure_workflow_state_polling_tasks() self._check_task_event_handlers() @@ -567,6 +563,8 @@ def __init__( self.mem_log("config.py: end init config") + run_mode_validate_checks(self.taskdefs) + @staticmethod def _warn_if_queues_have_implicit_tasks( config, taskdefs, max_warning_lines diff --git a/cylc/flow/data_messages.proto b/cylc/flow/data_messages.proto index c0af5094c0d..f259a735f0a 100644 --- a/cylc/flow/data_messages.proto +++ b/cylc/flow/data_messages.proto @@ -128,6 +128,7 @@ message PbRuntime { optional string environment = 16; optional string outputs = 17; optional string completion = 18; + optional string run_mode = 19; } diff --git a/cylc/flow/data_messages_pb2.py b/cylc/flow/data_messages_pb2.py index 7fb5ae84d24..0f16888d6bd 100644 --- a/cylc/flow/data_messages_pb2.py +++ b/cylc/flow/data_messages_pb2.py @@ -14,7 +14,7 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x64\x61ta_messages.proto\"\x96\x01\n\x06PbMeta\x12\x12\n\x05title\x18\x01 
\x01(\tH\x00\x88\x01\x01\x12\x18\n\x0b\x64\x65scription\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x10\n\x03URL\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x19\n\x0cuser_defined\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\x08\n\x06_titleB\x0e\n\x0c_descriptionB\x06\n\x04_URLB\x0f\n\r_user_defined\"\xaa\x01\n\nPbTimeZone\x12\x12\n\x05hours\x18\x01 \x01(\x05H\x00\x88\x01\x01\x12\x14\n\x07minutes\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x19\n\x0cstring_basic\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1c\n\x0fstring_extended\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\x08\n\x06_hoursB\n\n\x08_minutesB\x0f\n\r_string_basicB\x12\n\x10_string_extended\"\'\n\x0fPbTaskProxyRefs\x12\x14\n\x0ctask_proxies\x18\x01 \x03(\t\"\xd4\x0c\n\nPbWorkflow\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x13\n\x06status\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x11\n\x04host\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x11\n\x04port\x18\x06 \x01(\x05H\x05\x88\x01\x01\x12\x12\n\x05owner\x18\x07 \x01(\tH\x06\x88\x01\x01\x12\r\n\x05tasks\x18\x08 \x03(\t\x12\x10\n\x08\x66\x61milies\x18\t \x03(\t\x12\x1c\n\x05\x65\x64ges\x18\n \x01(\x0b\x32\x08.PbEdgesH\x07\x88\x01\x01\x12\x18\n\x0b\x61pi_version\x18\x0b \x01(\x05H\x08\x88\x01\x01\x12\x19\n\x0c\x63ylc_version\x18\x0c \x01(\tH\t\x88\x01\x01\x12\x19\n\x0clast_updated\x18\r \x01(\x01H\n\x88\x01\x01\x12\x1a\n\x04meta\x18\x0e \x01(\x0b\x32\x07.PbMetaH\x0b\x88\x01\x01\x12&\n\x19newest_active_cycle_point\x18\x10 \x01(\tH\x0c\x88\x01\x01\x12&\n\x19oldest_active_cycle_point\x18\x11 \x01(\tH\r\x88\x01\x01\x12\x15\n\x08reloaded\x18\x12 \x01(\x08H\x0e\x88\x01\x01\x12\x15\n\x08run_mode\x18\x13 \x01(\tH\x0f\x88\x01\x01\x12\x19\n\x0c\x63ycling_mode\x18\x14 \x01(\tH\x10\x88\x01\x01\x12\x32\n\x0cstate_totals\x18\x15 \x03(\x0b\x32\x1c.PbWorkflow.StateTotalsEntry\x12\x1d\n\x10workflow_log_dir\x18\x16 \x01(\tH\x11\x88\x01\x01\x12(\n\x0etime_zone_info\x18\x17 
\x01(\x0b\x32\x0b.PbTimeZoneH\x12\x88\x01\x01\x12\x17\n\ntree_depth\x18\x18 \x01(\x05H\x13\x88\x01\x01\x12\x15\n\rjob_log_names\x18\x19 \x03(\t\x12\x14\n\x0cns_def_order\x18\x1a \x03(\t\x12\x0e\n\x06states\x18\x1b \x03(\t\x12\x14\n\x0ctask_proxies\x18\x1c \x03(\t\x12\x16\n\x0e\x66\x61mily_proxies\x18\x1d \x03(\t\x12\x17\n\nstatus_msg\x18\x1e \x01(\tH\x14\x88\x01\x01\x12\x1a\n\ris_held_total\x18\x1f \x01(\x05H\x15\x88\x01\x01\x12\x0c\n\x04jobs\x18 \x03(\t\x12\x15\n\x08pub_port\x18! \x01(\x05H\x16\x88\x01\x01\x12\x17\n\nbroadcasts\x18\" \x01(\tH\x17\x88\x01\x01\x12\x1c\n\x0fis_queued_total\x18# \x01(\x05H\x18\x88\x01\x01\x12=\n\x12latest_state_tasks\x18$ \x03(\x0b\x32!.PbWorkflow.LatestStateTasksEntry\x12\x13\n\x06pruned\x18% \x01(\x08H\x19\x88\x01\x01\x12\x1e\n\x11is_runahead_total\x18& \x01(\x05H\x1a\x88\x01\x01\x12\x1b\n\x0estates_updated\x18\' \x01(\x08H\x1b\x88\x01\x01\x12\x1c\n\x0fn_edge_distance\x18( \x01(\x05H\x1c\x88\x01\x01\x1a\x32\n\x10StateTotalsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1aI\n\x15LatestStateTasksEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1f\n\x05value\x18\x02 \x01(\x0b\x32\x10.PbTaskProxyRefs:\x02\x38\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_nameB\t\n\x07_statusB\x07\n\x05_hostB\x07\n\x05_portB\x08\n\x06_ownerB\x08\n\x06_edgesB\x0e\n\x0c_api_versionB\x0f\n\r_cylc_versionB\x0f\n\r_last_updatedB\x07\n\x05_metaB\x1c\n\x1a_newest_active_cycle_pointB\x1c\n\x1a_oldest_active_cycle_pointB\x0b\n\t_reloadedB\x0b\n\t_run_modeB\x0f\n\r_cycling_modeB\x13\n\x11_workflow_log_dirB\x11\n\x0f_time_zone_infoB\r\n\x0b_tree_depthB\r\n\x0b_status_msgB\x10\n\x0e_is_held_totalB\x0b\n\t_pub_portB\r\n\x0b_broadcastsB\x12\n\x10_is_queued_totalB\t\n\x07_prunedB\x14\n\x12_is_runahead_totalB\x11\n\x0f_states_updatedB\x12\n\x10_n_edge_distance\"\xe1\x06\n\tPbRuntime\x12\x15\n\x08platform\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x06script\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0binit_script\x18\x03 
\x01(\tH\x02\x88\x01\x01\x12\x17\n\nenv_script\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x17\n\nerr_script\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x18\n\x0b\x65xit_script\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x17\n\npre_script\x18\x07 \x01(\tH\x06\x88\x01\x01\x12\x18\n\x0bpost_script\x18\x08 \x01(\tH\x07\x88\x01\x01\x12\x19\n\x0cwork_sub_dir\x18\t \x01(\tH\x08\x88\x01\x01\x12(\n\x1b\x65xecution_polling_intervals\x18\n \x01(\tH\t\x88\x01\x01\x12#\n\x16\x65xecution_retry_delays\x18\x0b \x01(\tH\n\x88\x01\x01\x12!\n\x14\x65xecution_time_limit\x18\x0c \x01(\tH\x0b\x88\x01\x01\x12)\n\x1csubmission_polling_intervals\x18\r \x01(\tH\x0c\x88\x01\x01\x12$\n\x17submission_retry_delays\x18\x0e \x01(\tH\r\x88\x01\x01\x12\x17\n\ndirectives\x18\x0f \x01(\tH\x0e\x88\x01\x01\x12\x18\n\x0b\x65nvironment\x18\x10 \x01(\tH\x0f\x88\x01\x01\x12\x14\n\x07outputs\x18\x11 \x01(\tH\x10\x88\x01\x01\x12\x17\n\ncompletion\x18\x12 \x01(\tH\x11\x88\x01\x01\x42\x0b\n\t_platformB\t\n\x07_scriptB\x0e\n\x0c_init_scriptB\r\n\x0b_env_scriptB\r\n\x0b_err_scriptB\x0e\n\x0c_exit_scriptB\r\n\x0b_pre_scriptB\x0e\n\x0c_post_scriptB\x0f\n\r_work_sub_dirB\x1e\n\x1c_execution_polling_intervalsB\x19\n\x17_execution_retry_delaysB\x17\n\x15_execution_time_limitB\x1f\n\x1d_submission_polling_intervalsB\x1a\n\x18_submission_retry_delaysB\r\n\x0b_directivesB\x0e\n\x0c_environmentB\n\n\x08_outputsB\r\n\x0b_completion\"\x9d\x05\n\x05PbJob\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x17\n\nsubmit_num\x18\x03 \x01(\x05H\x02\x88\x01\x01\x12\x12\n\x05state\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x17\n\ntask_proxy\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x1b\n\x0esubmitted_time\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x19\n\x0cstarted_time\x18\x07 \x01(\tH\x06\x88\x01\x01\x12\x1a\n\rfinished_time\x18\x08 \x01(\tH\x07\x88\x01\x01\x12\x13\n\x06job_id\x18\t \x01(\tH\x08\x88\x01\x01\x12\x1c\n\x0fjob_runner_name\x18\n \x01(\tH\t\x88\x01\x01\x12!\n\x14\x65xecution_time_limit\x18\x0e 
\x01(\x02H\n\x88\x01\x01\x12\x15\n\x08platform\x18\x0f \x01(\tH\x0b\x88\x01\x01\x12\x18\n\x0bjob_log_dir\x18\x11 \x01(\tH\x0c\x88\x01\x01\x12\x11\n\x04name\x18\x1e \x01(\tH\r\x88\x01\x01\x12\x18\n\x0b\x63ycle_point\x18\x1f \x01(\tH\x0e\x88\x01\x01\x12\x10\n\x08messages\x18 \x03(\t\x12 \n\x07runtime\x18! \x01(\x0b\x32\n.PbRuntimeH\x0f\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\r\n\x0b_submit_numB\x08\n\x06_stateB\r\n\x0b_task_proxyB\x11\n\x0f_submitted_timeB\x0f\n\r_started_timeB\x10\n\x0e_finished_timeB\t\n\x07_job_idB\x12\n\x10_job_runner_nameB\x17\n\x15_execution_time_limitB\x0b\n\t_platformB\x0e\n\x0c_job_log_dirB\x07\n\x05_nameB\x0e\n\x0c_cycle_pointB\n\n\x08_runtimeJ\x04\x08\x1d\x10\x1e\"\xe2\x02\n\x06PbTask\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1a\n\x04meta\x18\x04 \x01(\x0b\x32\x07.PbMetaH\x03\x88\x01\x01\x12\x1e\n\x11mean_elapsed_time\x18\x05 \x01(\x02H\x04\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x06 \x01(\x05H\x05\x88\x01\x01\x12\x0f\n\x07proxies\x18\x07 \x03(\t\x12\x11\n\tnamespace\x18\x08 \x03(\t\x12\x0f\n\x07parents\x18\t \x03(\t\x12\x19\n\x0c\x66irst_parent\x18\n \x01(\tH\x06\x88\x01\x01\x12 \n\x07runtime\x18\x0b \x01(\x0b\x32\n.PbRuntimeH\x07\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_nameB\x07\n\x05_metaB\x14\n\x12_mean_elapsed_timeB\x08\n\x06_depthB\x0f\n\r_first_parentB\n\n\x08_runtime\"\xd8\x01\n\nPbPollTask\x12\x18\n\x0blocal_proxy\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08workflow\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x19\n\x0cremote_proxy\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x16\n\treq_state\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x19\n\x0cgraph_string\x18\x05 \x01(\tH\x04\x88\x01\x01\x42\x0e\n\x0c_local_proxyB\x0b\n\t_workflowB\x0f\n\r_remote_proxyB\x0c\n\n_req_stateB\x0f\n\r_graph_string\"\xcb\x01\n\x0bPbCondition\x12\x17\n\ntask_proxy\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x17\n\nexpr_alias\x18\x02 
\x01(\tH\x01\x88\x01\x01\x12\x16\n\treq_state\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x16\n\tsatisfied\x18\x04 \x01(\x08H\x03\x88\x01\x01\x12\x14\n\x07message\x18\x05 \x01(\tH\x04\x88\x01\x01\x42\r\n\x0b_task_proxyB\r\n\x0b_expr_aliasB\x0c\n\n_req_stateB\x0c\n\n_satisfiedB\n\n\x08_message\"\x96\x01\n\x0ePbPrerequisite\x12\x17\n\nexpression\x18\x01 \x01(\tH\x00\x88\x01\x01\x12 \n\nconditions\x18\x02 \x03(\x0b\x32\x0c.PbCondition\x12\x14\n\x0c\x63ycle_points\x18\x03 \x03(\t\x12\x16\n\tsatisfied\x18\x04 \x01(\x08H\x01\x88\x01\x01\x42\r\n\x0b_expressionB\x0c\n\n_satisfied\"\x8c\x01\n\x08PbOutput\x12\x12\n\x05label\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x14\n\x07message\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x16\n\tsatisfied\x18\x03 \x01(\x08H\x02\x88\x01\x01\x12\x11\n\x04time\x18\x04 \x01(\x01H\x03\x88\x01\x01\x42\x08\n\x06_labelB\n\n\x08_messageB\x0c\n\n_satisfiedB\x07\n\x05_time\"\xa5\x01\n\tPbTrigger\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x12\n\x05label\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x14\n\x07message\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x16\n\tsatisfied\x18\x04 \x01(\x08H\x03\x88\x01\x01\x12\x11\n\x04time\x18\x05 \x01(\x01H\x04\x88\x01\x01\x42\x05\n\x03_idB\x08\n\x06_labelB\n\n\x08_messageB\x0c\n\n_satisfiedB\x07\n\x05_time\"\x91\x08\n\x0bPbTaskProxy\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04task\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x12\n\x05state\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x18\n\x0b\x63ycle_point\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x06 \x01(\x05H\x05\x88\x01\x01\x12\x18\n\x0bjob_submits\x18\x07 \x01(\x05H\x06\x88\x01\x01\x12*\n\x07outputs\x18\t \x03(\x0b\x32\x19.PbTaskProxy.OutputsEntry\x12\x11\n\tnamespace\x18\x0b \x03(\t\x12&\n\rprerequisites\x18\x0c \x03(\x0b\x32\x0f.PbPrerequisite\x12\x0c\n\x04jobs\x18\r \x03(\t\x12\x19\n\x0c\x66irst_parent\x18\x0f \x01(\tH\x07\x88\x01\x01\x12\x11\n\x04name\x18\x10 
\x01(\tH\x08\x88\x01\x01\x12\x14\n\x07is_held\x18\x11 \x01(\x08H\t\x88\x01\x01\x12\r\n\x05\x65\x64ges\x18\x12 \x03(\t\x12\x11\n\tancestors\x18\x13 \x03(\t\x12\x16\n\tflow_nums\x18\x14 \x01(\tH\n\x88\x01\x01\x12=\n\x11\x65xternal_triggers\x18\x17 \x03(\x0b\x32\".PbTaskProxy.ExternalTriggersEntry\x12.\n\txtriggers\x18\x18 \x03(\x0b\x32\x1b.PbTaskProxy.XtriggersEntry\x12\x16\n\tis_queued\x18\x19 \x01(\x08H\x0b\x88\x01\x01\x12\x18\n\x0bis_runahead\x18\x1a \x01(\x08H\x0c\x88\x01\x01\x12\x16\n\tflow_wait\x18\x1b \x01(\x08H\r\x88\x01\x01\x12 \n\x07runtime\x18\x1c \x01(\x0b\x32\n.PbRuntimeH\x0e\x88\x01\x01\x12\x18\n\x0bgraph_depth\x18\x1d \x01(\x05H\x0f\x88\x01\x01\x1a\x39\n\x0cOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x18\n\x05value\x18\x02 \x01(\x0b\x32\t.PbOutput:\x02\x38\x01\x1a\x43\n\x15\x45xternalTriggersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x19\n\x05value\x18\x02 \x01(\x0b\x32\n.PbTrigger:\x02\x38\x01\x1a<\n\x0eXtriggersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x19\n\x05value\x18\x02 \x01(\x0b\x32\n.PbTrigger:\x02\x38\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_taskB\x08\n\x06_stateB\x0e\n\x0c_cycle_pointB\x08\n\x06_depthB\x0e\n\x0c_job_submitsB\x0f\n\r_first_parentB\x07\n\x05_nameB\n\n\x08_is_heldB\x0c\n\n_flow_numsB\x0c\n\n_is_queuedB\x0e\n\x0c_is_runaheadB\x0c\n\n_flow_waitB\n\n\x08_runtimeB\x0e\n\x0c_graph_depth\"\xc8\x02\n\x08PbFamily\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1a\n\x04meta\x18\x04 \x01(\x0b\x32\x07.PbMetaH\x03\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x05 \x01(\x05H\x04\x88\x01\x01\x12\x0f\n\x07proxies\x18\x06 \x03(\t\x12\x0f\n\x07parents\x18\x07 \x03(\t\x12\x13\n\x0b\x63hild_tasks\x18\x08 \x03(\t\x12\x16\n\x0e\x63hild_families\x18\t \x03(\t\x12\x19\n\x0c\x66irst_parent\x18\n \x01(\tH\x05\x88\x01\x01\x12 \n\x07runtime\x18\x0b 
\x01(\x0b\x32\n.PbRuntimeH\x06\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_nameB\x07\n\x05_metaB\x08\n\x06_depthB\x0f\n\r_first_parentB\n\n\x08_runtime\"\xae\x06\n\rPbFamilyProxy\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63ycle_point\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x11\n\x04name\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x13\n\x06\x66\x61mily\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x12\n\x05state\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x07 \x01(\x05H\x06\x88\x01\x01\x12\x19\n\x0c\x66irst_parent\x18\x08 \x01(\tH\x07\x88\x01\x01\x12\x13\n\x0b\x63hild_tasks\x18\n \x03(\t\x12\x16\n\x0e\x63hild_families\x18\x0b \x03(\t\x12\x14\n\x07is_held\x18\x0c \x01(\x08H\x08\x88\x01\x01\x12\x11\n\tancestors\x18\r \x03(\t\x12\x0e\n\x06states\x18\x0e \x03(\t\x12\x35\n\x0cstate_totals\x18\x0f \x03(\x0b\x32\x1f.PbFamilyProxy.StateTotalsEntry\x12\x1a\n\ris_held_total\x18\x10 \x01(\x05H\t\x88\x01\x01\x12\x16\n\tis_queued\x18\x11 \x01(\x08H\n\x88\x01\x01\x12\x1c\n\x0fis_queued_total\x18\x12 \x01(\x05H\x0b\x88\x01\x01\x12\x18\n\x0bis_runahead\x18\x13 \x01(\x08H\x0c\x88\x01\x01\x12\x1e\n\x11is_runahead_total\x18\x14 \x01(\x05H\r\x88\x01\x01\x12 \n\x07runtime\x18\x15 \x01(\x0b\x32\n.PbRuntimeH\x0e\x88\x01\x01\x12\x18\n\x0bgraph_depth\x18\x16 \x01(\x05H\x0f\x88\x01\x01\x1a\x32\n\x10StateTotalsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x0e\n\x0c_cycle_pointB\x07\n\x05_nameB\t\n\x07_familyB\x08\n\x06_stateB\x08\n\x06_depthB\x0f\n\r_first_parentB\n\n\x08_is_heldB\x10\n\x0e_is_held_totalB\x0c\n\n_is_queuedB\x12\n\x10_is_queued_totalB\x0e\n\x0c_is_runaheadB\x14\n\x12_is_runahead_totalB\n\n\x08_runtimeB\x0e\n\x0c_graph_depth\"\xbc\x01\n\x06PbEdge\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x13\n\x06source\x18\x03 
\x01(\tH\x02\x88\x01\x01\x12\x13\n\x06target\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x14\n\x07suicide\x18\x05 \x01(\x08H\x04\x88\x01\x01\x12\x11\n\x04\x63ond\x18\x06 \x01(\x08H\x05\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\t\n\x07_sourceB\t\n\x07_targetB\n\n\x08_suicideB\x07\n\x05_cond\"{\n\x07PbEdges\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\r\n\x05\x65\x64ges\x18\x02 \x03(\t\x12+\n\x16workflow_polling_tasks\x18\x03 \x03(\x0b\x32\x0b.PbPollTask\x12\x0e\n\x06leaves\x18\x04 \x03(\t\x12\x0c\n\x04\x66\x65\x65t\x18\x05 \x03(\tB\x05\n\x03_id\"\xf2\x01\n\x10PbEntireWorkflow\x12\"\n\x08workflow\x18\x01 \x01(\x0b\x32\x0b.PbWorkflowH\x00\x88\x01\x01\x12\x16\n\x05tasks\x18\x02 \x03(\x0b\x32\x07.PbTask\x12\"\n\x0ctask_proxies\x18\x03 \x03(\x0b\x32\x0c.PbTaskProxy\x12\x14\n\x04jobs\x18\x04 \x03(\x0b\x32\x06.PbJob\x12\x1b\n\x08\x66\x61milies\x18\x05 \x03(\x0b\x32\t.PbFamily\x12&\n\x0e\x66\x61mily_proxies\x18\x06 \x03(\x0b\x32\x0e.PbFamilyProxy\x12\x16\n\x05\x65\x64ges\x18\x07 \x03(\x0b\x32\x07.PbEdgeB\x0b\n\t_workflow\"\xaf\x01\n\x07\x45\x44\x65ltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x16\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x07.PbEdge\x12\x18\n\x07updated\x18\x04 \x03(\x0b\x32\x07.PbEdge\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xb3\x01\n\x07\x46\x44\x65ltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x18\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\t.PbFamily\x12\x1a\n\x07updated\x18\x04 \x03(\x0b\x32\t.PbFamily\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xbe\x01\n\x08\x46PDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 
\x01(\x03H\x01\x88\x01\x01\x12\x1d\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x0e.PbFamilyProxy\x12\x1f\n\x07updated\x18\x04 \x03(\x0b\x32\x0e.PbFamilyProxy\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xad\x01\n\x07JDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x15\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x06.PbJob\x12\x17\n\x07updated\x18\x04 \x03(\x0b\x32\x06.PbJob\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xaf\x01\n\x07TDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x16\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x07.PbTask\x12\x18\n\x07updated\x18\x04 \x03(\x0b\x32\x07.PbTask\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xba\x01\n\x08TPDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x1b\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x0c.PbTaskProxy\x12\x1d\n\x07updated\x18\x04 \x03(\x0b\x32\x0c.PbTaskProxy\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xc3\x01\n\x07WDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x1f\n\x05\x61\x64\x64\x65\x64\x18\x02 \x01(\x0b\x32\x0b.PbWorkflowH\x01\x88\x01\x01\x12!\n\x07updated\x18\x03 \x01(\x0b\x32\x0b.PbWorkflowH\x02\x88\x01\x01\x12\x15\n\x08reloaded\x18\x04 \x01(\x08H\x03\x88\x01\x01\x12\x13\n\x06pruned\x18\x05 
\x01(\tH\x04\x88\x01\x01\x42\x07\n\x05_timeB\x08\n\x06_addedB\n\n\x08_updatedB\x0b\n\t_reloadedB\t\n\x07_pruned\"\xd1\x01\n\tAllDeltas\x12\x1a\n\x08\x66\x61milies\x18\x01 \x01(\x0b\x32\x08.FDeltas\x12!\n\x0e\x66\x61mily_proxies\x18\x02 \x01(\x0b\x32\t.FPDeltas\x12\x16\n\x04jobs\x18\x03 \x01(\x0b\x32\x08.JDeltas\x12\x17\n\x05tasks\x18\x04 \x01(\x0b\x32\x08.TDeltas\x12\x1f\n\x0ctask_proxies\x18\x05 \x01(\x0b\x32\t.TPDeltas\x12\x17\n\x05\x65\x64ges\x18\x06 \x01(\x0b\x32\x08.EDeltas\x12\x1a\n\x08workflow\x18\x07 \x01(\x0b\x32\x08.WDeltasb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x64\x61ta_messages.proto\"\x96\x01\n\x06PbMeta\x12\x12\n\x05title\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x18\n\x0b\x64\x65scription\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x10\n\x03URL\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x19\n\x0cuser_defined\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\x08\n\x06_titleB\x0e\n\x0c_descriptionB\x06\n\x04_URLB\x0f\n\r_user_defined\"\xaa\x01\n\nPbTimeZone\x12\x12\n\x05hours\x18\x01 \x01(\x05H\x00\x88\x01\x01\x12\x14\n\x07minutes\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x19\n\x0cstring_basic\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1c\n\x0fstring_extended\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\x08\n\x06_hoursB\n\n\x08_minutesB\x0f\n\r_string_basicB\x12\n\x10_string_extended\"\'\n\x0fPbTaskProxyRefs\x12\x14\n\x0ctask_proxies\x18\x01 \x03(\t\"\xd4\x0c\n\nPbWorkflow\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x13\n\x06status\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x11\n\x04host\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x11\n\x04port\x18\x06 \x01(\x05H\x05\x88\x01\x01\x12\x12\n\x05owner\x18\x07 \x01(\tH\x06\x88\x01\x01\x12\r\n\x05tasks\x18\x08 \x03(\t\x12\x10\n\x08\x66\x61milies\x18\t \x03(\t\x12\x1c\n\x05\x65\x64ges\x18\n \x01(\x0b\x32\x08.PbEdgesH\x07\x88\x01\x01\x12\x18\n\x0b\x61pi_version\x18\x0b 
\x01(\x05H\x08\x88\x01\x01\x12\x19\n\x0c\x63ylc_version\x18\x0c \x01(\tH\t\x88\x01\x01\x12\x19\n\x0clast_updated\x18\r \x01(\x01H\n\x88\x01\x01\x12\x1a\n\x04meta\x18\x0e \x01(\x0b\x32\x07.PbMetaH\x0b\x88\x01\x01\x12&\n\x19newest_active_cycle_point\x18\x10 \x01(\tH\x0c\x88\x01\x01\x12&\n\x19oldest_active_cycle_point\x18\x11 \x01(\tH\r\x88\x01\x01\x12\x15\n\x08reloaded\x18\x12 \x01(\x08H\x0e\x88\x01\x01\x12\x15\n\x08run_mode\x18\x13 \x01(\tH\x0f\x88\x01\x01\x12\x19\n\x0c\x63ycling_mode\x18\x14 \x01(\tH\x10\x88\x01\x01\x12\x32\n\x0cstate_totals\x18\x15 \x03(\x0b\x32\x1c.PbWorkflow.StateTotalsEntry\x12\x1d\n\x10workflow_log_dir\x18\x16 \x01(\tH\x11\x88\x01\x01\x12(\n\x0etime_zone_info\x18\x17 \x01(\x0b\x32\x0b.PbTimeZoneH\x12\x88\x01\x01\x12\x17\n\ntree_depth\x18\x18 \x01(\x05H\x13\x88\x01\x01\x12\x15\n\rjob_log_names\x18\x19 \x03(\t\x12\x14\n\x0cns_def_order\x18\x1a \x03(\t\x12\x0e\n\x06states\x18\x1b \x03(\t\x12\x14\n\x0ctask_proxies\x18\x1c \x03(\t\x12\x16\n\x0e\x66\x61mily_proxies\x18\x1d \x03(\t\x12\x17\n\nstatus_msg\x18\x1e \x01(\tH\x14\x88\x01\x01\x12\x1a\n\ris_held_total\x18\x1f \x01(\x05H\x15\x88\x01\x01\x12\x0c\n\x04jobs\x18 \x03(\t\x12\x15\n\x08pub_port\x18! 
\x01(\x05H\x16\x88\x01\x01\x12\x17\n\nbroadcasts\x18\" \x01(\tH\x17\x88\x01\x01\x12\x1c\n\x0fis_queued_total\x18# \x01(\x05H\x18\x88\x01\x01\x12=\n\x12latest_state_tasks\x18$ \x03(\x0b\x32!.PbWorkflow.LatestStateTasksEntry\x12\x13\n\x06pruned\x18% \x01(\x08H\x19\x88\x01\x01\x12\x1e\n\x11is_runahead_total\x18& \x01(\x05H\x1a\x88\x01\x01\x12\x1b\n\x0estates_updated\x18\' \x01(\x08H\x1b\x88\x01\x01\x12\x1c\n\x0fn_edge_distance\x18( \x01(\x05H\x1c\x88\x01\x01\x1a\x32\n\x10StateTotalsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1aI\n\x15LatestStateTasksEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1f\n\x05value\x18\x02 \x01(\x0b\x32\x10.PbTaskProxyRefs:\x02\x38\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_nameB\t\n\x07_statusB\x07\n\x05_hostB\x07\n\x05_portB\x08\n\x06_ownerB\x08\n\x06_edgesB\x0e\n\x0c_api_versionB\x0f\n\r_cylc_versionB\x0f\n\r_last_updatedB\x07\n\x05_metaB\x1c\n\x1a_newest_active_cycle_pointB\x1c\n\x1a_oldest_active_cycle_pointB\x0b\n\t_reloadedB\x0b\n\t_run_modeB\x0f\n\r_cycling_modeB\x13\n\x11_workflow_log_dirB\x11\n\x0f_time_zone_infoB\r\n\x0b_tree_depthB\r\n\x0b_status_msgB\x10\n\x0e_is_held_totalB\x0b\n\t_pub_portB\r\n\x0b_broadcastsB\x12\n\x10_is_queued_totalB\t\n\x07_prunedB\x14\n\x12_is_runahead_totalB\x11\n\x0f_states_updatedB\x12\n\x10_n_edge_distance\"\x85\x07\n\tPbRuntime\x12\x15\n\x08platform\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x06script\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0binit_script\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x17\n\nenv_script\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x17\n\nerr_script\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x18\n\x0b\x65xit_script\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x17\n\npre_script\x18\x07 \x01(\tH\x06\x88\x01\x01\x12\x18\n\x0bpost_script\x18\x08 \x01(\tH\x07\x88\x01\x01\x12\x19\n\x0cwork_sub_dir\x18\t \x01(\tH\x08\x88\x01\x01\x12(\n\x1b\x65xecution_polling_intervals\x18\n \x01(\tH\t\x88\x01\x01\x12#\n\x16\x65xecution_retry_delays\x18\x0b 
\x01(\tH\n\x88\x01\x01\x12!\n\x14\x65xecution_time_limit\x18\x0c \x01(\tH\x0b\x88\x01\x01\x12)\n\x1csubmission_polling_intervals\x18\r \x01(\tH\x0c\x88\x01\x01\x12$\n\x17submission_retry_delays\x18\x0e \x01(\tH\r\x88\x01\x01\x12\x17\n\ndirectives\x18\x0f \x01(\tH\x0e\x88\x01\x01\x12\x18\n\x0b\x65nvironment\x18\x10 \x01(\tH\x0f\x88\x01\x01\x12\x14\n\x07outputs\x18\x11 \x01(\tH\x10\x88\x01\x01\x12\x17\n\ncompletion\x18\x12 \x01(\tH\x11\x88\x01\x01\x12\x15\n\x08run_mode\x18\x13 \x01(\tH\x12\x88\x01\x01\x42\x0b\n\t_platformB\t\n\x07_scriptB\x0e\n\x0c_init_scriptB\r\n\x0b_env_scriptB\r\n\x0b_err_scriptB\x0e\n\x0c_exit_scriptB\r\n\x0b_pre_scriptB\x0e\n\x0c_post_scriptB\x0f\n\r_work_sub_dirB\x1e\n\x1c_execution_polling_intervalsB\x19\n\x17_execution_retry_delaysB\x17\n\x15_execution_time_limitB\x1f\n\x1d_submission_polling_intervalsB\x1a\n\x18_submission_retry_delaysB\r\n\x0b_directivesB\x0e\n\x0c_environmentB\n\n\x08_outputsB\r\n\x0b_completionB\x0b\n\t_run_mode\"\x9d\x05\n\x05PbJob\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x17\n\nsubmit_num\x18\x03 \x01(\x05H\x02\x88\x01\x01\x12\x12\n\x05state\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x17\n\ntask_proxy\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x1b\n\x0esubmitted_time\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x19\n\x0cstarted_time\x18\x07 \x01(\tH\x06\x88\x01\x01\x12\x1a\n\rfinished_time\x18\x08 \x01(\tH\x07\x88\x01\x01\x12\x13\n\x06job_id\x18\t \x01(\tH\x08\x88\x01\x01\x12\x1c\n\x0fjob_runner_name\x18\n \x01(\tH\t\x88\x01\x01\x12!\n\x14\x65xecution_time_limit\x18\x0e \x01(\x02H\n\x88\x01\x01\x12\x15\n\x08platform\x18\x0f \x01(\tH\x0b\x88\x01\x01\x12\x18\n\x0bjob_log_dir\x18\x11 \x01(\tH\x0c\x88\x01\x01\x12\x11\n\x04name\x18\x1e \x01(\tH\r\x88\x01\x01\x12\x18\n\x0b\x63ycle_point\x18\x1f \x01(\tH\x0e\x88\x01\x01\x12\x10\n\x08messages\x18 \x03(\t\x12 \n\x07runtime\x18! 
\x01(\x0b\x32\n.PbRuntimeH\x0f\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\r\n\x0b_submit_numB\x08\n\x06_stateB\r\n\x0b_task_proxyB\x11\n\x0f_submitted_timeB\x0f\n\r_started_timeB\x10\n\x0e_finished_timeB\t\n\x07_job_idB\x12\n\x10_job_runner_nameB\x17\n\x15_execution_time_limitB\x0b\n\t_platformB\x0e\n\x0c_job_log_dirB\x07\n\x05_nameB\x0e\n\x0c_cycle_pointB\n\n\x08_runtimeJ\x04\x08\x1d\x10\x1e\"\xe2\x02\n\x06PbTask\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1a\n\x04meta\x18\x04 \x01(\x0b\x32\x07.PbMetaH\x03\x88\x01\x01\x12\x1e\n\x11mean_elapsed_time\x18\x05 \x01(\x02H\x04\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x06 \x01(\x05H\x05\x88\x01\x01\x12\x0f\n\x07proxies\x18\x07 \x03(\t\x12\x11\n\tnamespace\x18\x08 \x03(\t\x12\x0f\n\x07parents\x18\t \x03(\t\x12\x19\n\x0c\x66irst_parent\x18\n \x01(\tH\x06\x88\x01\x01\x12 \n\x07runtime\x18\x0b \x01(\x0b\x32\n.PbRuntimeH\x07\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_nameB\x07\n\x05_metaB\x14\n\x12_mean_elapsed_timeB\x08\n\x06_depthB\x0f\n\r_first_parentB\n\n\x08_runtime\"\xd8\x01\n\nPbPollTask\x12\x18\n\x0blocal_proxy\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08workflow\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x19\n\x0cremote_proxy\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x16\n\treq_state\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x19\n\x0cgraph_string\x18\x05 \x01(\tH\x04\x88\x01\x01\x42\x0e\n\x0c_local_proxyB\x0b\n\t_workflowB\x0f\n\r_remote_proxyB\x0c\n\n_req_stateB\x0f\n\r_graph_string\"\xcb\x01\n\x0bPbCondition\x12\x17\n\ntask_proxy\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x17\n\nexpr_alias\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x16\n\treq_state\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x16\n\tsatisfied\x18\x04 \x01(\x08H\x03\x88\x01\x01\x12\x14\n\x07message\x18\x05 
\x01(\tH\x04\x88\x01\x01\x42\r\n\x0b_task_proxyB\r\n\x0b_expr_aliasB\x0c\n\n_req_stateB\x0c\n\n_satisfiedB\n\n\x08_message\"\x96\x01\n\x0ePbPrerequisite\x12\x17\n\nexpression\x18\x01 \x01(\tH\x00\x88\x01\x01\x12 \n\nconditions\x18\x02 \x03(\x0b\x32\x0c.PbCondition\x12\x14\n\x0c\x63ycle_points\x18\x03 \x03(\t\x12\x16\n\tsatisfied\x18\x04 \x01(\x08H\x01\x88\x01\x01\x42\r\n\x0b_expressionB\x0c\n\n_satisfied\"\x8c\x01\n\x08PbOutput\x12\x12\n\x05label\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x14\n\x07message\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x16\n\tsatisfied\x18\x03 \x01(\x08H\x02\x88\x01\x01\x12\x11\n\x04time\x18\x04 \x01(\x01H\x03\x88\x01\x01\x42\x08\n\x06_labelB\n\n\x08_messageB\x0c\n\n_satisfiedB\x07\n\x05_time\"\xa5\x01\n\tPbTrigger\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x12\n\x05label\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x14\n\x07message\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x16\n\tsatisfied\x18\x04 \x01(\x08H\x03\x88\x01\x01\x12\x11\n\x04time\x18\x05 \x01(\x01H\x04\x88\x01\x01\x42\x05\n\x03_idB\x08\n\x06_labelB\n\n\x08_messageB\x0c\n\n_satisfiedB\x07\n\x05_time\"\x91\x08\n\x0bPbTaskProxy\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04task\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x12\n\x05state\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x18\n\x0b\x63ycle_point\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x06 \x01(\x05H\x05\x88\x01\x01\x12\x18\n\x0bjob_submits\x18\x07 \x01(\x05H\x06\x88\x01\x01\x12*\n\x07outputs\x18\t \x03(\x0b\x32\x19.PbTaskProxy.OutputsEntry\x12\x11\n\tnamespace\x18\x0b \x03(\t\x12&\n\rprerequisites\x18\x0c \x03(\x0b\x32\x0f.PbPrerequisite\x12\x0c\n\x04jobs\x18\r \x03(\t\x12\x19\n\x0c\x66irst_parent\x18\x0f \x01(\tH\x07\x88\x01\x01\x12\x11\n\x04name\x18\x10 \x01(\tH\x08\x88\x01\x01\x12\x14\n\x07is_held\x18\x11 \x01(\x08H\t\x88\x01\x01\x12\r\n\x05\x65\x64ges\x18\x12 \x03(\t\x12\x11\n\tancestors\x18\x13 \x03(\t\x12\x16\n\tflow_nums\x18\x14 
\x01(\tH\n\x88\x01\x01\x12=\n\x11\x65xternal_triggers\x18\x17 \x03(\x0b\x32\".PbTaskProxy.ExternalTriggersEntry\x12.\n\txtriggers\x18\x18 \x03(\x0b\x32\x1b.PbTaskProxy.XtriggersEntry\x12\x16\n\tis_queued\x18\x19 \x01(\x08H\x0b\x88\x01\x01\x12\x18\n\x0bis_runahead\x18\x1a \x01(\x08H\x0c\x88\x01\x01\x12\x16\n\tflow_wait\x18\x1b \x01(\x08H\r\x88\x01\x01\x12 \n\x07runtime\x18\x1c \x01(\x0b\x32\n.PbRuntimeH\x0e\x88\x01\x01\x12\x18\n\x0bgraph_depth\x18\x1d \x01(\x05H\x0f\x88\x01\x01\x1a\x39\n\x0cOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x18\n\x05value\x18\x02 \x01(\x0b\x32\t.PbOutput:\x02\x38\x01\x1a\x43\n\x15\x45xternalTriggersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x19\n\x05value\x18\x02 \x01(\x0b\x32\n.PbTrigger:\x02\x38\x01\x1a<\n\x0eXtriggersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x19\n\x05value\x18\x02 \x01(\x0b\x32\n.PbTrigger:\x02\x38\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_taskB\x08\n\x06_stateB\x0e\n\x0c_cycle_pointB\x08\n\x06_depthB\x0e\n\x0c_job_submitsB\x0f\n\r_first_parentB\x07\n\x05_nameB\n\n\x08_is_heldB\x0c\n\n_flow_numsB\x0c\n\n_is_queuedB\x0e\n\x0c_is_runaheadB\x0c\n\n_flow_waitB\n\n\x08_runtimeB\x0e\n\x0c_graph_depth\"\xc8\x02\n\x08PbFamily\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x1a\n\x04meta\x18\x04 \x01(\x0b\x32\x07.PbMetaH\x03\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x05 \x01(\x05H\x04\x88\x01\x01\x12\x0f\n\x07proxies\x18\x06 \x03(\t\x12\x0f\n\x07parents\x18\x07 \x03(\t\x12\x13\n\x0b\x63hild_tasks\x18\x08 \x03(\t\x12\x16\n\x0e\x63hild_families\x18\t \x03(\t\x12\x19\n\x0c\x66irst_parent\x18\n \x01(\tH\x05\x88\x01\x01\x12 \n\x07runtime\x18\x0b \x01(\x0b\x32\n.PbRuntimeH\x06\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x07\n\x05_nameB\x07\n\x05_metaB\x08\n\x06_depthB\x0f\n\r_first_parentB\n\n\x08_runtime\"\xae\x06\n\rPbFamilyProxy\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 
\x01(\tH\x01\x88\x01\x01\x12\x18\n\x0b\x63ycle_point\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x11\n\x04name\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x13\n\x06\x66\x61mily\x18\x05 \x01(\tH\x04\x88\x01\x01\x12\x12\n\x05state\x18\x06 \x01(\tH\x05\x88\x01\x01\x12\x12\n\x05\x64\x65pth\x18\x07 \x01(\x05H\x06\x88\x01\x01\x12\x19\n\x0c\x66irst_parent\x18\x08 \x01(\tH\x07\x88\x01\x01\x12\x13\n\x0b\x63hild_tasks\x18\n \x03(\t\x12\x16\n\x0e\x63hild_families\x18\x0b \x03(\t\x12\x14\n\x07is_held\x18\x0c \x01(\x08H\x08\x88\x01\x01\x12\x11\n\tancestors\x18\r \x03(\t\x12\x0e\n\x06states\x18\x0e \x03(\t\x12\x35\n\x0cstate_totals\x18\x0f \x03(\x0b\x32\x1f.PbFamilyProxy.StateTotalsEntry\x12\x1a\n\ris_held_total\x18\x10 \x01(\x05H\t\x88\x01\x01\x12\x16\n\tis_queued\x18\x11 \x01(\x08H\n\x88\x01\x01\x12\x1c\n\x0fis_queued_total\x18\x12 \x01(\x05H\x0b\x88\x01\x01\x12\x18\n\x0bis_runahead\x18\x13 \x01(\x08H\x0c\x88\x01\x01\x12\x1e\n\x11is_runahead_total\x18\x14 \x01(\x05H\r\x88\x01\x01\x12 \n\x07runtime\x18\x15 \x01(\x0b\x32\n.PbRuntimeH\x0e\x88\x01\x01\x12\x18\n\x0bgraph_depth\x18\x16 \x01(\x05H\x0f\x88\x01\x01\x1a\x32\n\x10StateTotalsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\x0e\n\x0c_cycle_pointB\x07\n\x05_nameB\t\n\x07_familyB\x08\n\x06_stateB\x08\n\x06_depthB\x0f\n\r_first_parentB\n\n\x08_is_heldB\x10\n\x0e_is_held_totalB\x0c\n\n_is_queuedB\x12\n\x10_is_queued_totalB\x0e\n\x0c_is_runaheadB\x14\n\x12_is_runahead_totalB\n\n\x08_runtimeB\x0e\n\x0c_graph_depth\"\xbc\x01\n\x06PbEdge\x12\x12\n\x05stamp\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x02id\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x13\n\x06source\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x13\n\x06target\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x14\n\x07suicide\x18\x05 \x01(\x08H\x04\x88\x01\x01\x12\x11\n\x04\x63ond\x18\x06 
\x01(\x08H\x05\x88\x01\x01\x42\x08\n\x06_stampB\x05\n\x03_idB\t\n\x07_sourceB\t\n\x07_targetB\n\n\x08_suicideB\x07\n\x05_cond\"{\n\x07PbEdges\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\r\n\x05\x65\x64ges\x18\x02 \x03(\t\x12+\n\x16workflow_polling_tasks\x18\x03 \x03(\x0b\x32\x0b.PbPollTask\x12\x0e\n\x06leaves\x18\x04 \x03(\t\x12\x0c\n\x04\x66\x65\x65t\x18\x05 \x03(\tB\x05\n\x03_id\"\xf2\x01\n\x10PbEntireWorkflow\x12\"\n\x08workflow\x18\x01 \x01(\x0b\x32\x0b.PbWorkflowH\x00\x88\x01\x01\x12\x16\n\x05tasks\x18\x02 \x03(\x0b\x32\x07.PbTask\x12\"\n\x0ctask_proxies\x18\x03 \x03(\x0b\x32\x0c.PbTaskProxy\x12\x14\n\x04jobs\x18\x04 \x03(\x0b\x32\x06.PbJob\x12\x1b\n\x08\x66\x61milies\x18\x05 \x03(\x0b\x32\t.PbFamily\x12&\n\x0e\x66\x61mily_proxies\x18\x06 \x03(\x0b\x32\x0e.PbFamilyProxy\x12\x16\n\x05\x65\x64ges\x18\x07 \x03(\x0b\x32\x07.PbEdgeB\x0b\n\t_workflow\"\xaf\x01\n\x07\x45\x44\x65ltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x16\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x07.PbEdge\x12\x18\n\x07updated\x18\x04 \x03(\x0b\x32\x07.PbEdge\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xb3\x01\n\x07\x46\x44\x65ltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x18\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\t.PbFamily\x12\x1a\n\x07updated\x18\x04 \x03(\x0b\x32\t.PbFamily\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xbe\x01\n\x08\x46PDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x1d\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x0e.PbFamilyProxy\x12\x1f\n\x07updated\x18\x04 
\x03(\x0b\x32\x0e.PbFamilyProxy\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xad\x01\n\x07JDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x15\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x06.PbJob\x12\x17\n\x07updated\x18\x04 \x03(\x0b\x32\x06.PbJob\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xaf\x01\n\x07TDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x16\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x07.PbTask\x12\x18\n\x07updated\x18\x04 \x03(\x0b\x32\x07.PbTask\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xba\x01\n\x08TPDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\x08\x63hecksum\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x1b\n\x05\x61\x64\x64\x65\x64\x18\x03 \x03(\x0b\x32\x0c.PbTaskProxy\x12\x1d\n\x07updated\x18\x04 \x03(\x0b\x32\x0c.PbTaskProxy\x12\x0e\n\x06pruned\x18\x05 \x03(\t\x12\x15\n\x08reloaded\x18\x06 \x01(\x08H\x02\x88\x01\x01\x42\x07\n\x05_timeB\x0b\n\t_checksumB\x0b\n\t_reloaded\"\xc3\x01\n\x07WDeltas\x12\x11\n\x04time\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x1f\n\x05\x61\x64\x64\x65\x64\x18\x02 \x01(\x0b\x32\x0b.PbWorkflowH\x01\x88\x01\x01\x12!\n\x07updated\x18\x03 \x01(\x0b\x32\x0b.PbWorkflowH\x02\x88\x01\x01\x12\x15\n\x08reloaded\x18\x04 \x01(\x08H\x03\x88\x01\x01\x12\x13\n\x06pruned\x18\x05 \x01(\tH\x04\x88\x01\x01\x42\x07\n\x05_timeB\x08\n\x06_addedB\n\n\x08_updatedB\x0b\n\t_reloadedB\t\n\x07_pruned\"\xd1\x01\n\tAllDeltas\x12\x1a\n\x08\x66\x61milies\x18\x01 \x01(\x0b\x32\x08.FDeltas\x12!\n\x0e\x66\x61mily_proxies\x18\x02 
\x01(\x0b\x32\t.FPDeltas\x12\x16\n\x04jobs\x18\x03 \x01(\x0b\x32\x08.JDeltas\x12\x17\n\x05tasks\x18\x04 \x01(\x0b\x32\x08.TDeltas\x12\x1f\n\x0ctask_proxies\x18\x05 \x01(\x0b\x32\t.TPDeltas\x12\x17\n\x05\x65\x64ges\x18\x06 \x01(\x0b\x32\x08.EDeltas\x12\x1a\n\x08workflow\x18\x07 \x01(\x0b\x32\x08.WDeltasb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -46,55 +46,55 @@ _globals['_PBWORKFLOW_LATESTSTATETASKSENTRY']._serialized_start=1493 _globals['_PBWORKFLOW_LATESTSTATETASKSENTRY']._serialized_end=1566 _globals['_PBRUNTIME']._serialized_start=2014 - _globals['_PBRUNTIME']._serialized_end=2879 - _globals['_PBJOB']._serialized_start=2882 - _globals['_PBJOB']._serialized_end=3551 - _globals['_PBTASK']._serialized_start=3554 - _globals['_PBTASK']._serialized_end=3908 - _globals['_PBPOLLTASK']._serialized_start=3911 - _globals['_PBPOLLTASK']._serialized_end=4127 - _globals['_PBCONDITION']._serialized_start=4130 - _globals['_PBCONDITION']._serialized_end=4333 - _globals['_PBPREREQUISITE']._serialized_start=4336 - _globals['_PBPREREQUISITE']._serialized_end=4486 - _globals['_PBOUTPUT']._serialized_start=4489 - _globals['_PBOUTPUT']._serialized_end=4629 - _globals['_PBTRIGGER']._serialized_start=4632 - _globals['_PBTRIGGER']._serialized_end=4797 - _globals['_PBTASKPROXY']._serialized_start=4800 - _globals['_PBTASKPROXY']._serialized_end=5841 - _globals['_PBTASKPROXY_OUTPUTSENTRY']._serialized_start=5451 - _globals['_PBTASKPROXY_OUTPUTSENTRY']._serialized_end=5508 - _globals['_PBTASKPROXY_EXTERNALTRIGGERSENTRY']._serialized_start=5510 - _globals['_PBTASKPROXY_EXTERNALTRIGGERSENTRY']._serialized_end=5577 - _globals['_PBTASKPROXY_XTRIGGERSENTRY']._serialized_start=5579 - _globals['_PBTASKPROXY_XTRIGGERSENTRY']._serialized_end=5639 - _globals['_PBFAMILY']._serialized_start=5844 - _globals['_PBFAMILY']._serialized_end=6172 - _globals['_PBFAMILYPROXY']._serialized_start=6175 - _globals['_PBFAMILYPROXY']._serialized_end=6989 + 
_globals['_PBRUNTIME']._serialized_end=2915 + _globals['_PBJOB']._serialized_start=2918 + _globals['_PBJOB']._serialized_end=3587 + _globals['_PBTASK']._serialized_start=3590 + _globals['_PBTASK']._serialized_end=3944 + _globals['_PBPOLLTASK']._serialized_start=3947 + _globals['_PBPOLLTASK']._serialized_end=4163 + _globals['_PBCONDITION']._serialized_start=4166 + _globals['_PBCONDITION']._serialized_end=4369 + _globals['_PBPREREQUISITE']._serialized_start=4372 + _globals['_PBPREREQUISITE']._serialized_end=4522 + _globals['_PBOUTPUT']._serialized_start=4525 + _globals['_PBOUTPUT']._serialized_end=4665 + _globals['_PBTRIGGER']._serialized_start=4668 + _globals['_PBTRIGGER']._serialized_end=4833 + _globals['_PBTASKPROXY']._serialized_start=4836 + _globals['_PBTASKPROXY']._serialized_end=5877 + _globals['_PBTASKPROXY_OUTPUTSENTRY']._serialized_start=5487 + _globals['_PBTASKPROXY_OUTPUTSENTRY']._serialized_end=5544 + _globals['_PBTASKPROXY_EXTERNALTRIGGERSENTRY']._serialized_start=5546 + _globals['_PBTASKPROXY_EXTERNALTRIGGERSENTRY']._serialized_end=5613 + _globals['_PBTASKPROXY_XTRIGGERSENTRY']._serialized_start=5615 + _globals['_PBTASKPROXY_XTRIGGERSENTRY']._serialized_end=5675 + _globals['_PBFAMILY']._serialized_start=5880 + _globals['_PBFAMILY']._serialized_end=6208 + _globals['_PBFAMILYPROXY']._serialized_start=6211 + _globals['_PBFAMILYPROXY']._serialized_end=7025 _globals['_PBFAMILYPROXY_STATETOTALSENTRY']._serialized_start=1441 _globals['_PBFAMILYPROXY_STATETOTALSENTRY']._serialized_end=1491 - _globals['_PBEDGE']._serialized_start=6992 - _globals['_PBEDGE']._serialized_end=7180 - _globals['_PBEDGES']._serialized_start=7182 - _globals['_PBEDGES']._serialized_end=7305 - _globals['_PBENTIREWORKFLOW']._serialized_start=7308 - _globals['_PBENTIREWORKFLOW']._serialized_end=7550 - _globals['_EDELTAS']._serialized_start=7553 - _globals['_EDELTAS']._serialized_end=7728 - _globals['_FDELTAS']._serialized_start=7731 - _globals['_FDELTAS']._serialized_end=7910 - 
_globals['_FPDELTAS']._serialized_start=7913 - _globals['_FPDELTAS']._serialized_end=8103 - _globals['_JDELTAS']._serialized_start=8106 - _globals['_JDELTAS']._serialized_end=8279 - _globals['_TDELTAS']._serialized_start=8282 - _globals['_TDELTAS']._serialized_end=8457 - _globals['_TPDELTAS']._serialized_start=8460 - _globals['_TPDELTAS']._serialized_end=8646 - _globals['_WDELTAS']._serialized_start=8649 - _globals['_WDELTAS']._serialized_end=8844 - _globals['_ALLDELTAS']._serialized_start=8847 - _globals['_ALLDELTAS']._serialized_end=9056 + _globals['_PBEDGE']._serialized_start=7028 + _globals['_PBEDGE']._serialized_end=7216 + _globals['_PBEDGES']._serialized_start=7218 + _globals['_PBEDGES']._serialized_end=7341 + _globals['_PBENTIREWORKFLOW']._serialized_start=7344 + _globals['_PBENTIREWORKFLOW']._serialized_end=7586 + _globals['_EDELTAS']._serialized_start=7589 + _globals['_EDELTAS']._serialized_end=7764 + _globals['_FDELTAS']._serialized_start=7767 + _globals['_FDELTAS']._serialized_end=7946 + _globals['_FPDELTAS']._serialized_start=7949 + _globals['_FPDELTAS']._serialized_end=8139 + _globals['_JDELTAS']._serialized_start=8142 + _globals['_JDELTAS']._serialized_end=8315 + _globals['_TDELTAS']._serialized_start=8318 + _globals['_TDELTAS']._serialized_end=8493 + _globals['_TPDELTAS']._serialized_start=8496 + _globals['_TPDELTAS']._serialized_end=8682 + _globals['_WDELTAS']._serialized_start=8685 + _globals['_WDELTAS']._serialized_end=8880 + _globals['_ALLDELTAS']._serialized_start=8883 + _globals['_ALLDELTAS']._serialized_end=9092 # @@protoc_insertion_point(module_scope) diff --git a/cylc/flow/data_messages_pb2.pyi b/cylc/flow/data_messages_pb2.pyi index 4e96c6ed2da..8c80f7f8f10 100644 --- a/cylc/flow/data_messages_pb2.pyi +++ b/cylc/flow/data_messages_pb2.pyi @@ -6,7 +6,7 @@ from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Map DESCRIPTOR: _descriptor.FileDescriptor class PbMeta(_message.Message): - __slots__ = ["title", 
"description", "URL", "user_defined"] + __slots__ = ("title", "description", "URL", "user_defined") TITLE_FIELD_NUMBER: _ClassVar[int] DESCRIPTION_FIELD_NUMBER: _ClassVar[int] URL_FIELD_NUMBER: _ClassVar[int] @@ -18,7 +18,7 @@ class PbMeta(_message.Message): def __init__(self, title: _Optional[str] = ..., description: _Optional[str] = ..., URL: _Optional[str] = ..., user_defined: _Optional[str] = ...) -> None: ... class PbTimeZone(_message.Message): - __slots__ = ["hours", "minutes", "string_basic", "string_extended"] + __slots__ = ("hours", "minutes", "string_basic", "string_extended") HOURS_FIELD_NUMBER: _ClassVar[int] MINUTES_FIELD_NUMBER: _ClassVar[int] STRING_BASIC_FIELD_NUMBER: _ClassVar[int] @@ -30,22 +30,22 @@ class PbTimeZone(_message.Message): def __init__(self, hours: _Optional[int] = ..., minutes: _Optional[int] = ..., string_basic: _Optional[str] = ..., string_extended: _Optional[str] = ...) -> None: ... class PbTaskProxyRefs(_message.Message): - __slots__ = ["task_proxies"] + __slots__ = ("task_proxies",) TASK_PROXIES_FIELD_NUMBER: _ClassVar[int] task_proxies: _containers.RepeatedScalarFieldContainer[str] def __init__(self, task_proxies: _Optional[_Iterable[str]] = ...) -> None: ... 
class PbWorkflow(_message.Message): - __slots__ = ["stamp", "id", "name", "status", "host", "port", "owner", "tasks", "families", "edges", "api_version", "cylc_version", "last_updated", "meta", "newest_active_cycle_point", "oldest_active_cycle_point", "reloaded", "run_mode", "cycling_mode", "state_totals", "workflow_log_dir", "time_zone_info", "tree_depth", "job_log_names", "ns_def_order", "states", "task_proxies", "family_proxies", "status_msg", "is_held_total", "jobs", "pub_port", "broadcasts", "is_queued_total", "latest_state_tasks", "pruned", "is_runahead_total", "states_updated", "n_edge_distance"] + __slots__ = ("stamp", "id", "name", "status", "host", "port", "owner", "tasks", "families", "edges", "api_version", "cylc_version", "last_updated", "meta", "newest_active_cycle_point", "oldest_active_cycle_point", "reloaded", "run_mode", "cycling_mode", "state_totals", "workflow_log_dir", "time_zone_info", "tree_depth", "job_log_names", "ns_def_order", "states", "task_proxies", "family_proxies", "status_msg", "is_held_total", "jobs", "pub_port", "broadcasts", "is_queued_total", "latest_state_tasks", "pruned", "is_runahead_total", "states_updated", "n_edge_distance") class StateTotalsEntry(_message.Message): - __slots__ = ["key", "value"] + __slots__ = ("key", "value") KEY_FIELD_NUMBER: _ClassVar[int] VALUE_FIELD_NUMBER: _ClassVar[int] key: str value: int def __init__(self, key: _Optional[str] = ..., value: _Optional[int] = ...) -> None: ... 
class LatestStateTasksEntry(_message.Message): - __slots__ = ["key", "value"] + __slots__ = ("key", "value") KEY_FIELD_NUMBER: _ClassVar[int] VALUE_FIELD_NUMBER: _ClassVar[int] key: str @@ -132,7 +132,7 @@ class PbWorkflow(_message.Message): def __init__(self, stamp: _Optional[str] = ..., id: _Optional[str] = ..., name: _Optional[str] = ..., status: _Optional[str] = ..., host: _Optional[str] = ..., port: _Optional[int] = ..., owner: _Optional[str] = ..., tasks: _Optional[_Iterable[str]] = ..., families: _Optional[_Iterable[str]] = ..., edges: _Optional[_Union[PbEdges, _Mapping]] = ..., api_version: _Optional[int] = ..., cylc_version: _Optional[str] = ..., last_updated: _Optional[float] = ..., meta: _Optional[_Union[PbMeta, _Mapping]] = ..., newest_active_cycle_point: _Optional[str] = ..., oldest_active_cycle_point: _Optional[str] = ..., reloaded: bool = ..., run_mode: _Optional[str] = ..., cycling_mode: _Optional[str] = ..., state_totals: _Optional[_Mapping[str, int]] = ..., workflow_log_dir: _Optional[str] = ..., time_zone_info: _Optional[_Union[PbTimeZone, _Mapping]] = ..., tree_depth: _Optional[int] = ..., job_log_names: _Optional[_Iterable[str]] = ..., ns_def_order: _Optional[_Iterable[str]] = ..., states: _Optional[_Iterable[str]] = ..., task_proxies: _Optional[_Iterable[str]] = ..., family_proxies: _Optional[_Iterable[str]] = ..., status_msg: _Optional[str] = ..., is_held_total: _Optional[int] = ..., jobs: _Optional[_Iterable[str]] = ..., pub_port: _Optional[int] = ..., broadcasts: _Optional[str] = ..., is_queued_total: _Optional[int] = ..., latest_state_tasks: _Optional[_Mapping[str, PbTaskProxyRefs]] = ..., pruned: bool = ..., is_runahead_total: _Optional[int] = ..., states_updated: bool = ..., n_edge_distance: _Optional[int] = ...) -> None: ... 
class PbRuntime(_message.Message): - __slots__ = ["platform", "script", "init_script", "env_script", "err_script", "exit_script", "pre_script", "post_script", "work_sub_dir", "execution_polling_intervals", "execution_retry_delays", "execution_time_limit", "submission_polling_intervals", "submission_retry_delays", "directives", "environment", "outputs", "completion"] + __slots__ = ("platform", "script", "init_script", "env_script", "err_script", "exit_script", "pre_script", "post_script", "work_sub_dir", "execution_polling_intervals", "execution_retry_delays", "execution_time_limit", "submission_polling_intervals", "submission_retry_delays", "directives", "environment", "outputs", "completion", "run_mode") PLATFORM_FIELD_NUMBER: _ClassVar[int] SCRIPT_FIELD_NUMBER: _ClassVar[int] INIT_SCRIPT_FIELD_NUMBER: _ClassVar[int] @@ -151,6 +151,7 @@ class PbRuntime(_message.Message): ENVIRONMENT_FIELD_NUMBER: _ClassVar[int] OUTPUTS_FIELD_NUMBER: _ClassVar[int] COMPLETION_FIELD_NUMBER: _ClassVar[int] + RUN_MODE_FIELD_NUMBER: _ClassVar[int] platform: str script: str init_script: str @@ -169,10 +170,11 @@ class PbRuntime(_message.Message): environment: str outputs: str completion: str - def __init__(self, platform: _Optional[str] = ..., script: _Optional[str] = ..., init_script: _Optional[str] = ..., env_script: _Optional[str] = ..., err_script: _Optional[str] = ..., exit_script: _Optional[str] = ..., pre_script: _Optional[str] = ..., post_script: _Optional[str] = ..., work_sub_dir: _Optional[str] = ..., execution_polling_intervals: _Optional[str] = ..., execution_retry_delays: _Optional[str] = ..., execution_time_limit: _Optional[str] = ..., submission_polling_intervals: _Optional[str] = ..., submission_retry_delays: _Optional[str] = ..., directives: _Optional[str] = ..., environment: _Optional[str] = ..., outputs: _Optional[str] = ..., completion: _Optional[str] = ...) -> None: ... 
+ run_mode: str + def __init__(self, platform: _Optional[str] = ..., script: _Optional[str] = ..., init_script: _Optional[str] = ..., env_script: _Optional[str] = ..., err_script: _Optional[str] = ..., exit_script: _Optional[str] = ..., pre_script: _Optional[str] = ..., post_script: _Optional[str] = ..., work_sub_dir: _Optional[str] = ..., execution_polling_intervals: _Optional[str] = ..., execution_retry_delays: _Optional[str] = ..., execution_time_limit: _Optional[str] = ..., submission_polling_intervals: _Optional[str] = ..., submission_retry_delays: _Optional[str] = ..., directives: _Optional[str] = ..., environment: _Optional[str] = ..., outputs: _Optional[str] = ..., completion: _Optional[str] = ..., run_mode: _Optional[str] = ...) -> None: ... class PbJob(_message.Message): - __slots__ = ["stamp", "id", "submit_num", "state", "task_proxy", "submitted_time", "started_time", "finished_time", "job_id", "job_runner_name", "execution_time_limit", "platform", "job_log_dir", "name", "cycle_point", "messages", "runtime"] + __slots__ = ("stamp", "id", "submit_num", "state", "task_proxy", "submitted_time", "started_time", "finished_time", "job_id", "job_runner_name", "execution_time_limit", "platform", "job_log_dir", "name", "cycle_point", "messages", "runtime") STAMP_FIELD_NUMBER: _ClassVar[int] ID_FIELD_NUMBER: _ClassVar[int] SUBMIT_NUM_FIELD_NUMBER: _ClassVar[int] @@ -210,7 +212,7 @@ class PbJob(_message.Message): def __init__(self, stamp: _Optional[str] = ..., id: _Optional[str] = ..., submit_num: _Optional[int] = ..., state: _Optional[str] = ..., task_proxy: _Optional[str] = ..., submitted_time: _Optional[str] = ..., started_time: _Optional[str] = ..., finished_time: _Optional[str] = ..., job_id: _Optional[str] = ..., job_runner_name: _Optional[str] = ..., execution_time_limit: _Optional[float] = ..., platform: _Optional[str] = ..., job_log_dir: _Optional[str] = ..., name: _Optional[str] = ..., cycle_point: _Optional[str] = ..., messages: 
_Optional[_Iterable[str]] = ..., runtime: _Optional[_Union[PbRuntime, _Mapping]] = ...) -> None: ... class PbTask(_message.Message): - __slots__ = ["stamp", "id", "name", "meta", "mean_elapsed_time", "depth", "proxies", "namespace", "parents", "first_parent", "runtime"] + __slots__ = ("stamp", "id", "name", "meta", "mean_elapsed_time", "depth", "proxies", "namespace", "parents", "first_parent", "runtime") STAMP_FIELD_NUMBER: _ClassVar[int] ID_FIELD_NUMBER: _ClassVar[int] NAME_FIELD_NUMBER: _ClassVar[int] @@ -236,7 +238,7 @@ class PbTask(_message.Message): def __init__(self, stamp: _Optional[str] = ..., id: _Optional[str] = ..., name: _Optional[str] = ..., meta: _Optional[_Union[PbMeta, _Mapping]] = ..., mean_elapsed_time: _Optional[float] = ..., depth: _Optional[int] = ..., proxies: _Optional[_Iterable[str]] = ..., namespace: _Optional[_Iterable[str]] = ..., parents: _Optional[_Iterable[str]] = ..., first_parent: _Optional[str] = ..., runtime: _Optional[_Union[PbRuntime, _Mapping]] = ...) -> None: ... class PbPollTask(_message.Message): - __slots__ = ["local_proxy", "workflow", "remote_proxy", "req_state", "graph_string"] + __slots__ = ("local_proxy", "workflow", "remote_proxy", "req_state", "graph_string") LOCAL_PROXY_FIELD_NUMBER: _ClassVar[int] WORKFLOW_FIELD_NUMBER: _ClassVar[int] REMOTE_PROXY_FIELD_NUMBER: _ClassVar[int] @@ -250,7 +252,7 @@ class PbPollTask(_message.Message): def __init__(self, local_proxy: _Optional[str] = ..., workflow: _Optional[str] = ..., remote_proxy: _Optional[str] = ..., req_state: _Optional[str] = ..., graph_string: _Optional[str] = ...) -> None: ... 
class PbCondition(_message.Message): - __slots__ = ["task_proxy", "expr_alias", "req_state", "satisfied", "message"] + __slots__ = ("task_proxy", "expr_alias", "req_state", "satisfied", "message") TASK_PROXY_FIELD_NUMBER: _ClassVar[int] EXPR_ALIAS_FIELD_NUMBER: _ClassVar[int] REQ_STATE_FIELD_NUMBER: _ClassVar[int] @@ -264,7 +266,7 @@ class PbCondition(_message.Message): def __init__(self, task_proxy: _Optional[str] = ..., expr_alias: _Optional[str] = ..., req_state: _Optional[str] = ..., satisfied: bool = ..., message: _Optional[str] = ...) -> None: ... class PbPrerequisite(_message.Message): - __slots__ = ["expression", "conditions", "cycle_points", "satisfied"] + __slots__ = ("expression", "conditions", "cycle_points", "satisfied") EXPRESSION_FIELD_NUMBER: _ClassVar[int] CONDITIONS_FIELD_NUMBER: _ClassVar[int] CYCLE_POINTS_FIELD_NUMBER: _ClassVar[int] @@ -276,7 +278,7 @@ class PbPrerequisite(_message.Message): def __init__(self, expression: _Optional[str] = ..., conditions: _Optional[_Iterable[_Union[PbCondition, _Mapping]]] = ..., cycle_points: _Optional[_Iterable[str]] = ..., satisfied: bool = ...) -> None: ... class PbOutput(_message.Message): - __slots__ = ["label", "message", "satisfied", "time"] + __slots__ = ("label", "message", "satisfied", "time") LABEL_FIELD_NUMBER: _ClassVar[int] MESSAGE_FIELD_NUMBER: _ClassVar[int] SATISFIED_FIELD_NUMBER: _ClassVar[int] @@ -288,7 +290,7 @@ class PbOutput(_message.Message): def __init__(self, label: _Optional[str] = ..., message: _Optional[str] = ..., satisfied: bool = ..., time: _Optional[float] = ...) -> None: ... 
class PbTrigger(_message.Message): - __slots__ = ["id", "label", "message", "satisfied", "time"] + __slots__ = ("id", "label", "message", "satisfied", "time") ID_FIELD_NUMBER: _ClassVar[int] LABEL_FIELD_NUMBER: _ClassVar[int] MESSAGE_FIELD_NUMBER: _ClassVar[int] @@ -302,23 +304,23 @@ class PbTrigger(_message.Message): def __init__(self, id: _Optional[str] = ..., label: _Optional[str] = ..., message: _Optional[str] = ..., satisfied: bool = ..., time: _Optional[float] = ...) -> None: ... class PbTaskProxy(_message.Message): - __slots__ = ["stamp", "id", "task", "state", "cycle_point", "depth", "job_submits", "outputs", "namespace", "prerequisites", "jobs", "first_parent", "name", "is_held", "edges", "ancestors", "flow_nums", "external_triggers", "xtriggers", "is_queued", "is_runahead", "flow_wait", "runtime", "graph_depth"] + __slots__ = ("stamp", "id", "task", "state", "cycle_point", "depth", "job_submits", "outputs", "namespace", "prerequisites", "jobs", "first_parent", "name", "is_held", "edges", "ancestors", "flow_nums", "external_triggers", "xtriggers", "is_queued", "is_runahead", "flow_wait", "runtime", "graph_depth") class OutputsEntry(_message.Message): - __slots__ = ["key", "value"] + __slots__ = ("key", "value") KEY_FIELD_NUMBER: _ClassVar[int] VALUE_FIELD_NUMBER: _ClassVar[int] key: str value: PbOutput def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[PbOutput, _Mapping]] = ...) -> None: ... class ExternalTriggersEntry(_message.Message): - __slots__ = ["key", "value"] + __slots__ = ("key", "value") KEY_FIELD_NUMBER: _ClassVar[int] VALUE_FIELD_NUMBER: _ClassVar[int] key: str value: PbTrigger def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[PbTrigger, _Mapping]] = ...) -> None: ... 
class XtriggersEntry(_message.Message): - __slots__ = ["key", "value"] + __slots__ = ("key", "value") KEY_FIELD_NUMBER: _ClassVar[int] VALUE_FIELD_NUMBER: _ClassVar[int] key: str @@ -375,7 +377,7 @@ class PbTaskProxy(_message.Message): def __init__(self, stamp: _Optional[str] = ..., id: _Optional[str] = ..., task: _Optional[str] = ..., state: _Optional[str] = ..., cycle_point: _Optional[str] = ..., depth: _Optional[int] = ..., job_submits: _Optional[int] = ..., outputs: _Optional[_Mapping[str, PbOutput]] = ..., namespace: _Optional[_Iterable[str]] = ..., prerequisites: _Optional[_Iterable[_Union[PbPrerequisite, _Mapping]]] = ..., jobs: _Optional[_Iterable[str]] = ..., first_parent: _Optional[str] = ..., name: _Optional[str] = ..., is_held: bool = ..., edges: _Optional[_Iterable[str]] = ..., ancestors: _Optional[_Iterable[str]] = ..., flow_nums: _Optional[str] = ..., external_triggers: _Optional[_Mapping[str, PbTrigger]] = ..., xtriggers: _Optional[_Mapping[str, PbTrigger]] = ..., is_queued: bool = ..., is_runahead: bool = ..., flow_wait: bool = ..., runtime: _Optional[_Union[PbRuntime, _Mapping]] = ..., graph_depth: _Optional[int] = ...) -> None: ... 
class PbFamily(_message.Message): - __slots__ = ["stamp", "id", "name", "meta", "depth", "proxies", "parents", "child_tasks", "child_families", "first_parent", "runtime"] + __slots__ = ("stamp", "id", "name", "meta", "depth", "proxies", "parents", "child_tasks", "child_families", "first_parent", "runtime") STAMP_FIELD_NUMBER: _ClassVar[int] ID_FIELD_NUMBER: _ClassVar[int] NAME_FIELD_NUMBER: _ClassVar[int] @@ -401,9 +403,9 @@ class PbFamily(_message.Message): def __init__(self, stamp: _Optional[str] = ..., id: _Optional[str] = ..., name: _Optional[str] = ..., meta: _Optional[_Union[PbMeta, _Mapping]] = ..., depth: _Optional[int] = ..., proxies: _Optional[_Iterable[str]] = ..., parents: _Optional[_Iterable[str]] = ..., child_tasks: _Optional[_Iterable[str]] = ..., child_families: _Optional[_Iterable[str]] = ..., first_parent: _Optional[str] = ..., runtime: _Optional[_Union[PbRuntime, _Mapping]] = ...) -> None: ... class PbFamilyProxy(_message.Message): - __slots__ = ["stamp", "id", "cycle_point", "name", "family", "state", "depth", "first_parent", "child_tasks", "child_families", "is_held", "ancestors", "states", "state_totals", "is_held_total", "is_queued", "is_queued_total", "is_runahead", "is_runahead_total", "runtime", "graph_depth"] + __slots__ = ("stamp", "id", "cycle_point", "name", "family", "state", "depth", "first_parent", "child_tasks", "child_families", "is_held", "ancestors", "states", "state_totals", "is_held_total", "is_queued", "is_queued_total", "is_runahead", "is_runahead_total", "runtime", "graph_depth") class StateTotalsEntry(_message.Message): - __slots__ = ["key", "value"] + __slots__ = ("key", "value") KEY_FIELD_NUMBER: _ClassVar[int] VALUE_FIELD_NUMBER: _ClassVar[int] key: str @@ -454,7 +456,7 @@ class PbFamilyProxy(_message.Message): def __init__(self, stamp: _Optional[str] = ..., id: _Optional[str] = ..., cycle_point: _Optional[str] = ..., name: _Optional[str] = ..., family: _Optional[str] = ..., state: _Optional[str] = ..., depth: 
_Optional[int] = ..., first_parent: _Optional[str] = ..., child_tasks: _Optional[_Iterable[str]] = ..., child_families: _Optional[_Iterable[str]] = ..., is_held: bool = ..., ancestors: _Optional[_Iterable[str]] = ..., states: _Optional[_Iterable[str]] = ..., state_totals: _Optional[_Mapping[str, int]] = ..., is_held_total: _Optional[int] = ..., is_queued: bool = ..., is_queued_total: _Optional[int] = ..., is_runahead: bool = ..., is_runahead_total: _Optional[int] = ..., runtime: _Optional[_Union[PbRuntime, _Mapping]] = ..., graph_depth: _Optional[int] = ...) -> None: ... class PbEdge(_message.Message): - __slots__ = ["stamp", "id", "source", "target", "suicide", "cond"] + __slots__ = ("stamp", "id", "source", "target", "suicide", "cond") STAMP_FIELD_NUMBER: _ClassVar[int] ID_FIELD_NUMBER: _ClassVar[int] SOURCE_FIELD_NUMBER: _ClassVar[int] @@ -470,7 +472,7 @@ class PbEdge(_message.Message): def __init__(self, stamp: _Optional[str] = ..., id: _Optional[str] = ..., source: _Optional[str] = ..., target: _Optional[str] = ..., suicide: bool = ..., cond: bool = ...) -> None: ... class PbEdges(_message.Message): - __slots__ = ["id", "edges", "workflow_polling_tasks", "leaves", "feet"] + __slots__ = ("id", "edges", "workflow_polling_tasks", "leaves", "feet") ID_FIELD_NUMBER: _ClassVar[int] EDGES_FIELD_NUMBER: _ClassVar[int] WORKFLOW_POLLING_TASKS_FIELD_NUMBER: _ClassVar[int] @@ -484,7 +486,7 @@ class PbEdges(_message.Message): def __init__(self, id: _Optional[str] = ..., edges: _Optional[_Iterable[str]] = ..., workflow_polling_tasks: _Optional[_Iterable[_Union[PbPollTask, _Mapping]]] = ..., leaves: _Optional[_Iterable[str]] = ..., feet: _Optional[_Iterable[str]] = ...) -> None: ... 
class PbEntireWorkflow(_message.Message): - __slots__ = ["workflow", "tasks", "task_proxies", "jobs", "families", "family_proxies", "edges"] + __slots__ = ("workflow", "tasks", "task_proxies", "jobs", "families", "family_proxies", "edges") WORKFLOW_FIELD_NUMBER: _ClassVar[int] TASKS_FIELD_NUMBER: _ClassVar[int] TASK_PROXIES_FIELD_NUMBER: _ClassVar[int] @@ -502,7 +504,7 @@ class PbEntireWorkflow(_message.Message): def __init__(self, workflow: _Optional[_Union[PbWorkflow, _Mapping]] = ..., tasks: _Optional[_Iterable[_Union[PbTask, _Mapping]]] = ..., task_proxies: _Optional[_Iterable[_Union[PbTaskProxy, _Mapping]]] = ..., jobs: _Optional[_Iterable[_Union[PbJob, _Mapping]]] = ..., families: _Optional[_Iterable[_Union[PbFamily, _Mapping]]] = ..., family_proxies: _Optional[_Iterable[_Union[PbFamilyProxy, _Mapping]]] = ..., edges: _Optional[_Iterable[_Union[PbEdge, _Mapping]]] = ...) -> None: ... class EDeltas(_message.Message): - __slots__ = ["time", "checksum", "added", "updated", "pruned", "reloaded"] + __slots__ = ("time", "checksum", "added", "updated", "pruned", "reloaded") TIME_FIELD_NUMBER: _ClassVar[int] CHECKSUM_FIELD_NUMBER: _ClassVar[int] ADDED_FIELD_NUMBER: _ClassVar[int] @@ -518,7 +520,7 @@ class EDeltas(_message.Message): def __init__(self, time: _Optional[float] = ..., checksum: _Optional[int] = ..., added: _Optional[_Iterable[_Union[PbEdge, _Mapping]]] = ..., updated: _Optional[_Iterable[_Union[PbEdge, _Mapping]]] = ..., pruned: _Optional[_Iterable[str]] = ..., reloaded: bool = ...) -> None: ... 
class FDeltas(_message.Message): - __slots__ = ["time", "checksum", "added", "updated", "pruned", "reloaded"] + __slots__ = ("time", "checksum", "added", "updated", "pruned", "reloaded") TIME_FIELD_NUMBER: _ClassVar[int] CHECKSUM_FIELD_NUMBER: _ClassVar[int] ADDED_FIELD_NUMBER: _ClassVar[int] @@ -534,7 +536,7 @@ class FDeltas(_message.Message): def __init__(self, time: _Optional[float] = ..., checksum: _Optional[int] = ..., added: _Optional[_Iterable[_Union[PbFamily, _Mapping]]] = ..., updated: _Optional[_Iterable[_Union[PbFamily, _Mapping]]] = ..., pruned: _Optional[_Iterable[str]] = ..., reloaded: bool = ...) -> None: ... class FPDeltas(_message.Message): - __slots__ = ["time", "checksum", "added", "updated", "pruned", "reloaded"] + __slots__ = ("time", "checksum", "added", "updated", "pruned", "reloaded") TIME_FIELD_NUMBER: _ClassVar[int] CHECKSUM_FIELD_NUMBER: _ClassVar[int] ADDED_FIELD_NUMBER: _ClassVar[int] @@ -550,7 +552,7 @@ class FPDeltas(_message.Message): def __init__(self, time: _Optional[float] = ..., checksum: _Optional[int] = ..., added: _Optional[_Iterable[_Union[PbFamilyProxy, _Mapping]]] = ..., updated: _Optional[_Iterable[_Union[PbFamilyProxy, _Mapping]]] = ..., pruned: _Optional[_Iterable[str]] = ..., reloaded: bool = ...) -> None: ... class JDeltas(_message.Message): - __slots__ = ["time", "checksum", "added", "updated", "pruned", "reloaded"] + __slots__ = ("time", "checksum", "added", "updated", "pruned", "reloaded") TIME_FIELD_NUMBER: _ClassVar[int] CHECKSUM_FIELD_NUMBER: _ClassVar[int] ADDED_FIELD_NUMBER: _ClassVar[int] @@ -566,7 +568,7 @@ class JDeltas(_message.Message): def __init__(self, time: _Optional[float] = ..., checksum: _Optional[int] = ..., added: _Optional[_Iterable[_Union[PbJob, _Mapping]]] = ..., updated: _Optional[_Iterable[_Union[PbJob, _Mapping]]] = ..., pruned: _Optional[_Iterable[str]] = ..., reloaded: bool = ...) -> None: ... 
class TDeltas(_message.Message): - __slots__ = ["time", "checksum", "added", "updated", "pruned", "reloaded"] + __slots__ = ("time", "checksum", "added", "updated", "pruned", "reloaded") TIME_FIELD_NUMBER: _ClassVar[int] CHECKSUM_FIELD_NUMBER: _ClassVar[int] ADDED_FIELD_NUMBER: _ClassVar[int] @@ -582,7 +584,7 @@ class TDeltas(_message.Message): def __init__(self, time: _Optional[float] = ..., checksum: _Optional[int] = ..., added: _Optional[_Iterable[_Union[PbTask, _Mapping]]] = ..., updated: _Optional[_Iterable[_Union[PbTask, _Mapping]]] = ..., pruned: _Optional[_Iterable[str]] = ..., reloaded: bool = ...) -> None: ... class TPDeltas(_message.Message): - __slots__ = ["time", "checksum", "added", "updated", "pruned", "reloaded"] + __slots__ = ("time", "checksum", "added", "updated", "pruned", "reloaded") TIME_FIELD_NUMBER: _ClassVar[int] CHECKSUM_FIELD_NUMBER: _ClassVar[int] ADDED_FIELD_NUMBER: _ClassVar[int] @@ -598,7 +600,7 @@ class TPDeltas(_message.Message): def __init__(self, time: _Optional[float] = ..., checksum: _Optional[int] = ..., added: _Optional[_Iterable[_Union[PbTaskProxy, _Mapping]]] = ..., updated: _Optional[_Iterable[_Union[PbTaskProxy, _Mapping]]] = ..., pruned: _Optional[_Iterable[str]] = ..., reloaded: bool = ...) -> None: ... class WDeltas(_message.Message): - __slots__ = ["time", "added", "updated", "reloaded", "pruned"] + __slots__ = ("time", "added", "updated", "reloaded", "pruned") TIME_FIELD_NUMBER: _ClassVar[int] ADDED_FIELD_NUMBER: _ClassVar[int] UPDATED_FIELD_NUMBER: _ClassVar[int] @@ -612,7 +614,7 @@ class WDeltas(_message.Message): def __init__(self, time: _Optional[float] = ..., added: _Optional[_Union[PbWorkflow, _Mapping]] = ..., updated: _Optional[_Union[PbWorkflow, _Mapping]] = ..., reloaded: bool = ..., pruned: _Optional[str] = ...) -> None: ... 
class AllDeltas(_message.Message): - __slots__ = ["families", "family_proxies", "jobs", "tasks", "task_proxies", "edges", "workflow"] + __slots__ = ("families", "family_proxies", "jobs", "tasks", "task_proxies", "edges", "workflow") FAMILIES_FIELD_NUMBER: _ClassVar[int] FAMILY_PROXIES_FIELD_NUMBER: _ClassVar[int] JOBS_FIELD_NUMBER: _ClassVar[int] diff --git a/cylc/flow/data_store_mgr.py b/cylc/flow/data_store_mgr.py index a4b28d44fdc..a9eee3f175c 100644 --- a/cylc/flow/data_store_mgr.py +++ b/cylc/flow/data_store_mgr.py @@ -260,6 +260,7 @@ def runtime_from_config(rtconfig): pre_script=rtconfig['pre-script'], post_script=rtconfig['post-script'], work_sub_dir=rtconfig['work sub-directory'], + run_mode=rtconfig['run mode'], execution_time_limit=str(rtconfig['execution time limit'] or ''), execution_polling_intervals=listjoin( rtconfig['execution polling intervals'] diff --git a/cylc/flow/network/schema.py b/cylc/flow/network/schema.py index ab34def7f75..97886545b37 100644 --- a/cylc/flow/network/schema.py +++ b/cylc/flow/network/schema.py @@ -51,6 +51,7 @@ from cylc.flow.id import Tokens from cylc.flow.task_outputs import SORT_ORDERS from cylc.flow.task_state import ( + RunMode, TASK_STATUSES_ORDERED, TASK_STATUS_DESC, TASK_STATUS_WAITING, @@ -66,6 +67,7 @@ from cylc.flow.workflow_status import StopMode if TYPE_CHECKING: + from enum import Enum from graphql import ResolveInfo from graphql.type.definition import ( GraphQLNamedType, @@ -596,6 +598,29 @@ class Meta: string_extended = String() +def describe_run_mode(run_mode: Optional['Enum']) -> str: + """Returns description for a workflow/task run mode.""" + if not run_mode: + return "" + return getattr(RunMode, run_mode.value.upper()).__doc__ + + +WorkflowRunMode = graphene.Enum( + 'WorkflowRunMode', + [(m.capitalize(), m) for m in RunMode.WORKFLOW_MODES.value], + description=describe_run_mode, +) +"""The run mode for the workflow.""" + + +TaskRunMode = graphene.Enum( + 'TaskRunMode', + [(m.capitalize(), m) for m in 
RunMode.WORKFLOW_MODES.value], + description=describe_run_mode, +) +"""The run mode for tasks.""" + + class Workflow(ObjectType): class Meta: description = 'Global workflow info.' @@ -823,6 +848,7 @@ class Meta: directives = graphene.List(RuntimeSetting, resolver=resolve_json_dump) environment = graphene.List(RuntimeSetting, resolver=resolve_json_dump) outputs = graphene.List(RuntimeSetting, resolver=resolve_json_dump) + run_mode = TaskRunMode(default_value=TaskRunMode.Live.name) RUNTIME_FIELD_TO_CFG_MAP = { @@ -1503,9 +1529,9 @@ class RuntimeConfiguration(String): class BroadcastMode(graphene.Enum): - Set = 'put_broadcast' - Clear = 'clear_broadcast' - Expire = 'expire_broadcast' + Set = cast('Enum', 'put_broadcast') + Clear = cast('Enum', 'clear_broadcast') + Expire = cast('Enum', 'expire_broadcast') @property def description(self): @@ -1630,10 +1656,10 @@ class WorkflowStopMode(graphene.Enum): # * Graphene requires special enums. # * We only want to offer a subset of stop modes (REQUEST_* only). 
- Clean = StopMode.REQUEST_CLEAN.value # type: graphene.Enum - Kill = StopMode.REQUEST_KILL.value # type: graphene.Enum - Now = StopMode.REQUEST_NOW.value # type: graphene.Enum - NowNow = StopMode.REQUEST_NOW_NOW.value # type: graphene.Enum + Clean = cast('Enum', StopMode.REQUEST_CLEAN.value) + Kill = cast('Enum', StopMode.REQUEST_KILL.value) + Now = cast('Enum', StopMode.REQUEST_NOW.value) + NowNow = cast('Enum', StopMode.REQUEST_NOW_NOW.value) @property def description(self): @@ -1690,7 +1716,7 @@ class Arguments: mode = BroadcastMode( # use the enum name as the default value # https://github.com/graphql-python/graphql-core-legacy/issues/166 - default_value=BroadcastMode.Set.name, # type: ignore + default_value=BroadcastMode.Set.name, description='What type of broadcast is this?', required=True ) diff --git a/cylc/flow/platforms.py b/cylc/flow/platforms.py index d06c84ade92..fa49e598ec2 100644 --- a/cylc/flow/platforms.py +++ b/cylc/flow/platforms.py @@ -31,6 +31,7 @@ PlatformLookupError, CylcError, NoHostsError, NoPlatformsError) from cylc.flow.cfgspec.glbl_cfg import glbl_cfg from cylc.flow.hostuserutil import is_remote_host +from cylc.flow.task_state import RunMode if TYPE_CHECKING: from cylc.flow.parsec.OrderedDict import OrderedDictWithDefaults @@ -265,6 +266,10 @@ def platform_from_name( platform_data['name'] = platform_name return platform_data + # If platform name in run mode and not otherwise defined: + if platform_name in RunMode.JOBLESS_MODES.value: + return platforms['localhost'] + raise PlatformLookupError( f"No matching platform \"{platform_name}\" found") @@ -647,7 +652,7 @@ def get_install_target_to_platforms_map( Return {install_target_1: [platform_1_dict, platform_2_dict, ...], ...} """ ret: Dict[str, List[Dict[str, Any]]] = {} - for p_name in set(platform_names): + for p_name in set(platform_names) - set(RunMode.JOBLESS_MODES.value): try: platform = platform_from_name(p_name) except PlatformLookupError as exc: @@ -656,6 +661,14 @@ def 
get_install_target_to_platforms_map( else: install_target = get_install_target_from_platform(platform) ret.setdefault(install_target, []).append(platform) + + # Map jobless modes to localhost. + if 'localhost' in ret: + ret['localhost'] += [ + {'name': mode} for mode in RunMode.JOBLESS_MODES.value] + else: + ret['localhost'] = [ + {'name': mode} for mode in RunMode.JOBLESS_MODES.value] return ret diff --git a/cylc/flow/prerequisite.py b/cylc/flow/prerequisite.py index 486c7e84ab3..04ea4596c09 100644 --- a/cylc/flow/prerequisite.py +++ b/cylc/flow/prerequisite.py @@ -72,6 +72,8 @@ def coerce(tuple_: AnyPrereqMessage) -> 'PrereqMessage': SatisfiedState = Literal[ 'satisfied naturally', 'satisfied from database', + 'satisfied by skip mode', + 'satisfied by simulation mode', 'force satisfied', False ] @@ -101,6 +103,12 @@ class Prerequisite: SATISFIED_TEMPLATE = 'bool(self._satisfied[("%s", "%s", "%s")])' MESSAGE_TEMPLATE = r'%s/%s %s' + DEP_STATE_SATISFIED: SatisfiedState = 'satisfied naturally' + DEP_STATE_SATISFIED_BY = 'satisfied by {} mode' + DEP_STATE_OVERRIDDEN = 'force satisfied' + DEP_STATE_UNSATISFIED = False + SATISFIED_MODE_RE = re.compile(r'satisfied by .* mode') + def __init__(self, point: 'PointBase'): # The cycle point to which this prerequisite belongs. # cylc.flow.cycling.PointBase @@ -253,13 +261,22 @@ def _eval_satisfied(self) -> bool: ) from None return res - def satisfy_me(self, outputs: Iterable['Tokens']) -> 'Set[Tokens]': + def satisfy_me( + self, outputs: Iterable['Tokens'], + mode: Literal['skip', 'live', 'simulation', 'skip'] = 'live' + ) -> 'Set[Tokens]': """Attempt to satisfy me with given outputs. Updates cache with the result. Return outputs that match. 
""" + satisfied_message: SatisfiedState + if mode != 'live': + satisfied_message = self.DEP_STATE_SATISFIED_BY.format( + mode) # type: ignore + else: + satisfied_message = self.DEP_STATE_SATISFIED valid = set() for output in outputs: prereq = PrereqMessage( @@ -268,7 +285,7 @@ def satisfy_me(self, outputs: Iterable['Tokens']) -> 'Set[Tokens]': if prereq not in self._satisfied: continue valid.add(output) - self[prereq] = 'satisfied naturally' + self[prereq] = satisfied_message return valid def api_dump(self) -> Optional[PbPrerequisite]: diff --git a/cylc/flow/run_modes/dummy.py b/cylc/flow/run_modes/dummy.py new file mode 100644 index 00000000000..91935ee5c3b --- /dev/null +++ b/cylc/flow/run_modes/dummy.py @@ -0,0 +1,125 @@ +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +"""Utilities supporting dummy mode. + +Dummy mode shares settings with simulation mode. 
+""" + +from typing import TYPE_CHECKING, Any, Dict, Tuple + +from cylc.flow.run_modes.simulation import ( + ModeSettings, + disable_platforms, + get_simulated_run_len, + parse_fail_cycle_points +) +from cylc.flow.task_state import RunMode +from cylc.flow.platforms import get_platform + + +if TYPE_CHECKING: + from cylc.flow.task_job_mgr import TaskJobManager + from cylc.flow.task_proxy import TaskProxy + from typing_extensions import Literal + + +CLEAR_THESE_SCRIPTS = [ + 'init-script', + 'env-script', + 'pre-script', + 'post-script', + 'err-script', + 'exit-script', +] + + +def submit_task_job( + task_job_mgr: 'TaskJobManager', + itask: 'TaskProxy', + rtconfig: Dict[str, Any], + workflow: str, + now: Tuple[float, str] +) -> 'Literal[False]': + """Submit a task in dummy mode. + + Returns: + False indicating that TaskJobManager needs to continue running the + live mode path. + """ + configure_dummy_mode( + rtconfig, itask.tdef.rtconfig['simulation']['fail cycle points']) + + itask.summary['started_time'] = now[0] + task_job_mgr._set_retry_timers(itask, rtconfig) + + itask.mode_settings = ModeSettings( + itask, + task_job_mgr.workflow_db_mgr, + rtconfig + ) + + itask.platform = get_platform() + itask.platform['name'] = RunMode.DUMMY.value + itask.summary['job_runner_name'] = RunMode.DUMMY.value + itask.summary[task_job_mgr.KEY_EXECUTE_TIME_LIMIT] = ( + itask.mode_settings.simulated_run_length) + itask.jobs.append( + task_job_mgr.get_simulation_job_conf(itask, workflow)) + task_job_mgr.workflow_db_mgr.put_insert_task_jobs( + itask, { + 'time_submit': now[1], + 'try_num': itask.get_try_num(), + } + ) + + return False + + +def configure_dummy_mode(rtc: Dict[str, Any], fallback: str) -> None: + """Adjust task defs for dummy mode. + """ + rtc['submission retry delays'] = [1] + # Generate dummy scripting. 
+ + for script in CLEAR_THESE_SCRIPTS: + rtc[script] = '' + + rtc['script'] = build_dummy_script( + rtc, get_simulated_run_len(rtc)) + disable_platforms(rtc) + # Disable environment, in case it depends on env-script. + rtc['environment'] = {} + rtc["simulation"][ + "fail cycle points" + ] = parse_fail_cycle_points( + rtc["simulation"]["fail cycle points"], fallback + ) + + +def build_dummy_script(rtc: Dict[str, Any], sleep_sec: int) -> str: + """Create fake scripting for dummy mode script. + """ + script = "sleep %d" % sleep_sec + # Dummy message outputs. + for msg in rtc['outputs'].values(): + script += "\ncylc message '%s'" % msg + if rtc['simulation']['fail try 1 only']: + arg1 = "true" + else: + arg1 = "false" + arg2 = " ".join(rtc['simulation']['fail cycle points']) + script += "\ncylc__job__dummy_result %s %s || exit 1" % (arg1, arg2) + return script diff --git a/cylc/flow/run_modes/nonlive.py b/cylc/flow/run_modes/nonlive.py new file mode 100644 index 00000000000..5bea9f70be5 --- /dev/null +++ b/cylc/flow/run_modes/nonlive.py @@ -0,0 +1,55 @@ +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+"""Utilities supporting all nonlive modes +""" +from typing import TYPE_CHECKING, Dict, List + +from cylc.flow import LOG +from cylc.flow.run_modes.skip import check_task_skip_config +from cylc.flow.task_state import RunMode + +if TYPE_CHECKING: + from cylc.flow.taskdef import TaskDef + + +def run_mode_validate_checks(taskdefs: 'Dict[str, TaskDef]') -> None: + """Warn user if any tasks have "run mode" set to skip. + """ + warn_nonlive: Dict[str, List[str]] = { + RunMode.SKIP.value: [], + } + + # Run through taskdefs looking for those with nonlive modes + for taskdef in taskdefs.values(): + # Add to list of tasks to be run in non-live modes: + if ( + taskdef.rtconfig.get('run mode', None) + in { + RunMode.SIMULATION.value, + RunMode.SKIP.value, + RunMode.DUMMY.value + } + ): + warn_nonlive[taskdef.rtconfig['run mode']].append(taskdef.name) + + # Run any mode specific validation checks: + check_task_skip_config(taskdef) + + if any(warn_nonlive.values()): + message = 'The following tasks are set to run in skip mode:' + for taskname in warn_nonlive[RunMode.SKIP.value]: + message += f'\n * {taskname}' + LOG.warning(message) diff --git a/cylc/flow/simulation.py b/cylc/flow/run_modes/simulation.py similarity index 62% rename from cylc/flow/simulation.py rename to cylc/flow/run_modes/simulation.py index 8ec4d279cb9..122277bcf4c 100644 --- a/cylc/flow/simulation.py +++ b/cylc/flow/run_modes/simulation.py @@ -13,40 +13,100 @@ # # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
-"""Utilities supporting simulation and skip modes +"""Utilities supporting simulation mode """ from dataclasses import dataclass -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union +from logging import INFO +from typing import ( + TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union) from time import time from metomi.isodatetime.parsers import DurationParser from cylc.flow import LOG +from cylc.flow.cycling import PointBase from cylc.flow.cycling.loader import get_point from cylc.flow.exceptions import PointParsingError from cylc.flow.platforms import FORBIDDEN_WITH_PLATFORM +from cylc.flow.task_outputs import TASK_OUTPUT_SUBMITTED from cylc.flow.task_state import ( TASK_STATUS_RUNNING, TASK_STATUS_FAILED, TASK_STATUS_SUCCEEDED, ) from cylc.flow.wallclock import get_unix_time_from_time_string -from cylc.flow.workflow_status import RunMode +from cylc.flow.task_state import RunMode if TYPE_CHECKING: from cylc.flow.task_events_mgr import TaskEventsManager + from cylc.flow.task_job_mgr import TaskJobManager from cylc.flow.task_proxy import TaskProxy from cylc.flow.workflow_db_mgr import WorkflowDatabaseManager - from cylc.flow.cycling import PointBase + from typing_extensions import Literal + + +def submit_task_job( + task_job_mgr: 'TaskJobManager', + itask: 'TaskProxy', + rtconfig: Dict[str, Any], + workflow: str, + now: Tuple[float, str] +) -> 'Literal[True]': + """Submit a task in simulation mode. + + Returns: + True - indicating that TaskJobManager need take no further action. 
+ """ + configure_sim_mode( + rtconfig, + itask.tdef.rtconfig['simulation']['fail cycle points']) + itask.summary['started_time'] = now[0] + task_job_mgr._set_retry_timers(itask, rtconfig) + itask.mode_settings = ModeSettings( + itask, + task_job_mgr.workflow_db_mgr, + rtconfig + ) + itask.waiting_on_job_prep = False + itask.submit_num += 1 + + itask.platform = { + 'name': RunMode.SIMULATION.value, 'install target': 'localhost'} + itask.platform['name'] = RunMode.SIMULATION.value + itask.summary['job_runner_name'] = RunMode.SIMULATION.value + itask.summary[task_job_mgr.KEY_EXECUTE_TIME_LIMIT] = ( + itask.mode_settings.simulated_run_length + ) + itask.jobs.append( + task_job_mgr.get_simulation_job_conf(itask, workflow) + ) + task_job_mgr.task_events_mgr.process_message( + itask, INFO, TASK_OUTPUT_SUBMITTED, + ) + task_job_mgr.workflow_db_mgr.put_insert_task_jobs( + itask, { + 'time_submit': now[1], + 'try_num': itask.get_try_num(), + 'flow_nums': str(list(itask.flow_nums)), + 'is_manual_submit': itask.is_manual_submit, + 'job_runner_name': RunMode.SIMULATION.value, + 'platform_name': RunMode.SIMULATION.value, + 'submit_status': 0 # Submission has succeeded + } + ) + itask.state.status = TASK_STATUS_RUNNING + return True @dataclass class ModeSettings: """A store of state for simulation modes. - Used instead of modifying the runtime config. + Used instead of modifying the runtime config. We want to leave the + config unchanged so that clearing a broadcast change of run mode + clears the run mode settings. 
Args: itask: @@ -79,20 +139,18 @@ def __init__( db_mgr: 'WorkflowDatabaseManager', rtconfig: Dict[str, Any] ): - # itask.summary['started_time'] and mode_settings.timeout need # repopulating from the DB on workflow restart: started_time = itask.summary['started_time'] try_num = None if started_time is None: - # Get DB info + # This is a restart - Get DB info db_info = db_mgr.pri_dao.select_task_job( itask.tokens['cycle'], itask.tokens['task'], itask.tokens['job'], ) - # Get the started time: if db_info['time_submit']: started_time = get_unix_time_from_time_string( db_info["time_submit"]) @@ -100,28 +158,20 @@ def __init__( else: started_time = time() - # Get the try number: try_num = db_info["try_num"] # Parse fail cycle points: - if rtconfig != itask.tdef.rtconfig: - try: - rtconfig["simulation"][ - "fail cycle points" - ] = parse_fail_cycle_points( - rtconfig["simulation"]["fail cycle points"] - ) - except PointParsingError as exc: - # Broadcast Fail CP didn't parse - LOG.warning( - 'Broadcast fail cycle point was invalid:\n' - f' {exc.args[0]}' - ) - rtconfig['simulation'][ - 'fail cycle points' - ] = itask.tdef.rtconfig['simulation']['fail cycle points'] + if not rtconfig: + rtconfig = itask.tdef.rtconfig + if rtconfig and rtconfig != itask.tdef.rtconfig: + rtconfig["simulation"][ + "fail cycle points" + ] = parse_fail_cycle_points( + rtconfig["simulation"]["fail cycle points"], + itask.tdef.rtconfig['simulation']['fail cycle points'] + ) - # Calculate simulation info: + # Calculate simulation outcome and run-time: self.simulated_run_length = ( get_simulated_run_len(rtconfig)) self.sim_task_fails = sim_task_failed( @@ -132,37 +182,39 @@ def __init__( self.timeout = started_time + self.simulated_run_length -def configure_sim_modes(taskdefs, sim_mode): - """Adjust task defs for simulation and dummy mode. - +def configure_sim_mode(rtc, fallback): + """Adjust task defs for simulation mode. + + Example: + >>> this = configure_sim_mode + >>> rtc = { + ... 
'submission retry delays': [42, 24, 23], + ... 'environment': {'DoNot': '"WantThis"'}, + ... 'simulation': {'fail cycle points': ['all']} + ... } + >>> this(rtc, [53]) + >>> rtc['submission retry delays'] + [1] + >>> rtc['environment'] + {} + >>> rtc['simulation'] + {'fail cycle points': None} + >>> rtc['platform'] + 'localhost' """ - dummy_mode = (sim_mode == RunMode.DUMMY) - - for tdef in taskdefs: - # Compute simulated run time by scaling the execution limit. - rtc = tdef.rtconfig - - rtc['submission retry delays'] = [1] + rtc['submission retry delays'] = [1] - if dummy_mode: - # Generate dummy scripting. - rtc['init-script'] = "" - rtc['env-script'] = "" - rtc['pre-script'] = "" - rtc['post-script'] = "" - rtc['script'] = build_dummy_script( - rtc, get_simulated_run_len(rtc)) + disable_platforms(rtc) - disable_platforms(rtc) + # Disable environment, in case it depends on env-script. + rtc['environment'] = {} - # Disable environment, in case it depends on env-script. - rtc['environment'] = {} - - rtc["simulation"][ - "fail cycle points" - ] = parse_fail_cycle_points( - rtc["simulation"]["fail cycle points"] - ) + rtc["simulation"][ + "fail cycle points" + ] = parse_fail_cycle_points( + rtc["simulation"]["fail cycle points"], + fallback + ) def get_simulated_run_len(rtc: Dict[str, Any]) -> int: @@ -184,24 +236,6 @@ def get_simulated_run_len(rtc: Dict[str, Any]) -> int: return sleep_sec -def build_dummy_script(rtc: Dict[str, Any], sleep_sec: int) -> str: - """Create fake scripting for dummy mode. - - This is for Dummy mode only. - """ - script = "sleep %d" % sleep_sec - # Dummy message outputs. 
- for msg in rtc['outputs'].values(): - script += "\ncylc message '%s'" % msg - if rtc['simulation']['fail try 1 only']: - arg1 = "true" - else: - arg1 = "false" - arg2 = " ".join(rtc['simulation']['fail cycle points']) - script += "\ncylc__job__dummy_result %s %s || exit 1" % (arg1, arg2) - return script - - def disable_platforms( rtc: Dict[str, Any] ) -> None: @@ -222,7 +256,7 @@ def disable_platforms( def parse_fail_cycle_points( - f_pts_orig: List[str] + f_pts_orig: List[str], fallback ) -> 'Union[None, List[PointBase]]': """Parse `[simulation][fail cycle points]`. @@ -231,11 +265,11 @@ def parse_fail_cycle_points( Examples: >>> this = parse_fail_cycle_points - >>> this(['all']) is None + >>> this(['all'], ['42']) is None True - >>> this([]) + >>> this([], ['42']) [] - >>> this(None) is None + >>> this(None, ['42']) is None True """ f_pts: 'Optional[List[PointBase]]' = [] @@ -247,7 +281,16 @@ def parse_fail_cycle_points( elif f_pts_orig: f_pts = [] for point_str in f_pts_orig: - f_pts.append(get_point(point_str).standardise()) + if isinstance(point_str, PointBase): + f_pts.append(point_str) + else: + try: + f_pts.append(get_point(point_str).standardise()) + except PointParsingError: + LOG.warning( + f'Invalid ISO 8601 date representation: {point_str}' + ) + return fallback return f_pts @@ -266,13 +309,19 @@ def sim_time_check( now = time() sim_task_state_changed: bool = False for itask in itasks: - if itask.state.status != TASK_STATUS_RUNNING: + if ( + itask.state.status != TASK_STATUS_RUNNING + or itask.run_mode and itask.run_mode != RunMode.SIMULATION.value + ): continue # This occurs if the workflow has been restarted. 
if itask.mode_settings is None: rtconfig = task_events_manager.broadcast_mgr.get_updated_rtconfig( itask) + rtconfig = configure_sim_mode( + rtconfig, + itask.tdef.rtconfig['simulation']['fail cycle points']) itask.mode_settings = ModeSettings( itask, db_mgr, diff --git a/cylc/flow/run_modes/skip.py b/cylc/flow/run_modes/skip.py new file mode 100644 index 00000000000..960301bfabc --- /dev/null +++ b/cylc/flow/run_modes/skip.py @@ -0,0 +1,161 @@ +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +"""Utilities supporting skip modes +""" +from logging import INFO +from typing import ( + TYPE_CHECKING, Dict, List, Tuple) + +from cylc.flow.exceptions import WorkflowConfigError +from cylc.flow.task_outputs import ( + TASK_OUTPUT_SUBMITTED, + TASK_OUTPUT_SUCCEEDED, + TASK_OUTPUT_FAILED, + TASK_OUTPUT_STARTED +) +from cylc.flow.task_state import RunMode + +if TYPE_CHECKING: + from cylc.flow.taskdef import TaskDef + from cylc.flow.task_job_mgr import TaskJobManager + from cylc.flow.task_proxy import TaskProxy + from typing_extensions import Literal + + +def submit_task_job( + task_job_mgr: 'TaskJobManager', + itask: 'TaskProxy', + rtconfig: Dict, + now: Tuple[float, str] +) -> 'Literal[True]': + """Submit a task in skip mode. 
+
+    Returns:
+        True - indicating that TaskJobManager needs take no further action.
+    """
+    # Don't do anything if task is held:
+    if itask.state.is_held:
+        return True
+
+    task_job_mgr._set_retry_timers(itask, rtconfig)
+    itask.summary['started_time'] = now[0]
+    itask.waiting_on_job_prep = False
+    itask.submit_num += 1
+
+    itask.platform = {
+        'name': RunMode.SKIP.value,
+        'install target': 'localhost',
+        'hosts': ['localhost'],
+        'disable task event handlers':
+            rtconfig['skip']['disable task event handlers'],
+        'execution polling intervals': []
+    }
+    itask.platform['name'] = RunMode.SKIP.value
+    itask.summary['job_runner_name'] = RunMode.SKIP.value
+    itask.run_mode = RunMode.SKIP.value
+    task_job_mgr.workflow_db_mgr.put_insert_task_jobs(
+        itask, {
+            'time_submit': now[1],
+            'try_num': itask.get_try_num(),
+            'flow_nums': str(list(itask.flow_nums)),
+            'is_manual_submit': itask.is_manual_submit,
+            'job_runner_name': RunMode.SKIP.value,
+            'platform_name': RunMode.SKIP.value,
+            'submit_status': 0  # Submission has succeeded
+        }
+    )
+    for output in process_outputs(itask, rtconfig):
+        task_job_mgr.task_events_mgr.process_message(itask, INFO, output)
+
+    return True
+
+
+def process_outputs(itask: 'TaskProxy', rtconfig: Dict) -> List[str]:
+    """Process Skip Mode Outputs:
+
+    * By default, all required outputs will be generated plus succeeded
+      if success is optional.
+    * The outputs submitted and started are always produced and do not
+      need to be defined in outputs.
+    * If outputs is specified and does not include either
+      succeeded or failed then succeeded will be produced.
+
+    Return:
+        A list of outputs to emit.
+
+    """
+    # Always produce `submitted` & `started` outputs first:
+    result: List[str] = [TASK_OUTPUT_SUBMITTED, TASK_OUTPUT_STARTED]
+
+    conf_outputs = list(rtconfig['skip']['outputs'])
+
+    # Send the rest of our outputs, unless they are succeeded or failed,
+    # which we hold back, to prevent warnings about pre-requisites being
+    # unmet being shown because a "finished" output happens to come first.
+    for message in itask.state.outputs.iter_required_messages(
+        exclude=(
+            TASK_OUTPUT_SUCCEEDED if TASK_OUTPUT_FAILED
+            in conf_outputs else TASK_OUTPUT_FAILED
+        )
+    ):
+        trigger = itask.state.outputs._message_to_trigger[message]
+        # Send message unless it is succeeded/failed.
+        if (
+            trigger not in {
+                TASK_OUTPUT_SUCCEEDED,
+                TASK_OUTPUT_FAILED,
+                TASK_OUTPUT_SUBMITTED,
+                TASK_OUTPUT_STARTED,
+            }
+            and (not conf_outputs or trigger in conf_outputs)
+        ):
+            result.append(message)
+
+    # Add optional outputs specified in skip settings:
+    for message, trigger in itask.state.outputs._message_to_trigger.items():
+        if trigger in conf_outputs and trigger not in result:
+            result.append(message)
+
+    # Send succeeded/failed last.
+    if TASK_OUTPUT_FAILED in conf_outputs:
+        result.append(TASK_OUTPUT_FAILED)
+    elif TASK_OUTPUT_SUCCEEDED not in result:
+        result.append(TASK_OUTPUT_SUCCEEDED)
+
+    return result
+
+
+def check_task_skip_config(tdef: 'TaskDef') -> None:
+    """Validate Skip Mode configuration.
+
+    Raises:
+        * Error if outputs include succeeded and failed.
+ """ + skip_config = tdef.rtconfig.get('skip', {}) + if not skip_config: + return + skip_outputs = skip_config.get('outputs', {}) + if not skip_outputs: + return + + # Error if outputs include succeded and failed: + if ( + TASK_OUTPUT_SUCCEEDED in skip_outputs + and TASK_OUTPUT_FAILED in skip_outputs + ): + raise WorkflowConfigError( + f'Skip mode settings for task {tdef.name} has' + ' mutually exclusive outputs: succeeded AND failed.') diff --git a/cylc/flow/scheduler.py b/cylc/flow/scheduler.py index d3f7fd18a36..d4b5d0310a5 100644 --- a/cylc/flow/scheduler.py +++ b/cylc/flow/scheduler.py @@ -108,8 +108,13 @@ ) from cylc.flow.profiler import Profiler from cylc.flow.resources import get_resources -from cylc.flow.simulation import sim_time_check +from cylc.flow.run_modes.simulation import sim_time_check from cylc.flow.subprocpool import SubProcPool +from cylc.flow.templatevars import eval_var +from cylc.flow.workflow_db_mgr import WorkflowDatabaseManager +from cylc.flow.workflow_events import WorkflowEventHandler +from cylc.flow.workflow_status import StopMode, AutoRestartMode +from cylc.flow.taskdef import TaskDef from cylc.flow.task_events_mgr import TaskEventsManager from cylc.flow.task_job_mgr import TaskJobManager from cylc.flow.task_pool import TaskPool @@ -128,9 +133,7 @@ TASK_STATUS_RUNNING, TASK_STATUS_SUBMITTED, TASK_STATUS_WAITING, -) -from cylc.flow.taskdef import TaskDef -from cylc.flow.templatevars import eval_var + RunMode) from cylc.flow.templatevars import get_template_vars from cylc.flow.timer import Timer from cylc.flow.util import cli_format @@ -139,9 +142,6 @@ get_time_string_from_unix_time as time2str, get_utc_mode, ) -from cylc.flow.workflow_db_mgr import WorkflowDatabaseManager -from cylc.flow.workflow_events import WorkflowEventHandler -from cylc.flow.workflow_status import AutoRestartMode, RunMode, StopMode from cylc.flow.xtrigger_mgr import XtriggerManager if TYPE_CHECKING: @@ -1154,7 +1154,7 @@ def _set_workflow_params( 
LOG.info('LOADING workflow parameters') for key, value in params: if key == self.workflow_db_mgr.KEY_RUN_MODE: - self.options.run_mode = value or RunMode.LIVE + self.options.run_mode = value or RunMode.LIVE.value LOG.info(f"+ run mode = {value}") if value is None: continue @@ -1219,9 +1219,9 @@ def _load_template_vars(self, _, row): def run_event_handlers(self, event, reason=""): """Run a workflow event handler. - Run workflow events in simulation and dummy mode ONLY if enabled. + Run workflow events only in live mode or skip mode. """ - if self.get_run_mode() in {RunMode.SIMULATION, RunMode.DUMMY}: + if self.get_run_mode() in RunMode.NON_OVERRIDABLE_MODES.value: return self.workflow_event_handler.handle(self, event, str(reason)) @@ -1295,7 +1295,7 @@ def release_queued_tasks(self) -> bool: pre_prep_tasks, self.server.curve_auth, self.server.client_pub_key_dir, - is_simulation=(self.get_run_mode() == RunMode.SIMULATION) + run_mode=self.get_run_mode() ): if itask.flow_nums: flow = ','.join(str(i) for i in itask.flow_nums) @@ -1346,7 +1346,7 @@ def timeout_check(self): """Check workflow and task timers.""" self.check_workflow_timers() # check submission and execution timeout and polling timers - if self.get_run_mode() != RunMode.SIMULATION: + if self.get_run_mode() != RunMode.SIMULATION.value: self.task_job_mgr.check_task_jobs(self.workflow, self.pool) async def workflow_shutdown(self): @@ -1542,12 +1542,10 @@ async def _main_loop(self) -> None: if self.xtrigger_mgr.do_housekeeping: self.xtrigger_mgr.housekeep(self.pool.get_tasks()) - self.pool.clock_expire_tasks() self.release_queued_tasks() - if ( - self.get_run_mode() == RunMode.SIMULATION + self.options.run_mode == RunMode.SIMULATION.value and sim_time_check( self.task_events_mgr, self.pool.get_tasks(), diff --git a/cylc/flow/scheduler_cli.py b/cylc/flow/scheduler_cli.py index 601dd3112b6..aa5a821558c 100644 --- a/cylc/flow/scheduler_cli.py +++ b/cylc/flow/scheduler_cli.py @@ -54,6 +54,7 @@ from cylc.flow.remote 
import cylc_server_cmd from cylc.flow.scheduler import Scheduler, SchedulerError from cylc.flow.scripts.common import cylc_header +from cylc.flow.task_state import RunMode from cylc.flow.workflow_db_mgr import WorkflowDatabaseManager from cylc.flow.workflow_files import ( SUITERC_DEPR_MSG, @@ -65,7 +66,6 @@ is_terminal, prompt, ) -from cylc.flow.workflow_status import RunMode if TYPE_CHECKING: from optparse import Values @@ -129,9 +129,15 @@ RUN_MODE = OptionSettings( ["-m", "--mode"], - help="Run mode: live, dummy, simulation (default live).", + help=( + f"Run mode: {RunMode.WORKFLOW_MODES.value} (default live)." + " Live mode executes the tasks as defined in the runtime section." + " Simulation, skip and dummy modes ignore part of tasks'" + " runtime configurations. Simulation and dummy modes are" + " designed for testing, and skip mode is for flow control." + ), metavar="STRING", action='store', dest="run_mode", - choices=[RunMode.LIVE, RunMode.DUMMY, RunMode.SIMULATION], + choices=list(RunMode.WORKFLOW_MODES.value), ) PLAY_RUN_MODE = deepcopy(RUN_MODE) diff --git a/cylc/flow/scripts/lint.py b/cylc/flow/scripts/lint.py index 0dd991b8fa9..bdccd6c4722 100755 --- a/cylc/flow/scripts/lint.py +++ b/cylc/flow/scripts/lint.py @@ -96,6 +96,10 @@ ) from cylc.flow.parsec.config import ParsecConfig from cylc.flow.scripts.cylc import DEAD_ENDS +from cylc.flow.task_outputs import ( + TASK_OUTPUT_SUCCEEDED, + TASK_OUTPUT_FAILED, +) from cylc.flow.terminal import cli_function @@ -379,6 +383,38 @@ def check_for_deprecated_task_event_template_vars( return None +BAD_SKIP_OUTS = re.compile(r'outputs\s*=\s*(.*)') + + +def check_skip_mode_outputs(line: str) -> Dict: + """Ensure skip mode output setting doesn't include: + + * succeeded _and_ failed: Mutually exclusive. + * submitted and started: These are emitted by skip mode anyway. + + n.b. + + This should be separable from ``[[outputs]]`` because it's a key + value pair not a section heading. 
+ + Examples: + >>> this = check_skip_mode_outputs + >>> this('outputs = succeeded, failed') + {'description': 'are ... together', 'outputs': 'failed...succeeded'} + """ + + outputs = BAD_SKIP_OUTS.findall(line) + if outputs: + outputs = [i.strip() for i in outputs[0].split(',')] + if TASK_OUTPUT_FAILED in outputs and TASK_OUTPUT_SUCCEEDED in outputs: + return { + 'description': + 'are mutually exclusive and cannot be used together', + 'outputs': f'{TASK_OUTPUT_FAILED} and {TASK_OUTPUT_SUCCEEDED}' + } + return {} + + INDENTATION = re.compile(r'^(\s*)(.*)') @@ -619,7 +655,15 @@ def list_wrapper(line: str, check: Callable) -> Optional[Dict[str, str]]: for job_runner, directive in WALLCLOCK_DIRECTIVES.items() )), FUNCTION: check_wallclock_directives, - } + }, + 'S015': { + 'short': 'Task outputs {outputs}: {description}.', + FUNCTION: check_skip_mode_outputs + }, + 'S016': { + 'short': 'Run mode is not live: This task will only appear to run.', + FUNCTION: re.compile(r'run mode\s*=\s*[^l][^i][^v][^e]$').findall + }, } # Subset of deprecations which are tricky (impossible?) to scrape from the # upgrader. diff --git a/cylc/flow/scripts/set.py b/cylc/flow/scripts/set.py index b64cf74aba0..888ba20890e 100755 --- a/cylc/flow/scripts/set.py +++ b/cylc/flow/scripts/set.py @@ -65,6 +65,9 @@ # complete the succeeded output of 3/bar: $ cylc set --out=succeeded my_workflow//3/bar + # complete the outputs defined in [runtime][task][skip] + $ cylc set --out=skip my_workflow//3/bar + # satisfy the 3/foo:succeeded prerequisite of 3/bar: $ cylc set --pre=3/foo my_workflow//3/bar # or: @@ -154,8 +157,10 @@ def get_option_parser() -> COP: "-o", "--out", "--output", metavar="OUTPUT(s)", help=( "Complete task outputs. For multiple outputs re-use the" - " option, or give a comma-separated list of outputs, or" - ' use "--out=required" to complete all required outputs.' + " option, or give a comma-separated list of outputs." + ' Use "--out=required" to complete all required outputs.' 
+ ' Use "--out=skip" to complete outputs defined in the tasks.' + ' [skip] configuration.' " OUTPUT format: trigger names as used in the graph." ), action="append", default=None, dest="outputs" diff --git a/cylc/flow/scripts/validate.py b/cylc/flow/scripts/validate.py index bd3e6098906..9e4f8f7cb89 100755 --- a/cylc/flow/scripts/validate.py +++ b/cylc/flow/scripts/validate.py @@ -54,15 +54,12 @@ from cylc.flow.task_proxy import TaskProxy from cylc.flow.templatevars import get_template_vars from cylc.flow.terminal import cli_function -from cylc.flow.scheduler_cli import RUN_MODE -from cylc.flow.workflow_status import RunMode +from cylc.flow.task_state import RunMode if TYPE_CHECKING: from cylc.flow.option_parsers import Values -VALIDATE_RUN_MODE = deepcopy(RUN_MODE) -VALIDATE_RUN_MODE.sources = {'validate'} VALIDATE_ICP_OPTION = deepcopy(ICP_OPTION) VALIDATE_ICP_OPTION.sources = {'validate'} VALIDATE_AGAINST_SOURCE_OPTION = deepcopy(AGAINST_SOURCE_OPTION) @@ -98,7 +95,6 @@ dest="profile_mode", sources={'validate'} ), - VALIDATE_RUN_MODE, VALIDATE_ICP_OPTION, ] @@ -128,7 +124,7 @@ def get_option_parser(): { 'check_circular': False, 'profile_mode': False, - 'run_mode': RunMode.LIVE + 'run_mode': RunMode.LIVE.value } ) diff --git a/cylc/flow/task_events_mgr.py b/cylc/flow/task_events_mgr.py index 1706e83bbaf..8285ef68e50 100644 --- a/cylc/flow/task_events_mgr.py +++ b/cylc/flow/task_events_mgr.py @@ -79,7 +79,8 @@ TASK_STATUS_FAILED, TASK_STATUS_EXPIRED, TASK_STATUS_SUCCEEDED, - TASK_STATUS_WAITING + TASK_STATUS_WAITING, + RunMode, ) from cylc.flow.task_outputs import ( TASK_OUTPUT_EXPIRED, @@ -99,7 +100,6 @@ get_template_variables as get_workflow_template_variables, process_mail_footer, ) -from cylc.flow.workflow_status import RunMode if TYPE_CHECKING: @@ -772,7 +772,7 @@ def process_message( # ... but either way update the job ID in the job proxy (it only # comes in via the submission message). 
- if itask.tdef.run_mode != RunMode.SIMULATION: + if itask.run_mode != RunMode.SIMULATION.value: job_tokens = itask.tokens.duplicate( job=str(itask.submit_num) ) @@ -895,7 +895,7 @@ def _process_message_check( if ( itask.state(TASK_STATUS_WAITING) # Polling in live mode only: - and itask.tdef.run_mode == RunMode.LIVE + and itask.run_mode == RunMode.LIVE.value and ( ( # task has a submit-retry lined up @@ -940,7 +940,7 @@ def _process_message_check( def setup_event_handlers(self, itask, event, message): """Set up handlers for a task event.""" - if itask.tdef.run_mode != RunMode.LIVE: + if RunMode.disable_task_event_handlers(itask): return msg = "" if message != f"job {event}": @@ -1385,8 +1385,12 @@ def _process_message_succeeded(self, itask, event_time, forced): "run_status": 0, "time_run_exit": event_time, }) - # Update mean elapsed time only on task succeeded. - if itask.summary['started_time'] is not None: + # Update mean elapsed time only on task succeeded, + # and only if task is running in live mode: + if ( + itask.summary['started_time'] is not None + and itask.run_mode == RunMode.LIVE.value + ): itask.tdef.elapsed_times.append( itask.summary['finished_time'] - itask.summary['started_time']) @@ -1465,7 +1469,7 @@ def _process_message_submitted( ) itask.set_summary_time('submitted', event_time) - if itask.tdef.run_mode == RunMode.SIMULATION: + if itask.run_mode == RunMode.SIMULATION.value: # Simulate job started as well. itask.set_summary_time('started', event_time) if itask.state_reset(TASK_STATUS_RUNNING, forced=forced): @@ -1502,7 +1506,7 @@ def _process_message_submitted( 'submitted', event_time, ) - if itask.tdef.run_mode == RunMode.SIMULATION: + if itask.run_mode == RunMode.SIMULATION.value: # Simulate job started as well. self.data_store_mgr.delta_job_time( job_tokens, @@ -1535,7 +1539,11 @@ def _insert_task_job( # not see previous submissions (so can't use itask.jobs[submit_num-1]). 
# And transient tasks, used for setting outputs and spawning children, # do not submit jobs. - if (itask.tdef.run_mode == RunMode.SIMULATION) or forced: + if ( + not itask.run_mode + or itask.run_mode in RunMode.JOBLESS_MODES.value + or forced + ): job_conf = {"submit_num": itask.submit_num} else: try: diff --git a/cylc/flow/task_job_mgr.py b/cylc/flow/task_job_mgr.py index 500aa830b2c..675fad0c6ce 100644 --- a/cylc/flow/task_job_mgr.py +++ b/cylc/flow/task_job_mgr.py @@ -35,7 +35,7 @@ ) from shutil import rmtree from time import time -from typing import TYPE_CHECKING, Any, Union, Optional +from typing import TYPE_CHECKING, Any, List, Tuple, Union, Optional from cylc.flow import LOG from cylc.flow.job_runner_mgr import JobPollContext @@ -59,7 +59,12 @@ get_platform, ) from cylc.flow.remote import construct_ssh_cmd -from cylc.flow.simulation import ModeSettings +from cylc.flow.run_modes.simulation import ( + submit_task_job as simulation_submit_task_job) +from cylc.flow.run_modes.skip import ( + submit_task_job as skip_submit_task_job) +from cylc.flow.run_modes.dummy import ( + submit_task_job as dummy_submit_task_job) from cylc.flow.subprocctx import SubProcContext from cylc.flow.subprocpool import SubProcPool from cylc.flow.task_action_timer import ( @@ -99,7 +104,8 @@ TASK_STATUS_SUBMITTED, TASK_STATUS_RUNNING, TASK_STATUS_WAITING, - TASK_STATUSES_ACTIVE + TASK_STATUSES_ACTIVE, + RunMode ) from cylc.flow.wallclock import ( get_current_time_string, @@ -243,7 +249,7 @@ def prep_submit_task_jobs(self, workflow, itasks, check_syntax=True): return [prepared_tasks, bad_tasks] def submit_task_jobs(self, workflow, itasks, curve_auth, - client_pub_key_dir, is_simulation=False): + client_pub_key_dir, run_mode='live'): """Prepare for job submission and submit task jobs. Preparation (host selection, remote host init, and remote install) @@ -258,8 +264,8 @@ def submit_task_jobs(self, workflow, itasks, curve_auth, Return (list): list of tasks that attempted submission. 
""" - if is_simulation: - return self._simulation_submit_task_jobs(itasks, workflow) + itasks, nonlive_tasks = self._nonlive_submit_task_jobs( + itasks, workflow, run_mode) # Prepare tasks for job submission prepared_tasks, bad_tasks = self.prep_submit_task_jobs( @@ -268,9 +274,10 @@ def submit_task_jobs(self, workflow, itasks, curve_auth, # Reset consumed host selection results self.task_remote_mgr.subshell_eval_reset() - if not prepared_tasks: + if not prepared_tasks and not nonlive_tasks: return bad_tasks - + elif not prepared_tasks: + return nonlive_tasks auth_itasks = {} # {platform: [itask, ...], ...} for itask in prepared_tasks: @@ -278,8 +285,7 @@ def submit_task_jobs(self, workflow, itasks, curve_auth, auth_itasks.setdefault(platform_name, []) auth_itasks[platform_name].append(itask) # Submit task jobs for each platform - # Non-prepared tasks can be considered done for now: - done_tasks = bad_tasks + done_tasks = bad_tasks + nonlive_tasks for _, itasks in sorted(auth_itasks.items()): # Find the first platform where >1 host has not been tried and @@ -1003,44 +1009,74 @@ def _set_retry_timers( except KeyError: itask.try_timers[key] = TaskActionTimer(delays=delays) - def _simulation_submit_task_jobs(self, itasks, workflow): - """Simulation mode task jobs submission.""" + def _nonlive_submit_task_jobs( + self: 'TaskJobManager', + itasks: 'List[TaskProxy]', + workflow: str, + workflow_run_mode: str, + ) -> 'Tuple[List[TaskProxy], List[TaskProxy]]': + """Identify task mode and carry out alternative submission + paths if required: + + * Simulation: Job submission. + * Skip: Entire job lifecycle happens here! + * Dummy: Pre-submission preparation (removing task scripts content) + before returning to live pathway. + * Live: return to main submission pathway without doing anything. + + Returns: + lively_tasks: + A list of tasks which require subsequent + processing **as if** they were live mode tasks. 
+ (This includes live and dummy mode tasks) + nonlive_tasks: + A list of tasks which require no further processing + because their apparent execution is done entirely inside + the scheduler. (This includes skip and simulation mode tasks). + """ + lively_tasks: 'List[TaskProxy]' = [] + nonlive_tasks: 'List[TaskProxy]' = [] now = time() - now_str = get_time_string_from_unix_time(now) + now = (now, get_time_string_from_unix_time(now)) + for itask in itasks: - # Handle broadcasts + # Get task config with broadcasts applied: rtconfig = self.task_events_mgr.broadcast_mgr.get_updated_rtconfig( itask) - itask.summary['started_time'] = now - self._set_retry_timers(itask, rtconfig) - itask.mode_settings = ModeSettings( - itask, - self.workflow_db_mgr, - rtconfig - ) - - itask.waiting_on_job_prep = False - itask.submit_num += 1 + # Apply task run mode + if workflow_run_mode in RunMode.NON_OVERRIDABLE_MODES.value: + # Task run mode cannot override workflow run-mode sim or dummy: + run_mode = workflow_run_mode + else: + # If workflow mode is skip or live and task mode is set, + # override workflow mode, else use workflow mode. 
+ run_mode = rtconfig.get('run mode', None) or workflow_run_mode + # Store the run mode of the this submission: + itask.run_mode = run_mode + + # Submit nonlive tasks, or add live-like (live or dummy) + # tasks to list of tasks to put through live + # submission pipeline - We decide based on the output + # of the submit method: + is_nonlive = False + if run_mode == RunMode.DUMMY.value: + is_nonlive = dummy_submit_task_job( + self, itask, rtconfig, workflow, now) + elif run_mode == RunMode.SIMULATION.value: + is_nonlive = simulation_submit_task_job( + self, itask, rtconfig, workflow, now) + elif run_mode == RunMode.SKIP.value: + is_nonlive = skip_submit_task_job( + self, itask, rtconfig, now) + + # Assign task to list: + if is_nonlive: + nonlive_tasks.append(itask) + else: + lively_tasks.append(itask) - itask.platform = {'name': 'SIMULATION'} - itask.summary['job_runner_name'] = 'SIMULATION' - itask.summary[self.KEY_EXECUTE_TIME_LIMIT] = ( - itask.mode_settings.simulated_run_length - ) - itask.jobs.append( - self.get_simulation_job_conf(itask, workflow) - ) - self.task_events_mgr.process_message( - itask, INFO, TASK_OUTPUT_SUBMITTED, - ) - self.workflow_db_mgr.put_insert_task_jobs( - itask, { - 'time_submit': now_str, - 'try_num': itask.get_try_num(), - } - ) - return itasks + return lively_tasks, nonlive_tasks def _submit_task_jobs_callback(self, ctx, workflow, itasks): """Callback when submit task jobs command exits.""" diff --git a/cylc/flow/task_outputs.py b/cylc/flow/task_outputs.py index b8363a50ada..c9be4884152 100644 --- a/cylc/flow/task_outputs.py +++ b/cylc/flow/task_outputs.py @@ -194,6 +194,7 @@ def get_completion_expression(tdef: 'TaskDef') -> str: def get_optional_outputs( expression: str, outputs: Iterable[str], + force_optional: "Optional[str]" = None ) -> Dict[str, Optional[bool]]: """Determine which outputs in an expression are optional. @@ -202,6 +203,8 @@ def get_optional_outputs( The completion expression. 
outputs: All outputs that apply to this task. + force_optional: + Don't have the CompletionEvaluator consider this output. Returns: dict: compvar: is_optional @@ -229,6 +232,14 @@ def get_optional_outputs( [('expired', True), ('failed', None), ('succeeded', False), ('x', False), ('y', False)] + >>> sorted(get_optional_outputs( + ... '(succeeded and towel) or (failed and bugblatter)', + ... {'succeeded', 'towel', 'failed', 'bugblatter'}, + ... 'failed' + ... ).items()) + [('bugblatter', True), ('failed', True), + ('succeeded', False), ('towel', False)] + """ # determine which triggers are used in the expression used_compvars = get_variable_names(expression) @@ -236,6 +247,9 @@ def get_optional_outputs( # all completion variables which could appear in the expression all_compvars = {trigger_to_completion_variable(out) for out in outputs} + # Allows exclusion of additional outcomes: + extra_excludes = {force_optional: False} if force_optional else {} + return { # output: is_optional # the outputs that are used in the expression **{ @@ -247,6 +261,7 @@ def get_optional_outputs( # (pre-conditions are considered separately) 'expired': False, 'submit_failed': False, + **extra_excludes }, ) for output in used_compvars @@ -609,16 +624,25 @@ def _is_compvar_complete(self, compvar: str) -> Optional[bool]: else: raise KeyError(compvar) - def iter_required_messages(self) -> Iterator[str]: + def iter_required_messages( + self, + exclude=None + ) -> Iterator[str]: """Yield task messages that are required for this task to be complete. Note, in some cases tasks might not have any required messages, e.g. "completion = succeeded or failed". + + Args: + exclude: Exclude one possible required messages, allowing + specification of all required outputs if succeeded or failed. 
""" for compvar, is_optional in get_optional_outputs( self._completion_expression, set(self._message_to_compvar.values()), + force_optional=exclude ).items(): + # breakpoint(header=f"=== {compvar=}, {is_optional=} ===") if is_optional is False: for message, _compvar in self._message_to_compvar.items(): if _compvar == compvar: diff --git a/cylc/flow/task_pool.py b/cylc/flow/task_pool.py index 1f1de6c77e6..a554310f645 100644 --- a/cylc/flow/task_pool.py +++ b/cylc/flow/task_pool.py @@ -53,6 +53,7 @@ from cylc.flow.task_id import TaskID from cylc.flow.task_proxy import TaskProxy from cylc.flow.task_state import ( + RunMode, TASK_STATUSES_ACTIVE, TASK_STATUSES_FINAL, TASK_STATUS_WAITING, @@ -70,6 +71,8 @@ ) from cylc.flow.wallclock import get_current_time_string from cylc.flow.platforms import get_platform +from cylc.flow.run_modes.skip import ( + process_outputs as get_skip_mode_outputs) from cylc.flow.task_outputs import ( TASK_OUTPUT_SUCCEEDED, TASK_OUTPUT_EXPIRED, @@ -1430,7 +1433,10 @@ def spawn_on_output(self, itask: TaskProxy, output: str) -> None: tasks = [c_task] for t in tasks: - t.satisfy_me([itask.tokens.duplicate(task_sel=output)]) + t.satisfy_me( + [itask.tokens.duplicate(task_sel=output)], + getattr(itask.tdef, 'run_mode', RunMode.LIVE.value) + ) self.data_store_mgr.delta_task_prerequisite(t) if not in_pool: self.add_to_pool(t) @@ -1557,7 +1563,8 @@ def spawn_on_all_outputs( continue if completed_only: c_task.satisfy_me( - [itask.tokens.duplicate(task_sel=message)] + [itask.tokens.duplicate(task_sel=message)], + itask.run_mode ) self.data_store_mgr.delta_task_prerequisite(c_task) self.add_to_pool(c_task) @@ -1875,7 +1882,8 @@ def _standardise_outputs( try: msg = tdef.outputs[output][0] except KeyError: - LOG.warning(f"output {point}/{tdef.name}:{output} not found") + LOG.warning( + f"output {point}/{tdef.name}:{output} not found") continue _outputs.append(msg) return _outputs @@ -1978,9 +1986,19 @@ def _set_outputs_itask( if not outputs: outputs = 
list(itask.state.outputs.iter_required_messages()) else: + # --out=skip is a shortcut to setting all the outputs that + # skip mode would. + skips = [] + if RunMode.SKIP.value in outputs: + # Check for broadcasts to task: + bc_mgr = self.task_events_mgr.broadcast_mgr + rtconfig = bc_mgr.get_updated_rtconfig(itask) + outputs.remove(RunMode.SKIP.value) + skips = get_skip_mode_outputs(itask, rtconfig) + itask.run_mode = RunMode.SKIP.value outputs = self._standardise_outputs( - itask.point, itask.tdef, outputs - ) + itask.point, itask.tdef, outputs) + outputs = list(set(outputs + skips)) for output in sorted(outputs, key=itask.state.outputs.output_sort_key): if itask.state.outputs.is_message_complete(output): diff --git a/cylc/flow/task_proxy.py b/cylc/flow/task_proxy.py index 73d819c2fce..b3c6e891da1 100644 --- a/cylc/flow/task_proxy.py +++ b/cylc/flow/task_proxy.py @@ -40,6 +40,7 @@ from cylc.flow.platforms import get_platform from cylc.flow.task_action_timer import TimerFlags from cylc.flow.task_state import ( + RunMode, TaskState, TASK_STATUS_WAITING, TASK_STATUS_EXPIRED, @@ -189,6 +190,7 @@ class TaskProxy: 'point_as_seconds', 'poll_timer', 'reload_successor', + 'run_mode', 'submit_num', 'tdef', 'state', @@ -296,6 +298,7 @@ def __init__( self.graph_children = generate_graph_children(tdef, self.point) self.mode_settings: Optional['ModeSettings'] = None + self.run_mode: Optional[str] = None if self.tdef.expiration_offset is not None: self.expire_time = ( @@ -553,7 +556,7 @@ def state_reset( return False def satisfy_me( - self, task_messages: 'Iterable[Tokens]' + self, task_messages: 'Iterable[Tokens]', mode=RunMode.LIVE.value ) -> 'Set[Tokens]': """Try to satisfy my prerequisites with given output messages. @@ -563,7 +566,7 @@ def satisfy_me( Return a set of unmatched task messages. 
        """
-        used = self.state.satisfy_me(task_messages)
+        used = self.state.satisfy_me(task_messages, mode)
         return set(task_messages) - used
 
     def clock_expire(self) -> bool:
diff --git a/cylc/flow/task_state.py b/cylc/flow/task_state.py
index 9ecd9414d17..838414198f6 100644
--- a/cylc/flow/task_state.py
+++ b/cylc/flow/task_state.py
@@ -17,6 +17,7 @@
 """Task state related logic."""
 
+from enum import Enum
 from typing import (
     TYPE_CHECKING,
     Dict,
@@ -40,6 +41,7 @@
 
 if TYPE_CHECKING:
     from cylc.flow.cycling import PointBase
+    from cylc.flow.option_parsers import Values
     from cylc.flow.id import Tokens
     from cylc.flow.prerequisite import PrereqMessage
     from cylc.flow.taskdef import TaskDef
@@ -177,6 +179,73 @@
 }
 
 
+class RunMode(Enum):
+    """The possible run modes of a task/workflow."""
+
+    LIVE = 'live'
+    """Task will run normally."""
+
+    SIMULATION = 'simulation'
+    """Simulates job submission with configurable execution time
+    and succeeded/failed outcomes (does not submit real jobs)."""
+
+    DUMMY = 'dummy'
+    """Submits real jobs with empty scripts."""
+
+    SKIP = 'skip'
+    """Skips job submission; sets required outputs (by default) or
+    configured outputs."""
+
+    WORKFLOW_MODES = (LIVE, DUMMY, SIMULATION, SKIP)
+    """The modes in which a whole workflow can be run.
+
+    n.b. not using a set to ensure ordering in CLI
+    """
+
+    OVERRIDING_MODES = frozenset({LIVE, SKIP})
+    """Modes which can be set in task config."""
+
+    NON_OVERRIDABLE_MODES = frozenset({SIMULATION, DUMMY})
+
+    JOBLESS_MODES = frozenset({SKIP, SIMULATION})
+    """Modes which completely ignore the standard submission path."""
+
+    def describe(self):
+        """Return user friendly description of run mode.
+
+        For use by configuration spec documenter.
+        """
+        if self == self.LIVE:
+            return "Task will run normally."
+ if self == self.SKIP: + return ( + "Skips job submission; sets required outputs" + " (by default) or configured outputs.") + raise KeyError(f'No description for {self}.') + + @staticmethod + def get(options: 'Values') -> str: + """Return the workflow run mode from the options.""" + return getattr(options, 'run_mode', None) or RunMode.LIVE.value + + @staticmethod + def disable_task_event_handlers(itask): + """Should we disable event handlers for this task? + + No event handlers in simulation mode, or in skip mode + if we don't deliberately enable them: + """ + mode = itask.run_mode + return ( + mode == RunMode.SIMULATION.value + or ( + mode == RunMode.SKIP.value + and itask.platform.get( + 'disable task event handlers', False) + ) + ) + + def status_leq(status_a, status_b): """"Return True if status_a <= status_b""" return (TASK_STATUSES_ORDERED.index(status_a) <= @@ -324,7 +393,8 @@ def __call__( def satisfy_me( self, - outputs: Iterable['Tokens'] + outputs: Iterable['Tokens'], + mode, ) -> Set['Tokens']: """Try to satisfy my prerequisites with given outputs. 
@@ -333,7 +403,7 @@ def satisfy_me( valid: Set[Tokens] = set() for prereq in (*self.prerequisites, *self.suicide_prerequisites): valid.update( - prereq.satisfy_me(outputs) + prereq.satisfy_me(outputs, mode) ) return valid diff --git a/cylc/flow/unicode_rules.py b/cylc/flow/unicode_rules.py index a6974888248..0dbb5aa22f9 100644 --- a/cylc/flow/unicode_rules.py +++ b/cylc/flow/unicode_rules.py @@ -23,7 +23,7 @@ _TASK_NAME_PREFIX, ) from cylc.flow.task_qualifiers import TASK_QUALIFIERS -from cylc.flow.task_state import TASK_STATUSES_ORDERED +from cylc.flow.task_state import TASK_STATUSES_ORDERED, RunMode ENGLISH_REGEX_MAP = { r'\w': 'alphanumeric', @@ -351,6 +351,8 @@ class TaskOutputValidator(UnicodeRuleChecker): not_starts_with('_cylc'), # blacklist keywords not_equals('required', 'optional', 'all', 'and', 'or'), + # blacklist Run Modes: + not_equals(RunMode.SKIP.value), # blacklist built-in task qualifiers and statuses (e.g. "waiting") not_equals(*sorted({*TASK_QUALIFIERS, *TASK_STATUSES_ORDERED})), ] diff --git a/cylc/flow/workflow_status.py b/cylc/flow/workflow_status.py index d6d6fb587dc..72761c08c87 100644 --- a/cylc/flow/workflow_status.py +++ b/cylc/flow/workflow_status.py @@ -23,8 +23,6 @@ from cylc.flow.wallclock import get_time_string_from_unix_time as time2str if TYPE_CHECKING: - from optparse import Values - from cylc.flow.cycling import PointBase from cylc.flow.scheduler import Scheduler from cylc.flow.task_pool import TaskPool @@ -202,21 +200,3 @@ def _get_earliest_stop_point_status_msg(pool: 'TaskPool') -> Optional[str]: if prop is None: return None return template % prop - - -class RunMode: - """The possible run modes of a workflow.""" - - LIVE = 'live' - """Workflow will run normally.""" - - SIMULATION = 'simulation' - """Workflow will run in simulation mode.""" - - DUMMY = 'dummy' - """Workflow will run in dummy mode.""" - - @staticmethod - def get(options: 'Values') -> str: - """Return the run mode from the options.""" - return getattr(options, 
'run_mode', None) or RunMode.LIVE diff --git a/tests/functional/cylc-config/00-simple/section2.stdout b/tests/functional/cylc-config/00-simple/section2.stdout index 3d83ac15278..049db739435 100644 --- a/tests/functional/cylc-config/00-simple/section2.stdout +++ b/tests/functional/cylc-config/00-simple/section2.stdout @@ -15,10 +15,14 @@ execution time limit = submission polling intervals = submission retry delays = + run mode = [[[meta]]] title = description = URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S speedup factor = @@ -90,10 +94,14 @@ execution time limit = submission polling intervals = submission retry delays = + run mode = [[[meta]]] title = description = URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S speedup factor = @@ -165,10 +173,14 @@ execution time limit = submission polling intervals = submission retry delays = + run mode = [[[meta]]] title = description = URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S speedup factor = @@ -240,12 +252,16 @@ execution time limit = submission polling intervals = submission retry delays = + run mode = [[[directives]]] job_type = serial [[[meta]]] title = description = URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S speedup factor = @@ -316,12 +332,16 @@ execution time limit = submission polling intervals = submission retry delays = + run mode = [[[directives]]] job_type = parallel [[[meta]]] title = description = URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S speedup factor = @@ -392,12 +412,16 @@ execution time limit = submission polling intervals = submission retry delays = + run mode = [[[directives]]] job_type = serial [[[meta]]] title = description = URL = + [[[skip]]] + outputs = + disable 
task event handlers = True [[[simulation]]] default run length = PT10S speedup factor = @@ -468,12 +492,16 @@ execution time limit = submission polling intervals = submission retry delays = + run mode = [[[directives]]] job_type = serial [[[meta]]] title = description = URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S speedup factor = @@ -544,12 +572,16 @@ execution time limit = submission polling intervals = submission retry delays = + run mode = [[[directives]]] job_type = parallel [[[meta]]] title = description = URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S speedup factor = @@ -620,12 +652,16 @@ execution time limit = submission polling intervals = submission retry delays = + run mode = [[[directives]]] job_type = parallel [[[meta]]] title = description = URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S speedup factor = @@ -696,12 +732,16 @@ execution time limit = submission polling intervals = submission retry delays = + run mode = [[[directives]]] job_type = serial [[[meta]]] title = description = URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S speedup factor = @@ -772,12 +812,16 @@ execution time limit = submission polling intervals = submission retry delays = + run mode = [[[directives]]] job_type = serial [[[meta]]] title = description = URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S speedup factor = @@ -848,12 +892,16 @@ execution time limit = submission polling intervals = submission retry delays = + run mode = [[[directives]]] job_type = parallel [[[meta]]] title = description = URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S speedup factor = @@ -924,12 +972,16 @@ execution 
time limit = submission polling intervals = submission retry delays = + run mode = [[[directives]]] job_type = parallel [[[meta]]] title = description = URL = + [[[skip]]] + outputs = + disable task event handlers = True [[[simulation]]] default run length = PT10S speedup factor = diff --git a/tests/functional/cylc-set/09-set-skip.t b/tests/functional/cylc-set/09-set-skip.t new file mode 100644 index 00000000000..dd314283700 --- /dev/null +++ b/tests/functional/cylc-set/09-set-skip.t @@ -0,0 +1,28 @@ +#!/usr/bin/env bash +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +#------------------------------------------------------------------------------- +# +# Skip Mode proposal example: +# https://github.com/cylc/cylc-admin/blob/master/docs/proposal-skip-mode.md +# The cylc set --out option should accept the skip value +# which should set the outputs defined in +# [runtime][][skip]outputs. + +. 
"$(dirname "$0")/test_header" +set_test_number 2 +reftest +exit diff --git a/tests/functional/cylc-set/09-set-skip/flow.cylc b/tests/functional/cylc-set/09-set-skip/flow.cylc new file mode 100644 index 00000000000..ef74c362773 --- /dev/null +++ b/tests/functional/cylc-set/09-set-skip/flow.cylc @@ -0,0 +1,50 @@ +[meta] + test_description = """ + Test that cylc set --out skip satisfies + all outputs which are required by the graph. + """ + proposal url = https://github.com/cylc/cylc-admin/blob/master/docs/proposal-skip-mode.md + +[scheduler] + allow implicit tasks = true + [[events]] + expected task failures = 1/bar + +[scheduling] + [[graph]] + R1 = """ + # Optional out not created by set --out skip + foo:no? => not_this_task? + + # set --out skip creates required, started, submitted + # and succeeded (unless failed is set): + foo:yes => require_this_task + foo:submitted => submitted_emitted + foo:succeeded => succeeded_emitted + foo:started => skip_foo + + # set --out skip creates failed if that is required + # by skip mode settings: + bar:started => skip_bar + bar:failed? 
=> bar_failed + """ + +[runtime] + [[foo]] + script = sleep 100 + [[[skip]]] + outputs = yes + [[[outputs]]] + no = 'Don\'t require this task' + yes = 'Require this task' + + [[bar]] + script = sleep 100 + [[[skip]]] + outputs = failed + + [[skip_foo]] + script = cylc set ${CYLC_WORKFLOW_ID}//1/foo --out skip + + [[skip_bar]] + script = cylc set ${CYLC_WORKFLOW_ID}//1/bar --out skip diff --git a/tests/functional/cylc-set/09-set-skip/reference.log b/tests/functional/cylc-set/09-set-skip/reference.log new file mode 100644 index 00000000000..6e7b636f540 --- /dev/null +++ b/tests/functional/cylc-set/09-set-skip/reference.log @@ -0,0 +1,8 @@ +1/bar -triggered off [] in flow 1 +1/foo -triggered off [] in flow 1 +1/submitted_emitted -triggered off ['1/foo'] in flow 1 +1/skip_bar -triggered off ['1/bar'] in flow 1 +1/skip_foo -triggered off ['1/foo'] in flow 1 +1/succeeded_emitted -triggered off ['1/foo'] in flow 1 +1/bar_failed -triggered off ['1/bar'] in flow 1 +1/require_this_task -triggered off ['1/foo'] in flow 1 diff --git a/tests/functional/modes/01-dummy.t b/tests/functional/run_modes/01-dummy.t similarity index 100% rename from tests/functional/modes/01-dummy.t rename to tests/functional/run_modes/01-dummy.t diff --git a/tests/functional/modes/01-dummy/flow.cylc b/tests/functional/run_modes/01-dummy/flow.cylc similarity index 100% rename from tests/functional/modes/01-dummy/flow.cylc rename to tests/functional/run_modes/01-dummy/flow.cylc diff --git a/tests/functional/modes/01-dummy/reference.log b/tests/functional/run_modes/01-dummy/reference.log similarity index 100% rename from tests/functional/modes/01-dummy/reference.log rename to tests/functional/run_modes/01-dummy/reference.log diff --git a/tests/functional/modes/02-dummy-message-outputs.t b/tests/functional/run_modes/02-dummy-message-outputs.t similarity index 100% rename from tests/functional/modes/02-dummy-message-outputs.t rename to tests/functional/run_modes/02-dummy-message-outputs.t diff --git 
a/tests/functional/modes/02-dummy-message-outputs/flow.cylc b/tests/functional/run_modes/02-dummy-message-outputs/flow.cylc similarity index 100% rename from tests/functional/modes/02-dummy-message-outputs/flow.cylc rename to tests/functional/run_modes/02-dummy-message-outputs/flow.cylc diff --git a/tests/functional/modes/02-dummy-message-outputs/reference.log b/tests/functional/run_modes/02-dummy-message-outputs/reference.log similarity index 100% rename from tests/functional/modes/02-dummy-message-outputs/reference.log rename to tests/functional/run_modes/02-dummy-message-outputs/reference.log diff --git a/tests/functional/modes/03-simulation.t b/tests/functional/run_modes/03-simulation.t similarity index 100% rename from tests/functional/modes/03-simulation.t rename to tests/functional/run_modes/03-simulation.t diff --git a/tests/functional/modes/03-simulation/flow.cylc b/tests/functional/run_modes/03-simulation/flow.cylc similarity index 100% rename from tests/functional/modes/03-simulation/flow.cylc rename to tests/functional/run_modes/03-simulation/flow.cylc diff --git a/tests/functional/modes/03-simulation/reference.log b/tests/functional/run_modes/03-simulation/reference.log similarity index 100% rename from tests/functional/modes/03-simulation/reference.log rename to tests/functional/run_modes/03-simulation/reference.log diff --git a/tests/functional/modes/04-simulation-runtime.t b/tests/functional/run_modes/04-simulation-runtime.t similarity index 100% rename from tests/functional/modes/04-simulation-runtime.t rename to tests/functional/run_modes/04-simulation-runtime.t diff --git a/tests/functional/modes/04-simulation-runtime/flow.cylc b/tests/functional/run_modes/04-simulation-runtime/flow.cylc similarity index 100% rename from tests/functional/modes/04-simulation-runtime/flow.cylc rename to tests/functional/run_modes/04-simulation-runtime/flow.cylc diff --git a/tests/functional/modes/04-simulation-runtime/reference.log 
b/tests/functional/run_modes/04-simulation-runtime/reference.log similarity index 100% rename from tests/functional/modes/04-simulation-runtime/reference.log rename to tests/functional/run_modes/04-simulation-runtime/reference.log diff --git a/tests/functional/modes/05-sim-trigger.t b/tests/functional/run_modes/05-sim-trigger.t similarity index 100% rename from tests/functional/modes/05-sim-trigger.t rename to tests/functional/run_modes/05-sim-trigger.t diff --git a/tests/functional/modes/05-sim-trigger/flow.cylc b/tests/functional/run_modes/05-sim-trigger/flow.cylc similarity index 100% rename from tests/functional/modes/05-sim-trigger/flow.cylc rename to tests/functional/run_modes/05-sim-trigger/flow.cylc diff --git a/tests/functional/modes/05-sim-trigger/reference.log b/tests/functional/run_modes/05-sim-trigger/reference.log similarity index 100% rename from tests/functional/modes/05-sim-trigger/reference.log rename to tests/functional/run_modes/05-sim-trigger/reference.log diff --git a/tests/functional/run_modes/06-run-mode-overrides.t b/tests/functional/run_modes/06-run-mode-overrides.t new file mode 100644 index 00000000000..f6d4faafb30 --- /dev/null +++ b/tests/functional/run_modes/06-run-mode-overrides.t @@ -0,0 +1,66 @@ +#!/usr/bin/env bash +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. 
If not, see . + +# Testing Skip mode functionality. + +. "$(dirname "$0")/test_header" +set_test_number 11 + +# Install and run the workflow in live mode (default). +# Check that tasks with run mode unset and run mode = live +# leave log files, and that skip mode tasks don't. +TEST_NAME="${TEST_NAME_BASE}:live-workflow" +install_workflow "${TEST_NAME_BASE}" "${TEST_NAME_BASE}" +run_ok "${TEST_NAME}:validate" cylc validate "${WORKFLOW_NAME}" +workflow_run_ok "${TEST_NAME}:play" \ + cylc play "${WORKFLOW_NAME}" \ + --no-detach + +JOB_LOGS="${WORKFLOW_RUN_DIR}/log/job/1000" +run_fail "${TEST_NAME}:config run mode=skip" ls "${JOB_LOGS}/skip_" +for MODE in default live; do + named_grep_ok "${TEST_NAME}:config run mode=${MODE}" "===.*===" "${JOB_LOGS}/${MODE}_/NN/job.out" +done + +# After broadcasting a change in run_mode to task default_ it now runs +# in skip mode and fails to produce a log file: +JOB_LOGS="${WORKFLOW_RUN_DIR}/log/job/1001" +run_fail "${TEST_NAME}:broadcast run mode=skip" ls "${JOB_LOGS}/default_/" + +purge + +# Install and run the workflow in skip mode. +# Check that tasks with run mode unset and run mode = skip +# don't leave log files, and that skip mode tasks does. 
+TEST_NAME="${TEST_NAME_BASE}:skip-workflow" +install_workflow "${TEST_NAME_BASE}" "${TEST_NAME_BASE}" +workflow_run_ok "${TEST_NAME}:run" \ + cylc play "${WORKFLOW_NAME}" \ + --no-detach \ + --mode skip \ + --set='changemode="live"' \ + --final-cycle-point=1000 + +JOB_LOGS="${WORKFLOW_RUN_DIR}/log/job/1000" +run_ok "${TEST_NAME}:run mode=live" ls "${JOB_LOGS}/live_" +run_fail "${TEST_NAME}:run mode=default" ls "${JOB_LOGS}/default_" +run_fail "${TEST_NAME}:run mode=skip" ls "${JOB_LOGS}/skip_" +JOB_LOGS="${WORKFLOW_RUN_DIR}/log/job/1000" +named_grep_ok "${TEST_NAME}:run mode=live" "===.*===" "${JOB_LOGS}/live_/NN/job.out" + +purge +exit 0 diff --git a/tests/functional/run_modes/06-run-mode-overrides/flow.cylc b/tests/functional/run_modes/06-run-mode-overrides/flow.cylc new file mode 100644 index 00000000000..6d1b1258833 --- /dev/null +++ b/tests/functional/run_modes/06-run-mode-overrides/flow.cylc @@ -0,0 +1,28 @@ +#!Jinja2 +[scheduler] + cycle point format = %Y + +[scheduling] + initial cycle point = 1000 + final cycle point = 1001 + [[graph]] + R1/1000 = default_ & live_ & skip_ => end + R1/1001 = end[-P1Y] => broadcaster => default_ + +[runtime] + [[root]] + script = echo "=== this task ran in live mode ===" + [[[simulation]]] + default run length = PT0S + [[default_, end]] + [[live_]] + run mode = live + [[skip_]] + run mode = skip + [[broadcaster]] + script = """ + cylc broadcast "${CYLC_WORKFLOW_ID}" \ + --name default_ \ + --point 1001 \ + --set='run mode="{{changemode | default("skip")}}"' + """ diff --git a/tests/functional/modes/test_header b/tests/functional/run_modes/test_header similarity index 100% rename from tests/functional/modes/test_header rename to tests/functional/run_modes/test_header diff --git a/tests/integration/run_modes/test_mode_overrides.py b/tests/integration/run_modes/test_mode_overrides.py new file mode 100644 index 00000000000..f9ab318e0e6 --- /dev/null +++ b/tests/integration/run_modes/test_mode_overrides.py @@ -0,0 +1,152 @@ +# 
THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +"""Test that using [runtime][TASK]run mode works in each mode. + +Point 3 of the Skip Mode proposal +https://github.com/cylc/cylc-admin/blob/master/docs/proposal-skip-mode.md + +| The run mode should be controlled by a new task configuration +| [runtime][]run mode with the default being live. +| As a runtime configuration, this can be defined in the workflow +| for development / testing purposes or set by cylc broadcast. + +n.b: This is pretty much a functional test and +probably ought to be labelled as such, but uses the +integration test framework. +""" + +import pytest + + +@pytest.mark.parametrize( + 'workflow_run_mode', [('live'), ('skip')]) +async def test_run_mode_override_from_config( + workflow_run_mode, flow, scheduler, run, complete, log_filter +): + """Test that ``[runtime][TASK]run mode`` overrides workflow modes. 
+ """ + cfg = { + "scheduler": {"cycle point format": "%Y"}, + "scheduling": { + "initial cycle point": "1000", + "final cycle point": "1000", + "graph": {"P1Y": "live_\nskip_\ndefault_"}}, + "runtime": { + "skip_": {"run mode": "skip"}, + "live_": {"run mode": "live"} + } + } + id_ = flow(cfg) + schd = scheduler(id_, run_mode=workflow_run_mode, paused_start=False) + expect_template = ( + '[1000/{}_/01:preparing] submitted to localhost:background') + + async with run(schd) as log: + await complete(schd) + + # Live task has been really submitted: + assert log_filter(log, contains=expect_template.format('live')) + + # Default is the same as workflow: + if workflow_run_mode == 'live': + assert log_filter(log, contains=expect_template.format('default')) + else: + assert log_filter( + log, contains='[1000/default_/01:running] => succeeded') + assert not log_filter( + log, contains=expect_template.format('default')) + + # Skip task has run, but not actually been submitted: + assert log_filter(log, contains='[1000/skip_/01:running] => succeeded') + assert not log_filter(log, contains=expect_template.format('skip')) + + +async def test_force_trigger_does_not_override_run_mode( + flow, + scheduler, + start, +): + """Force-triggering a task will not override the run mode. + + Tasks with run mode = skip will continue to abide by + the is_held flag as normal. 
+ + Taken from spec at + https://github.com/cylc/cylc-admin/blob/master/ + docs/proposal-skip-mode.md#proposal + """ + wid = flow({ + 'scheduling': {'graph': {'R1': 'foo'}}, + 'runtime': {'foo': {'run mode': 'skip'}} + }) + schd = scheduler(wid) + async with start(schd): + # Check that task isn't held at first + foo = schd.pool.get_tasks()[0] + assert foo.state.is_held is False + + # Hold task, check that it's held: + schd.pool.hold_tasks('1/foo') + assert foo.state.is_held is True + + # Trigger task, check that it's _still_ held: + schd.pool.force_trigger_tasks('1/foo', [1]) + assert foo.state.is_held is True + + # run_mode will always be simulation from test + # workflow before submit routine... + assert not foo.run_mode + + # ... but job submission will always change this to the correct mode: + schd.task_job_mgr.submit_task_jobs( + schd.workflow, + [foo], + schd.server.curve_auth, + schd.server.client_pub_key_dir) + assert foo.run_mode == 'skip' + + +async def test_run_mode_override_from_broadcast( + flow, scheduler, run, complete, log_filter +): + """Test that run_mode modifications only apply to one task. 
+ """ + cfg = { + "scheduler": {"cycle point format": "%Y"}, + "scheduling": { + "initial cycle point": "1000", + "final cycle point": "1001", + "graph": {"P1Y": "foo"}}, + "runtime": { + } + } + id_ = flow(cfg) + schd = scheduler(id_, run_mode='live', paused_start=False) + + async with run(schd): + schd.broadcast_mgr.put_broadcast( + ['1000'], ['foo'], [{'run mode': 'skip'}]) + + foo_1000, foo_1001 = schd.pool.get_tasks() + + schd.task_job_mgr.submit_task_jobs( + schd.workflow, + [foo_1000, foo_1001], + schd.server.curve_auth, + schd.server.client_pub_key_dir) + + assert foo_1000.run_mode == 'skip' + assert foo_1001.run_mode == 'live' diff --git a/tests/integration/run_modes/test_nonlive.py b/tests/integration/run_modes/test_nonlive.py new file mode 100644 index 00000000000..42ddca128ce --- /dev/null +++ b/tests/integration/run_modes/test_nonlive.py @@ -0,0 +1,120 @@ +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+ +from typing import Any, Dict + +# Define here to ensure test doesn't just mirror code: +KGO = { + 'live': { + 'flow_nums': '[1]', + 'is_manual_submit': 0, + 'try_num': 1, + 'submit_status': None, + 'run_signal': None, + 'run_status': None, + 'platform_name': 'localhost', + 'job_runner_name': 'background', + 'job_id': None}, + 'skip': { + 'flow_nums': '[1]', + 'is_manual_submit': 0, + 'try_num': 1, + 'submit_status': 0, + 'run_signal': None, + 'run_status': 0, + 'platform_name': 'skip', + 'job_runner_name': 'simulation', + 'job_id': None}, +} + + +def not_time(data: Dict[str, Any]): + """Filter out fields containing times to reduce risk of + flakiness""" + return {k: v for k, v in data.items() if 'time' not in k} + + +async def test_task_jobs(flow, scheduler, start): + """Ensure that task job data is added to the database correctly + for each run mode. + """ + schd = scheduler(flow({ + 'scheduling': {'graph': { + 'R1': '&'.join(KGO)}}, + 'runtime': { + mode: {'run mode': mode} for mode in KGO} + })) + async with start(schd): + schd.task_job_mgr.submit_task_jobs( + schd.workflow, + schd.pool.get_tasks(), + schd.server.curve_auth, + schd.server.client_pub_key_dir + ) + schd.workflow_db_mgr.process_queued_ops() + + for mode, kgo in KGO.items(): + taskdata = not_time( + schd.workflow_db_mgr.pub_dao.select_task_job(1, mode)) + assert taskdata == kgo, ( + f'Mode {mode}: incorrect db entries.') + + schd.pool.set_prereqs_and_outputs('*', ['failed'], [], []) + + schd.task_job_mgr.submit_task_jobs( + schd.workflow, + schd.pool.get_tasks(), + schd.server.curve_auth, + schd.server.client_pub_key_dir + ) + schd.workflow_db_mgr.process_queued_ops() + + for mode, kgo in KGO.items(): + taskdata = not_time( + schd.workflow_db_mgr.pub_dao.select_task_job(1, mode)) + assert taskdata == kgo, ( + f'Mode {mode}: incorrect db entries.') + + +async def test_mean_task_time(flow, scheduler, run, complete): + """Non-live tasks are not added to the list of task times, + so skipping tasks 
will not affect how long Cylc expects tasks to run. + """ + schd = scheduler(flow({ + 'scheduling': { + 'initial cycle point': '1000', + 'final cycle point': '1002', + 'graph': {'P1Y': 'foo'}} + }), run_mode='live') + + async with run(schd): + tasks = schd.pool.get_tasks() + tdef = tasks[0].tdef + assert list(tdef.elapsed_times) == [] + + # Make the task run in skip mode at one cycle: + schd.broadcast_mgr.put_broadcast( + ['1000'], ['foo'], [{'run mode': 'skip'}]) + + # Submit two tasks: + schd.task_job_mgr.submit_task_jobs( + schd.workflow, + tasks[:2], + schd.server.curve_auth, + schd.server.client_pub_key_dir + ) + await complete(schd, '10010101T0000Z/foo') + assert len(tdef.elapsed_times) == 1 diff --git a/tests/integration/run_modes/test_simulation.py b/tests/integration/run_modes/test_simulation.py index 4d1cd0b7ed9..4c48a572b15 100644 --- a/tests/integration/run_modes/test_simulation.py +++ b/tests/integration/run_modes/test_simulation.py @@ -1,6 +1,6 @@ # THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. # Copyright (C) NIWA & British Crown (Met Office) & Contributors. 
- +# # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or @@ -16,6 +16,14 @@ """Test the workings of simulation mode""" +from pathlib import Path +import pytest +from pytest import param + +from cylc.flow import commands +from cylc.flow.cycling.iso8601 import ISO8601Point +from cylc.flow.run_modes.simulation import sim_time_check + async def test_started_trigger(flow, reftest, scheduler): """Does the started task output trigger downstream tasks @@ -32,3 +40,430 @@ async def test_started_trigger(flow, reftest, scheduler): ('1/a', None), ('1/b', ('1/a',)) } + + +@pytest.fixture +def monkeytime(monkeypatch): + """Convenience function monkeypatching time.""" + def _inner(time_: int): + monkeypatch.setattr('cylc.flow.task_job_mgr.time', lambda: time_) + monkeypatch.setattr( + 'cylc.flow.run_modes.simulation.time', lambda: time_) + return _inner + + +@pytest.fixture +def run_simjob(monkeytime): + """Run a simulated job to completion. + + Returns the output status. + """ + def _run_simjob(schd, point, task): + itask = schd.pool.get_task(point, task) + itask.state.is_queued = False + monkeytime(0) + schd.task_job_mgr._nonlive_submit_task_jobs( + [itask], schd.workflow, 'simulation') + monkeytime(itask.mode_settings.timeout + 1) + + # Run Time Check + assert sim_time_check( + schd.task_events_mgr, [itask], + schd.workflow_db_mgr + ) is True + + # Capture result process queue. 
+ return itask + return _run_simjob + + +@pytest.fixture(scope='module') +async def sim_time_check_setup( + mod_flow, mod_scheduler, mod_start, mod_one_conf, +): + schd = mod_scheduler(mod_flow({ + 'scheduler': {'cycle point format': '%Y'}, + 'scheduling': { + 'initial cycle point': '1066', + 'graph': { + 'R1': 'one & fail_all & fast_forward', + 'P1Y': 'fail_once & fail_all_submits' + } + }, + 'runtime': { + 'one': {}, + 'fail_all': { + 'simulation': { + 'fail cycle points': 'all', + 'fail try 1 only': False + }, + 'outputs': {'foo': 'bar'} + }, + # This task ought not be finished quickly, but for the speed up + 'fast_forward': { + 'execution time limit': 'PT1M', + 'simulation': {'speedup factor': 2} + }, + 'fail_once': { + 'simulation': { + 'fail cycle points': '1066, 1068', + } + }, + 'fail_all_submits': { + 'simulation': { + 'fail cycle points': '1066', + 'fail try 1 only': False, + } + } + } + })) + async with mod_start(schd): + itasks = schd.pool.get_tasks() + [schd.task_job_mgr._set_retry_timers(i) for i in itasks] + yield schd, itasks + + +def test_false_if_not_running( + sim_time_check_setup, monkeypatch +): + schd, itasks = sim_time_check_setup + + itasks = [i for i in itasks if i.state.status != 'running'] + + # False if task status not running: + assert sim_time_check(schd.task_events_mgr, itasks, '') is False + + +@pytest.mark.parametrize( + 'itask, point, results', + ( + # Task fails this CP, first submit. + param( + 'fail_once', '1066', (True, False, False), + id='only-fail-on-submit-1'), + # Task succeeds this CP, all submits. + param( + 'fail_once', '1067', (False, False, False), + id='do-not-fail-this-cp'), + # Task fails this CP, first submit. + param( + 'fail_once', '1068', (True, False, False), + id='and-another-cp'), + # Task fails this CP, all submits. + param( + 'fail_all_submits', '1066', (True, True, True), + id='fail-all-submits'), + # Task succeeds this CP, all submits. 
+ param( + 'fail_all_submits', '1067', (False, False, False), + id='fail-no-submits'), + ) +) +def test_fail_once(sim_time_check_setup, itask, point, results, monkeypatch): + """A task with a fail cycle point only fails + at that cycle point, and then only on the first submission. + """ + schd, _ = sim_time_check_setup + + itask = schd.pool.get_task( + ISO8601Point(point), itask) + + for i, result in enumerate(results): + itask.try_timers['execution-retry'].num = i + schd.task_job_mgr._nonlive_submit_task_jobs( + [itask], schd.workflow, 'simulation') + assert itask.mode_settings.sim_task_fails is result + + +def test_task_finishes(sim_time_check_setup, monkeytime, caplog): + """...and an appropriate message sent. + + Checks that failed and bar are output if a task is set to fail. + + Does NOT check every possible cause of an outcome - this is done + in unit tests. + """ + schd, _ = sim_time_check_setup + monkeytime(0) + + # Setup a task to fail, submit it. + fail_all_1066 = schd.pool.get_task(ISO8601Point('1066'), 'fail_all') + fail_all_1066.state.status = 'running' + fail_all_1066.state.is_queued = False + schd.task_job_mgr._nonlive_submit_task_jobs( + [fail_all_1066], schd.workflow, 'simulation') + + # For the purpose of the test delete the started time set by + # _nonlive_submit_task_jobs. + fail_all_1066.summary['started_time'] = 0 + + # Before simulation time is up: + assert sim_time_check(schd.task_events_mgr, [fail_all_1066], '') is False + + # Time's up... 
+ monkeytime(12) + + # After simulation time is up it Fails and records custom outputs: + assert sim_time_check(schd.task_events_mgr, [fail_all_1066], '') is True + outputs = fail_all_1066.state.outputs + assert outputs.is_message_complete('succeeded') is False + assert outputs.is_message_complete('bar') is True + assert outputs.is_message_complete('failed') is True + + +def test_task_sped_up(sim_time_check_setup, monkeytime): + """Task will speed up by a factor set in config.""" + + schd, _ = sim_time_check_setup + fast_forward_1066 = schd.pool.get_task( + ISO8601Point('1066'), 'fast_forward') + + # Run the job submission method: + monkeytime(0) + schd.task_job_mgr._nonlive_submit_task_jobs( + [fast_forward_1066], schd.workflow, 'simulation') + fast_forward_1066.state.is_queued = False + + result = sim_time_check(schd.task_events_mgr, [fast_forward_1066], '') + assert result is False + monkeytime(29) + result = sim_time_check(schd.task_events_mgr, [fast_forward_1066], '') + assert result is False + monkeytime(31) + result = sim_time_check(schd.task_events_mgr, [fast_forward_1066], '') + assert result is True + + +async def test_settings_restart( + monkeytime, flow, scheduler, start +): + """Check that simulation mode settings are correctly restored + upon restart. + + In the case of start time this is collected from the database + from task_jobs.start_time. + + tasks: + one: Runs straightforwardly. + two: Test case where database is missing started_time + because it was upgraded from an earlier version of Cylc. 
+ """ + id_ = flow({ + 'scheduler': {'cycle point format': '%Y'}, + 'scheduling': { + 'initial cycle point': '1066', + 'graph': { + 'R1': 'one & two' + } + }, + 'runtime': { + 'root': { + 'execution time limit': 'PT1M', + 'execution retry delays': 'P0Y', + 'simulation': { + 'speedup factor': 1, + 'fail cycle points': 'all', + 'fail try 1 only': True, + } + }, + } + }) + schd = scheduler(id_) + + # Start the workflow: + async with start(schd): + og_timeouts = {} + for itask in schd.pool.get_tasks(): + schd.task_job_mgr._nonlive_submit_task_jobs( + [itask], schd.workflow, 'simulation') + + og_timeouts[itask.identity] = itask.mode_settings.timeout + + # Mock wallclock < sim end timeout + monkeytime(itask.mode_settings.timeout - 1) + assert sim_time_check( + schd.task_events_mgr, [itask], schd.workflow_db_mgr + ) is False + + # Stop and restart the scheduler: + schd = scheduler(id_) + async with start(schd): + # Get our tasks and fix wallclock: + itasks = schd.pool.get_tasks() + for itask in itasks: + + # Check that we haven't got started time & mode settings back: + assert itask.summary['started_time'] is None + assert itask.mode_settings is None + + if itask.identity == '1066/two': + # Delete the database entry for `two`: Ensure that + # we don't break sim mode on upgrade to this version of Cylc. 
+ schd.workflow_db_mgr.pri_dao.connect().execute( + 'UPDATE task_jobs' + '\n SET time_submit = NULL' + '\n WHERE (name == \'two\')' + ) + schd.workflow_db_mgr.process_queued_ops() + monkeytime(42) + expected_timeout = 102.0 + else: + monkeytime(og_timeouts[itask.identity] - 1) + expected_timeout = float(int(og_timeouts[itask.identity])) + + assert sim_time_check( + schd.task_events_mgr, [itask], schd.workflow_db_mgr + ) is False + + # Check that the itask.mode_settings is now re-created + assert itask.mode_settings.__dict__ == { + 'simulated_run_length': 60.0, + 'sim_task_fails': True, + 'timeout': expected_timeout + } + + +async def test_settings_reload( + flow, scheduler, start, run_simjob +): + """Check that simulation mode settings are changed for future + pseudo jobs on reload. + + """ + id_ = flow({ + 'scheduler': {'cycle point format': '%Y'}, + 'scheduling': { + 'initial cycle point': '1066', + 'graph': {'R1': 'one'} + }, + 'runtime': { + 'one': { + 'execution time limit': 'PT1M', + 'execution retry delays': 'P0Y', + 'simulation': { + 'speedup factor': 1, + 'fail cycle points': 'all', + 'fail try 1 only': False, + } + }, + } + }) + schd = scheduler(id_) + async with start(schd): + # Submit first psuedo-job and "run" to failure: + one_1066 = schd.pool.get_tasks()[0] + + itask = run_simjob(schd, one_1066.point, 'one') + assert itask.state.outputs.is_message_complete('failed') is False + + # Modify config as if reinstall had taken place: + conf_file = Path(schd.workflow_run_dir) / 'flow.cylc' + conf_file.write_text( + conf_file.read_text().replace('False', 'True')) + + # Reload Workflow: + await commands.run_cmd(commands.reload_workflow, schd) + + # Submit second psuedo-job and "run" to success: + itask = run_simjob(schd, one_1066.point, 'one') + assert itask.state.outputs.is_message_complete('succeeded') is True + + +async def test_settings_broadcast( + flow, scheduler, start, monkeytime +): + """Assert that broadcasting a change in the settings for a task + 
affects subsequent psuedo-submissions. + """ + id_ = flow({ + 'scheduler': {'cycle point format': '%Y'}, + 'scheduling': { + 'initial cycle point': '1066', + 'graph': {'R1': 'one'} + }, + 'runtime': { + 'one': { + 'execution time limit': 'PT1S', + 'execution retry delays': '2*PT5S', + 'simulation': { + 'speedup factor': 1, + 'fail cycle points': '1066', + 'fail try 1 only': False + } + }, + } + }, defaults=False) + schd = scheduler(id_, paused_start=False, run_mode='simulation') + async with start(schd) as log: + itask = schd.pool.get_tasks()[0] + itask.state.is_queued = False + + # Submit the first - the sim task will fail: + schd.task_job_mgr._nonlive_submit_task_jobs( + [itask], schd.workflow, 'simulation') + assert itask.mode_settings.sim_task_fails is True + + # Let task finish. + monkeytime(itask.mode_settings.timeout + 1) + assert sim_time_check( + schd.task_events_mgr, [itask], + schd.workflow_db_mgr + ) is True + + # The mode_settings object has been cleared: + assert itask.mode_settings is None + # Change a setting using broadcast: + schd.broadcast_mgr.put_broadcast( + ['1066'], ['one'], [{ + 'simulation': {'fail cycle points': ''} + }]) + # Submit again - result is different: + schd.task_job_mgr._nonlive_submit_task_jobs( + [itask], schd.workflow, 'simulation') + assert itask.mode_settings.sim_task_fails is False + + # Assert Clearing the broadcast works + schd.broadcast_mgr.clear_broadcast() + schd.task_job_mgr._nonlive_submit_task_jobs( + [itask], schd.workflow, 'simulation') + assert itask.mode_settings.sim_task_fails is True + + # Assert that list of broadcasts doesn't change if we submit + # Invalid fail cycle points to broadcast. 
+ itask.mode_settings = None + schd.broadcast_mgr.put_broadcast( + ['1066'], ['one'], [{ + 'simulation': {'fail cycle points': 'higadfuhasgiurguj'} + }]) + schd.task_job_mgr._nonlive_submit_task_jobs( + [itask], schd.workflow, 'simulation') + assert ( + 'Invalid ISO 8601 date representation: higadfuhasgiurguj' + in log.messages[-1]) + + # Check that the invalid broadcast hasn't + # changed the itask sim mode settings: + assert itask.mode_settings.sim_task_fails is True + + schd.broadcast_mgr.put_broadcast( + ['1066'], ['one'], [{ + 'simulation': {'fail cycle points': '1'} + }]) + schd.task_job_mgr._nonlive_submit_task_jobs( + [itask], schd.workflow, 'simulation') + assert ( + 'Invalid ISO 8601 date representation: 1' + in log.messages[-1]) + + # Broadcast tasks will reparse correctly: + schd.broadcast_mgr.put_broadcast( + ['1066'], ['one'], [{ + 'simulation': {'fail cycle points': '1945, 1977, 1066'}, + 'execution retry delays': '3*PT2S' + }]) + schd.task_job_mgr._nonlive_submit_task_jobs( + [itask], schd.workflow, 'simulation') + assert itask.mode_settings.sim_task_fails is True + assert itask.try_timers['execution-retry'].delays == [2.0, 2.0, 2.0] + # n.b. rtconfig should remain unchanged, lest we cancel broadcasts: + assert itask.tdef.rtconfig['execution retry delays'] == [5.0, 5.0] diff --git a/tests/integration/run_modes/test_skip.py b/tests/integration/run_modes/test_skip.py new file mode 100644 index 00000000000..bc9f29116f2 --- /dev/null +++ b/tests/integration/run_modes/test_skip.py @@ -0,0 +1,249 @@ +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +"""Test for skip mode integration. +""" + + +async def test_settings_override_from_broadcast( + flow, scheduler, start, complete, log_filter +): + """Test that skip mode runs differently if settings are modified. + """ + cfg = { + "scheduling": {"graph": {"R1": "foo:failed => bar"}}, + "runtime": { + "foo": { + "events": { + 'handler events': 'failed', + "handlers": 'echo "HELLO"' + } + } + } + } + id_ = flow(cfg) + schd = scheduler(id_, run_mode='live') + + async with start(schd): + schd.broadcast_mgr.put_broadcast( + ['1'], + ['foo'], + [ + {'run mode': 'skip'}, + {'skip': {'outputs': 'failed'}}, + {'skip': {'disable task event handlers': "False"}} + ] + ) + + foo, = schd.pool.get_tasks() + + schd.task_job_mgr.submit_task_jobs( + schd.workflow, + schd.pool.get_tasks(), + schd.server.curve_auth, + schd.server.client_pub_key_dir + ) + # Run mode has changed: + assert foo.platform['name'] == 'skip' + # Output failed emitted: + assert foo.state.status == 'failed' + # After processing events there is a handler in the subprocpool: + schd.task_events_mgr.process_events(schd) + assert 'echo "HELLO"' in schd.proc_pool.is_not_done()[0][0].cmd + + +async def test_broadcast_changes_set_skip_outputs( + flow, scheduler, start, complete, log_filter +): + """When cylc set --out skip is used, task outputs are updated with + broadcasts. + + Skip mode proposal point 4 + https://github.com/cylc/cylc-admin/blob/master/docs/proposal-skip-mode.md + + | The cylc set --out option should accept the skip value which should + | set the outputs defined in [runtime][][skip]outputs. 
+ | The skip keyword should not be allowed in custom outputs. + """ + wid = flow({ + 'scheduling': {'graph': {'R1': 'foo:expect_this'}}, + 'runtime': {'foo': {'outputs': {'expect_this': 'some message'}}} + }) + schd = scheduler(wid, run_mode='live') + async with start(schd): + schd.broadcast_mgr.put_broadcast( + ['1'], + ['foo'], + [{'skip': {'outputs': 'expect_this'}}], + ) + foo, = schd.pool.get_tasks() + schd.pool.set_prereqs_and_outputs( + '1/foo', ['skip'], [], ['all']) + + foo_outputs = foo.state.outputs.get_completed_outputs() + + assert 'expect_this' in foo_outputs + assert foo_outputs['expect_this'] == '(manually completed)' + + +async def test_skip_mode_outputs( + flow, scheduler, reftest, +): + """Nearly a functional test of the output emission of skip mode tasks + + Skip mode proposal point 2 + https://github.com/cylc/cylc-admin/blob/master/docs/proposal-skip-mode.md + """ + graph = """ + # By default, all required outputs will be generated + # plus succeeded if success is optional: + foo? & foo:required_out => success_if_optional & required_outs + + # The outputs submitted and started are always produced + # and do not need to be defined in outputs: + foo:submitted => submitted_always + foo:started => started_always + + # If outputs is specified and does not include either + # succeeded or failed then succeeded will be produced. + opt:optional_out? => optional_outs_produced + + should_fail:fail => did_fail + """ + wid = flow({ + 'scheduling': {'graph': {'R1': graph}}, + 'runtime': { + 'root': { + 'run mode': 'skip', + 'outputs': { + 'required_out': 'the plans have been on display...', + 'optional_out': 'its only four light years away...' 
+ } + }, + 'opt': { + 'skip': { + 'outputs': 'optional_out' + } + }, + 'should_fail': { + 'skip': { + 'outputs': 'failed' + } + } + } + }) + schd = scheduler(wid, run_mode='live', paused_start=False) + assert await reftest(schd) == { + ('1/did_fail', ('1/should_fail',),), + ('1/foo', None,), + ('1/opt', None,), + ('1/optional_outs_produced', ('1/opt',),), + ('1/required_outs', ('1/foo', '1/foo',),), + ('1/should_fail', None,), + ('1/started_always', ('1/foo',),), + ('1/submitted_always', ('1/foo',),), + ('1/success_if_optional', ('1/foo', '1/foo',),), + } + + +async def test_doesnt_release_held_tasks( + one_conf, flow, scheduler, start, log_filter +): + """Point 5 of the proposal + https://github.com/cylc/cylc-admin/blob/master/docs/proposal-skip-mode.md + + | Tasks with run mode = skip will continue to abide by the is_held + | flag as normal. + + """ + schd = scheduler(flow(one_conf), run_mode='skip') + async with start(schd) as log: + itask = schd.pool.get_tasks()[0] + msg = 'held tasks shoudn\'t {}' + + # Set task to held and check submission in skip mode doesn't happen: + itask.state.is_held = True + schd.task_job_mgr.submit_task_jobs( + schd.workflow, + [itask], + schd.server.curve_auth, + schd.server.client_pub_key_dir, + run_mode=schd.get_run_mode() + ) + assert not log_filter(log, contains='=> running'), msg.format('run') + assert not log_filter(log, contains='=> succeeded'), msg.format( + 'succeed') + + # Release held task and assert that it now skips successfully: + schd.pool.release_held_tasks(['1/one']) + schd.task_job_mgr.submit_task_jobs( + schd.workflow, + [itask], + schd.server.curve_auth, + schd.server.client_pub_key_dir, + run_mode=schd.get_run_mode() + ) + assert log_filter(log, contains='=> running'), msg.format('run') + assert log_filter(log, contains='=> succeeded'), msg.format('succeed') + + +async def test_force_trigger_doesnt_change_mode( + flow, scheduler, run, complete +): + """Point 6 from the skip mode proposal + 
https://github.com/cylc/cylc-admin/blob/master/docs/proposal-skip-mode.md + + | Force-triggering a task will not override the run mode. + """ + wid = flow({ + 'scheduling': {'graph': {'R1': 'slow => skip'}}, + 'runtime': { + 'slow': {'script': 'sleep 6'}, + 'skip': {'script': 'exit 1', 'run mode': 'skip'} + } + }) + schd = scheduler(wid, run_mode='live', paused_start=False) + async with run(schd): + schd.pool.force_trigger_tasks(['1/skip'], [1]) + # This will timeout if the skip task has become live on triggering: + await complete(schd, '1/skip', timeout=6) + + +async def test_prereqs_marked_satisfied_by_skip_mode( + flow, scheduler, start, log_filter, complete +): + """Point 8 from the skip mode proposal + https://github.com/cylc/cylc-admin/blob/master/docs/proposal-skip-mode.md + + | When tasks are run in skip mode, the prerequisites which correspond + | to the outputs they generate should be marked as "satisfied by skip mode" + | rather than "satisfied naturally" for provenance reasons. 
+ """ + schd = scheduler(flow({ + 'scheduling': {'graph': {'R1': 'foo => bar'}} + }), run_mode='skip') + + async with start(schd) as log: + foo, = schd.pool.get_tasks() + schd.task_job_mgr.submit_task_jobs( + schd.workflow, + [foo], + schd.server.curve_auth, + schd.server.client_pub_key_dir, + run_mode=schd.get_run_mode() + ) + bar, = schd.pool.get_tasks() + satisfied_message, = bar.state.prerequisites[0]._satisfied.values() + assert satisfied_message == 'satisfied by skip mode' diff --git a/tests/integration/scripts/test_validate_integration.py b/tests/integration/scripts/test_validate_integration.py index dcf697aac36..7f9073f66f4 100644 --- a/tests/integration/scripts/test_validate_integration.py +++ b/tests/integration/scripts/test_validate_integration.py @@ -158,7 +158,7 @@ def test_pre_cylc8(flow, validate, caplog): assert warning in caplog.messages -def test_graph_upgrade_msg_default(flow, validate, caplog): +def test_graph_upgrade_msg_default(flow, validate, caplog, log_filter): """It lists Cycling definitions which need upgrading.""" id_ = flow({ 'scheduler': {'allow implicit tasks': True}, @@ -171,11 +171,11 @@ def test_graph_upgrade_msg_default(flow, validate, caplog): }, }) validate(id_) - assert '[scheduling][dependencies][X]graph' in caplog.messages[0] - assert 'for X in:\n P1Y, R1' in caplog.messages[0] + assert log_filter(caplog, contains='[scheduling][dependencies][X]graph') + assert log_filter(caplog, contains='for X in:\n P1Y, R1') -def test_graph_upgrade_msg_graph_equals(flow, validate, caplog): +def test_graph_upgrade_msg_graph_equals(flow, validate, caplog, log_filter): """It gives a more useful message in special case where graph is key rather than section: @@ -188,11 +188,12 @@ def test_graph_upgrade_msg_graph_equals(flow, validate, caplog): 'scheduling': {'dependencies': {'graph': 'foo => bar'}}, }) validate(id_) - expect = ('[scheduling][dependencies]graph -> [scheduling][graph]R1') - assert expect in caplog.messages[0] + assert log_filter( 
+ caplog, + contains='[scheduling][dependencies]graph -> [scheduling][graph]R1') -def test_graph_upgrade_msg_graph_equals2(flow, validate, caplog): +def test_graph_upgrade_msg_graph_equals2(flow, validate, caplog, log_filter): """Both an implicit R1 and explict reccurance exist: It appends a note. """ @@ -212,4 +213,4 @@ def test_graph_upgrade_msg_graph_equals2(flow, validate, caplog): '\n P1Y, graph' '\n ([scheduling][dependencies]graph moves to [scheduling][graph]R1)' ) - assert expect in caplog.messages[0] + assert log_filter(caplog, contains=expect) diff --git a/tests/integration/test_config.py b/tests/integration/test_config.py index c75797e9cbb..e10ecf6b64f 100644 --- a/tests/integration/test_config.py +++ b/tests/integration/test_config.py @@ -17,13 +17,13 @@ import logging from pathlib import Path import sqlite3 +from textwrap import dedent from typing import Any import pytest from cylc.flow.cfgspec.glbl_cfg import glbl_cfg from cylc.flow.cfgspec.globalcfg import GlobalConfig from cylc.flow.exceptions import ( - PointParsingError, ServiceFileError, WorkflowConfigError, XtriggerConfigError, @@ -274,7 +274,7 @@ def test_parse_special_tasks_families(flow, scheduler, validate, section): } -def test_queue_treated_as_implicit(flow, validate, caplog): +def test_queue_treated_as_implicit(flow, validate, caplog, log_filter): """Tasks in queues but not in runtime generate a warning. 
https://github.com/cylc/cylc-flow/issues/5260 @@ -289,10 +289,9 @@ def test_queue_treated_as_implicit(flow, validate, caplog): } ) validate(id_) - assert ( - 'Queues contain tasks not defined in runtime' - in caplog.records[0].message - ) + assert log_filter( + caplog, + contains='Queues contain tasks not defined in runtime') def test_queue_treated_as_comma_separated(flow, validate): @@ -596,25 +595,36 @@ def _inner(*args, **kwargs): assert get_platforms(glbl_cfg()) == {'localhost', 'foo', 'bar'} -def test_validate_run_mode(flow: Fixture, validate: Fixture): - """Test that Cylc validate will only check simulation mode settings - if validate --mode simulation or dummy. - - Discovered in: - https://github.com/cylc/cylc-flow/pull/6213#issuecomment-2225365825 +def test_nonlive_mode_validation(flow, validate, caplog, log_filter): + """Nonlive tasks return a warning at validation. """ + msg1 = dedent('The following tasks are set to run in skip mode:\n * skip') + wid = flow({ - 'scheduling': {'graph': {'R1': 'mytask'}}, - 'runtime': {'mytask': {'simulation': {'fail cycle points': 'alll'}}} + 'scheduling': { + 'graph': { + 'R1': 'live => skip => simulation => dummy => default' + } + }, + 'runtime': { + 'default': {}, + 'live': {'run mode': 'live'}, + 'skip': { + 'run mode': 'skip', + 'skip': {'outputs': 'started, submitted'} + }, + }, }) - # It's fine with run mode live validate(wid) + assert log_filter(caplog, contains=msg1) - # It fails with run mode simulation: - with pytest.raises(PointParsingError, match='Incompatible value'): - validate(wid, run_mode='simulation') - # It fails with run mode dummy: - with pytest.raises(PointParsingError, match='Incompatible value'): - validate(wid, run_mode='dummy') +def test_skip_forbidden_as_output(flow, validate): + """Run mode names are forbidden as task output names.""" + wid = flow({ + 'scheduling': {'graph': {'R1': 'task'}}, + 'runtime': {'task': {'outputs': {'skip': 'message for skip'}}} + }) + with 
pytest.raises(WorkflowConfigError, match='message for skip'): + validate(wid) diff --git a/tests/integration/test_simulation.py b/tests/integration/test_simulation.py deleted file mode 100644 index 49bc76ce5e2..00000000000 --- a/tests/integration/test_simulation.py +++ /dev/null @@ -1,459 +0,0 @@ -# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. -# Copyright (C) NIWA & British Crown (Met Office) & Contributors. -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -from pathlib import Path - -import pytest -from pytest import param - -from cylc.flow import commands -from cylc.flow.cycling.iso8601 import ISO8601Point -from cylc.flow.scheduler import Scheduler -from cylc.flow.simulation import sim_time_check - - -@pytest.fixture -def monkeytime(monkeypatch): - """Convenience function monkeypatching time.""" - def _inner(time_: int): - monkeypatch.setattr('cylc.flow.task_job_mgr.time', lambda: time_) - monkeypatch.setattr('cylc.flow.simulation.time', lambda: time_) - return _inner - - -@pytest.fixture -def run_simjob(monkeytime): - """Run a simulated job to completion. - - Returns the output status. 
- """ - def _run_simjob(schd, point, task): - itask = schd.pool.get_task(point, task) - itask.state.is_queued = False - monkeytime(0) - schd.task_job_mgr._simulation_submit_task_jobs( - [itask], schd.workflow) - monkeytime(itask.mode_settings.timeout + 1) - - # Run Time Check - assert sim_time_check( - schd.task_events_mgr, [itask], - schd.workflow_db_mgr - ) is True - - # Capture result process queue. - return itask - return _run_simjob - - -@pytest.fixture(scope='module') -async def sim_time_check_setup( - mod_flow, mod_scheduler, mod_start, mod_one_conf, -): - schd = mod_scheduler(mod_flow({ - 'scheduler': {'cycle point format': '%Y'}, - 'scheduling': { - 'initial cycle point': '1066', - 'graph': { - 'R1': 'one & fail_all & fast_forward', - 'P1Y': 'fail_once & fail_all_submits' - } - }, - 'runtime': { - 'one': {}, - 'fail_all': { - 'simulation': { - 'fail cycle points': 'all', - 'fail try 1 only': False - }, - 'outputs': {'foo': 'bar'} - }, - # This task ought not be finished quickly, but for the speed up - 'fast_forward': { - 'execution time limit': 'PT1M', - 'simulation': {'speedup factor': 2} - }, - 'fail_once': { - 'simulation': { - 'fail cycle points': '1066, 1068', - } - }, - 'fail_all_submits': { - 'simulation': { - 'fail cycle points': '1066', - 'fail try 1 only': False, - } - } - } - })) - async with mod_start(schd): - itasks = schd.pool.get_tasks() - [schd.task_job_mgr._set_retry_timers(i) for i in itasks] - yield schd, itasks - - -def test_false_if_not_running( - sim_time_check_setup, monkeypatch -): - schd, itasks = sim_time_check_setup - - itasks = [i for i in itasks if i.state.status != 'running'] - - # False if task status not running: - assert sim_time_check(schd.task_events_mgr, itasks, '') is False - - -@pytest.mark.parametrize( - 'itask, point, results', - ( - # Task fails this CP, first submit. - param( - 'fail_once', '1066', (True, False, False), - id='only-fail-on-submit-1'), - # Task succeeds this CP, all submits. 
- param( - 'fail_once', '1067', (False, False, False), - id='do-not-fail-this-cp'), - # Task fails this CP, first submit. - param( - 'fail_once', '1068', (True, False, False), - id='and-another-cp'), - # Task fails this CP, all submits. - param( - 'fail_all_submits', '1066', (True, True, True), - id='fail-all-submits'), - # Task succeeds this CP, all submits. - param( - 'fail_all_submits', '1067', (False, False, False), - id='fail-no-submits'), - ) -) -def test_fail_once(sim_time_check_setup, itask, point, results, monkeypatch): - """A task with a fail cycle point only fails - at that cycle point, and then only on the first submission. - """ - schd, _ = sim_time_check_setup - - itask = schd.pool.get_task( - ISO8601Point(point), itask) - - for i, result in enumerate(results): - itask.try_timers['execution-retry'].num = i - schd.task_job_mgr._simulation_submit_task_jobs( - [itask], schd.workflow) - assert itask.mode_settings.sim_task_fails is result - - -def test_task_finishes(sim_time_check_setup, monkeytime, caplog): - """...and an appropriate message sent. - - Checks that failed and bar are output if a task is set to fail. - - Does NOT check every possible cause of an outcome - this is done - in unit tests. - """ - schd, _ = sim_time_check_setup - monkeytime(0) - - # Setup a task to fail, submit it. - fail_all_1066 = schd.pool.get_task(ISO8601Point('1066'), 'fail_all') - fail_all_1066.state.status = 'running' - fail_all_1066.state.is_queued = False - schd.task_job_mgr._simulation_submit_task_jobs( - [fail_all_1066], schd.workflow) - - # For the purpose of the test delete the started time set by - # _simulation_submit_task_jobs. - fail_all_1066.summary['started_time'] = 0 - - # Before simulation time is up: - assert sim_time_check(schd.task_events_mgr, [fail_all_1066], '') is False - - # Time's up... 
- monkeytime(12) - - # After simulation time is up it Fails and records custom outputs: - assert sim_time_check(schd.task_events_mgr, [fail_all_1066], '') is True - outputs = fail_all_1066.state.outputs - assert outputs.is_message_complete('succeeded') is False - assert outputs.is_message_complete('bar') is True - assert outputs.is_message_complete('failed') is True - - -def test_task_sped_up(sim_time_check_setup, monkeytime): - """Task will speed up by a factor set in config.""" - - schd, _ = sim_time_check_setup - fast_forward_1066 = schd.pool.get_task( - ISO8601Point('1066'), 'fast_forward') - - # Run the job submission method: - monkeytime(0) - schd.task_job_mgr._simulation_submit_task_jobs( - [fast_forward_1066], schd.workflow) - fast_forward_1066.state.is_queued = False - - result = sim_time_check(schd.task_events_mgr, [fast_forward_1066], '') - assert result is False - monkeytime(29) - result = sim_time_check(schd.task_events_mgr, [fast_forward_1066], '') - assert result is False - monkeytime(31) - result = sim_time_check(schd.task_events_mgr, [fast_forward_1066], '') - assert result is True - - -async def test_settings_restart( - monkeytime, flow, scheduler, start -): - """Check that simulation mode settings are correctly restored - upon restart. - - In the case of start time this is collected from the database - from task_jobs.start_time. - - tasks: - one: Runs straighforwardly. - two: Test case where database is missing started_time - because it was upgraded from an earlier version of Cylc. 
- """ - id_ = flow({ - 'scheduler': {'cycle point format': '%Y'}, - 'scheduling': { - 'initial cycle point': '1066', - 'graph': { - 'R1': 'one & two' - } - }, - 'runtime': { - 'root': { - 'execution time limit': 'PT1M', - 'execution retry delays': 'P0Y', - 'simulation': { - 'speedup factor': 1, - 'fail cycle points': 'all', - 'fail try 1 only': True, - } - }, - } - }) - schd = scheduler(id_) - - # Start the workflow: - async with start(schd): - og_timeouts = {} - for itask in schd.pool.get_tasks(): - schd.task_job_mgr._simulation_submit_task_jobs( - [itask], schd.workflow) - - og_timeouts[itask.identity] = itask.mode_settings.timeout - - # Mock wallclock < sim end timeout - monkeytime(itask.mode_settings.timeout - 1) - assert sim_time_check( - schd.task_events_mgr, [itask], schd.workflow_db_mgr - ) is False - - # Stop and restart the scheduler: - schd = scheduler(id_) - async with start(schd): - # Get our tasks and fix wallclock: - itasks = schd.pool.get_tasks() - for itask in itasks: - - # Check that we haven't got started time & mode settings back: - assert itask.summary['started_time'] is None - assert itask.mode_settings is None - - if itask.identity == '1066/two': - # Delete the database entry for `two`: Ensure that - # we don't break sim mode on upgrade to this version of Cylc. 
- schd.workflow_db_mgr.pri_dao.connect().execute( - 'UPDATE task_jobs' - '\n SET time_submit = NULL' - '\n WHERE (name == \'two\')' - ) - schd.workflow_db_mgr.process_queued_ops() - monkeytime(42) - expected_timeout = 102.0 - else: - monkeytime(og_timeouts[itask.identity] - 1) - expected_timeout = float(int(og_timeouts[itask.identity])) - - assert sim_time_check( - schd.task_events_mgr, [itask], schd.workflow_db_mgr - ) is False - - # Check that the itask.mode_settings is now re-created - assert itask.mode_settings.__dict__ == { - 'simulated_run_length': 60.0, - 'sim_task_fails': True, - 'timeout': expected_timeout - } - - -async def test_settings_reload( - flow, scheduler, start, run_simjob -): - """Check that simulation mode settings are changed for future - pseudo jobs on reload. - - """ - id_ = flow({ - 'scheduler': {'cycle point format': '%Y'}, - 'scheduling': { - 'initial cycle point': '1066', - 'graph': {'R1': 'one'} - }, - 'runtime': { - 'one': { - 'execution time limit': 'PT1M', - 'execution retry delays': 'P0Y', - 'simulation': { - 'speedup factor': 1, - 'fail cycle points': 'all', - 'fail try 1 only': False, - } - }, - } - }) - schd = scheduler(id_) - async with start(schd): - # Submit first psuedo-job and "run" to failure: - one_1066 = schd.pool.get_tasks()[0] - - itask = run_simjob(schd, one_1066.point, 'one') - assert itask.state.outputs.is_message_complete('failed') is False - - # Modify config as if reinstall had taken place: - conf_file = Path(schd.workflow_run_dir) / 'flow.cylc' - conf_file.write_text( - conf_file.read_text().replace('False', 'True')) - - # Reload Workflow: - await commands.run_cmd(commands.reload_workflow, schd) - - # Submit second psuedo-job and "run" to success: - itask = run_simjob(schd, one_1066.point, 'one') - assert itask.state.outputs.is_message_complete('succeeded') is True - - -async def test_settings_broadcast( - flow, scheduler, start, monkeytime -): - """Assert that broadcasting a change in the settings for a task - 
affects subsequent psuedo-submissions. - """ - id_ = flow({ - 'scheduler': {'cycle point format': '%Y'}, - 'scheduling': { - 'initial cycle point': '1066', - 'graph': {'R1': 'one'} - }, - 'runtime': { - 'one': { - 'execution time limit': 'PT1S', - 'execution retry delays': '2*PT5S', - 'simulation': { - 'speedup factor': 1, - 'fail cycle points': '1066', - 'fail try 1 only': False - } - }, - } - }, defaults=False) - schd = scheduler(id_, paused_start=False, run_mode='simulation') - async with start(schd) as log: - itask = schd.pool.get_tasks()[0] - itask.state.is_queued = False - - # Submit the first - the sim task will fail: - schd.task_job_mgr._simulation_submit_task_jobs( - [itask], schd.workflow) - assert itask.mode_settings.sim_task_fails is True - - # Let task finish. - monkeytime(itask.mode_settings.timeout + 1) - assert sim_time_check( - schd.task_events_mgr, [itask], - schd.workflow_db_mgr - ) is True - - # The mode_settings object has been cleared: - assert itask.mode_settings is None - # Change a setting using broadcast: - schd.broadcast_mgr.put_broadcast( - ['1066'], ['one'], [{ - 'simulation': {'fail cycle points': ''} - }]) - # Submit again - result is different: - schd.task_job_mgr._simulation_submit_task_jobs( - [itask], schd.workflow) - assert itask.mode_settings.sim_task_fails is False - - # Assert Clearing the broadcast works - schd.broadcast_mgr.clear_broadcast() - schd.task_job_mgr._simulation_submit_task_jobs( - [itask], schd.workflow) - assert itask.mode_settings.sim_task_fails is True - - # Assert that list of broadcasts doesn't change if we submit - # Invalid fail cycle points to broadcast. 
- itask.mode_settings = None - schd.broadcast_mgr.put_broadcast( - ['1066'], ['one'], [{ - 'simulation': {'fail cycle points': 'higadfuhasgiurguj'} - }]) - schd.task_job_mgr._simulation_submit_task_jobs( - [itask], schd.workflow) - assert ( - 'Invalid ISO 8601 date representation: higadfuhasgiurguj' - in log.messages[-1]) - - schd.broadcast_mgr.put_broadcast( - ['1066'], ['one'], [{ - 'simulation': {'fail cycle points': '1'} - }]) - schd.task_job_mgr._simulation_submit_task_jobs( - [itask], schd.workflow) - assert ( - 'Invalid ISO 8601 date representation: 1' - in log.messages[-1]) - - # Broadcast tasks will reparse correctly: - schd.broadcast_mgr.put_broadcast( - ['1066'], ['one'], [{ - 'simulation': {'fail cycle points': '1945, 1977, 1066'}, - 'execution retry delays': '3*PT2S' - }]) - schd.task_job_mgr._simulation_submit_task_jobs( - [itask], schd.workflow) - assert itask.mode_settings.sim_task_fails is True - assert itask.try_timers['execution-retry'].delays == [2.0, 2.0, 2.0] - # n.b. 
rtconfig should remain unchanged, lest we cancel broadcasts: - assert itask.tdef.rtconfig['execution retry delays'] == [5.0, 5.0] - - -async def test_db_submit_num( - flow, one_conf, scheduler, run, complete, db_select -): - """Test simulation mode correctly increments the submit_num in the DB.""" - schd: Scheduler = scheduler(flow(one_conf), paused_start=False) - async with run(schd): - await complete(schd, '1/one') - assert db_select(schd, False, 'task_states', 'submit_num', 'status') == [ - (1, 'succeeded'), - ] diff --git a/tests/integration/test_task_events_mgr.py b/tests/integration/test_task_events_mgr.py index 7ac12274d7b..08ed816414d 100644 --- a/tests/integration/test_task_events_mgr.py +++ b/tests/integration/test_task_events_mgr.py @@ -152,7 +152,7 @@ async def test__always_insert_task_job( schd.pool.get_tasks(), schd.server.curve_auth, schd.server.client_pub_key_dir, - is_simulation=False + run_mode='live' ) # Both tasks are in a waiting state: diff --git a/tests/integration/test_task_pool.py b/tests/integration/test_task_pool.py index f2a0e650dcd..6af4f92813b 100644 --- a/tests/integration/test_task_pool.py +++ b/tests/integration/test_task_pool.py @@ -645,7 +645,8 @@ def list_tasks(schd): ('1', 'z', 'waiting'), ], [ - {('1', 'a', 'succeeded'): 'satisfied naturally'}, + {('1', 'a', 'succeeded'): + 'satisfied by simulation mode'}, {('1', 'b', 'succeeded'): False}, {('1', 'c', 'succeeded'): False}, ], @@ -673,7 +674,8 @@ def list_tasks(schd): ('1', 'z', 'waiting'), ], [ - {('1', 'a', 'succeeded'): 'satisfied naturally'}, + {('1', 'a', 'succeeded'): + 'satisfied by simulation mode'}, {('1', 'b', 'succeeded'): False}, ], id='removed' @@ -768,7 +770,8 @@ async def test_restart_prereqs( ('1', 'z', 'waiting'), ], [ - {('1', 'a', 'succeeded'): 'satisfied naturally'}, + {('1', 'a', 'succeeded'): + 'satisfied by simulation mode'}, {('1', 'b', 'succeeded'): False}, {('1', 'c', 'succeeded'): False}, ], @@ -796,7 +799,8 @@ async def test_restart_prereqs( ('1', 
'z', 'waiting'), ], [ - {('1', 'a', 'succeeded'): 'satisfied naturally'}, + {('1', 'a', 'succeeded'): + 'satisfied by simulation mode'}, {('1', 'b', 'succeeded'): False}, ], id='removed' @@ -895,7 +899,7 @@ async def _test_restart_prereqs_sat(): for prereq in task_c.state.prerequisites for key, satisfied in prereq.items() ) == [ - ('1', 'a', 'succeeded', 'satisfied naturally'), + ('1', 'a', 'succeeded', 'satisfied by simulation mode'), ('1', 'b', 'succeeded', 'satisfied from database') ] @@ -912,7 +916,7 @@ async def _test_restart_prereqs_sat(): for prereq in task_c_prereqs for condition in prereq.conditions ) == [ - ('1/a', True, 'satisfied naturally'), + ('1/a', True, 'satisfied by simulation mode'), ('1/b', True, 'satisfied from database'), ] @@ -1576,6 +1580,75 @@ async def test_set_outputs_future( assert log_filter(log, contains="completed output y") +async def test_set_outputs_from_skip_settings( + flow, + scheduler, + start, + log_filter, + validate +): + """Check working of ``cylc set --out=skip``: + + 1. --out=skip can be used to set all required outputs. + 2. --out=skip,other_output can be used to set other outputs. + + """ + id_ = flow( + { + 'scheduler': { + 'allow implicit tasks': 'True', + }, + 'scheduling': { + 'cycling mode': 'integer', + 'initial cycle point': 1, + 'final cycle point': 2, + 'graph': { + 'P1': """ + a => after_asucceeded + a:x => after_ax + a:y? => after_ay + """ + } + }, + 'runtime': { + 'a': { + 'outputs': { + 'x': 'xebec', + 'y': 'yacht' + }, + 'skip': {'outputs': 'x'} + } + } + } + ) + validate(id_) + schd = scheduler(id_) + + async with start(schd): + + # it should start up with just tasks a: + assert pool_get_task_ids(schd.pool) == ['1/a', '2/a'] + + # setting 1/a output to skip should set output x, but not + # y (because y is optional). 
+ schd.pool.set_prereqs_and_outputs( + ['1/a'], ['skip'], None, ['all']) + assert (pool_get_task_ids(schd.pool) == [ + '1/after_asucceeded', + '1/after_ax', + '2/a']) + + # You should be able to set skip as part of a list of outputs: + schd.pool.set_prereqs_and_outputs( + ['2/a'], ['skip', 'y'], None, ['all']) + assert (pool_get_task_ids(schd.pool) == [ + '1/after_asucceeded', + '1/after_ax', + '2/after_asucceeded', + '2/after_ax', + '2/after_ay']) + + async def test_prereq_satisfaction( flow, scheduler, diff --git a/tests/integration/utils/flow_tools.py b/tests/integration/utils/flow_tools.py index 34b80a25882..10e879b560d 100644 --- a/tests/integration/utils/flow_tools.py +++ b/tests/integration/utils/flow_tools.py @@ -116,6 +116,10 @@ def __make_scheduler(id_: str, **opts: Any) -> Scheduler: schd.workflow_db_mgr.on_workflow_shutdown() +def caplogprinter(caplog): + _ = [print(i) for i in caplog.messages] + + @asynccontextmanager async def _start_flow( caplog: Optional[pytest.LogCaptureFixture], @@ -125,6 +129,8 @@ async def _start_flow( """Start a scheduler but don't set the main loop running.""" if caplog: caplog.set_level(level, CYLC_LOG) + # Debug functionality + caplog.print = lambda: caplogprinter(caplog) await schd.install() @@ -155,6 +161,8 @@ async def _run_flow( """Start a scheduler and set the main loop running.""" if caplog: caplog.set_level(level, CYLC_LOG) + # Debug functionality + caplog.print = lambda: caplogprinter(caplog) await schd.install() diff --git a/tests/unit/run_modes/test_dummy.py b/tests/unit/run_modes/test_dummy.py new file mode 100644 index 00000000000..998c13767c9 --- /dev/null +++ b/tests/unit/run_modes/test_dummy.py @@ -0,0 +1,40 @@ +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. 
+# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +"""Tests for utilities supporting dummy mode. +""" +import pytest +from cylc.flow.run_modes.dummy import build_dummy_script + + +@pytest.mark.parametrize( + 'fail_one_time_only', (True, False) +) +def test_build_dummy_script(fail_one_time_only): + rtc = { + 'outputs': {'foo': '1', 'bar': '2'}, + 'simulation': { + 'fail try 1 only': fail_one_time_only, + 'fail cycle points': '1', + } + } + result = build_dummy_script(rtc, 60) + assert result.split('\n') == [ + 'sleep 60', + "cylc message '1'", + "cylc message '2'", + f"cylc__job__dummy_result {str(fail_one_time_only).lower()}" + " 1 || exit 1" + ] diff --git a/tests/unit/run_modes/test_nonlive.py b/tests/unit/run_modes/test_nonlive.py new file mode 100644 index 00000000000..71695f2c96b --- /dev/null +++ b/tests/unit/run_modes/test_nonlive.py @@ -0,0 +1,51 @@ +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +"""Unit tests for utilities supporting all nonlive modes +""" + +from types import SimpleNamespace + +from cylc.flow.run_modes.nonlive import run_mode_validate_checks + + +def test_run_mode_validate_checks(monkeypatch, caplog): + """It warns us if we've set a task config to nonlive mode. + + (And not otherwise) + + Point 3 from the skip mode proposal + https://github.com/cylc/cylc-admin/blob/master/docs/proposal-skip-mode.md + + | If the run mode is set to simulation or skip in the workflow + | configuration, then cylc validate and cylc lint should produce + | warning (similar to development features in other languages / systems). 
+ """ + taskdefs = { + f'{run_mode}_task': SimpleNamespace( + rtconfig={'run mode': run_mode}, + name=f'{run_mode}_task' + ) + for run_mode + in ['live', 'skip'] + } + + run_mode_validate_checks(taskdefs) + + message = caplog.messages[0] + + assert 'skip mode:\n * skip_task' in message + assert ' live mode' not in message # Avoid matching "non-live mode" + assert 'workflow mode' not in message diff --git a/tests/unit/test_simulation.py b/tests/unit/run_modes/test_simulation.py similarity index 86% rename from tests/unit/test_simulation.py rename to tests/unit/run_modes/test_simulation.py index 920a872503a..109174c8b43 100644 --- a/tests/unit/test_simulation.py +++ b/tests/unit/run_modes/test_simulation.py @@ -20,9 +20,8 @@ from cylc.flow.cycling.integer import IntegerPoint from cylc.flow.cycling.iso8601 import ISO8601Point -from cylc.flow.simulation import ( +from cylc.flow.run_modes.simulation import ( parse_fail_cycle_points, - build_dummy_script, disable_platforms, get_simulated_run_len, sim_task_failed, @@ -56,27 +55,6 @@ def test_get_simulated_run_len( assert get_simulated_run_len(rtc) == 3600 -@pytest.mark.parametrize( - 'fail_one_time_only', (True, False) -) -def test_set_simulation_script(fail_one_time_only): - rtc = { - 'outputs': {'foo': '1', 'bar': '2'}, - 'simulation': { - 'fail try 1 only': fail_one_time_only, - 'fail cycle points': '1', - } - } - result = build_dummy_script(rtc, 60) - assert result.split('\n') == [ - 'sleep 60', - "cylc message '1'", - "cylc message '2'", - f"cylc__job__dummy_result {str(fail_one_time_only).lower()}" - " 1 || exit 1" - ] - - @pytest.mark.parametrize( 'rtc, expect', ( ({'platform': 'skarloey'}, 'localhost'), @@ -100,7 +78,7 @@ def test_disable_platforms(rtc, expect): def test_parse_fail_cycle_points(set_cycling_type): before = ['2', '4'] set_cycling_type() - assert parse_fail_cycle_points(before) == [ + assert parse_fail_cycle_points(before, ['']) == [ IntegerPoint(i) for i in before ] diff --git 
a/tests/unit/run_modes/test_skip.py b/tests/unit/run_modes/test_skip.py new file mode 100644 index 00000000000..f5ad89381d7 --- /dev/null +++ b/tests/unit/run_modes/test_skip.py @@ -0,0 +1,101 @@ +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +"""Unit tests for utilities supporting skip modes +""" +import pytest +from pytest import param, raises +from types import SimpleNamespace + +from cylc.flow.exceptions import WorkflowConfigError +from cylc.flow.run_modes.skip import check_task_skip_config, process_outputs + + +@pytest.mark.parametrize( + 'conf', + ( + param({}, id='no-skip-config'), + param({'skip': {'outputs': []}}, id='no-skip-outputs'), + param({'skip': {'outputs': ['foo1', 'failed']}}, id='ok-skip-outputs'), + ) +) +def test_good_check_task_skip_config(conf): + """It returns none if the problems this function checks are not present. + """ + tdef = SimpleNamespace(rtconfig=conf) + tdef.name = 'foo' + assert check_task_skip_config(tdef) is None + + +def test_raises_check_task_skip_config(): + """It raises an error if succeeded and failed are set. 
+ """ + tdef = SimpleNamespace( + rtconfig={'skip': {'outputs': ['foo1', 'failed', 'succeeded']}} + ) + tdef.name = 'foo' + with raises(WorkflowConfigError, match='succeeded AND failed'): + check_task_skip_config(tdef) + + +@pytest.mark.parametrize( + 'outputs, required, expect', + ( + param([], [], ['succeeded'], id='implicit-succeded'), + param( + ['succeeded'], ['succeeded'], ['succeeded'], + id='explicit-succeded' + ), + param(['submitted'], [], ['succeeded'], id='only-1-submit'), + param( + ['foo', 'bar', 'baz', 'qux'], + ['bar', 'qux'], + ['bar', 'qux', 'succeeded'], + id='required-only' + ), + param( + ['foo', 'baz'], + ['bar', 'qux'], + ['succeeded'], + id='no-required' + ), + param( + ['failed'], + [], + ['failed'], + id='explicit-failed' + ), + ) +) +def test_process_outputs(outputs, required, expect): + """Check that skip outputs: + + 1. Doesn't send submitted twice. + 2. Sends every required output. + 3. If failed is set send failed + 4. If failed in not set send succeeded. + """ + # Create a mocked up task-proxy: + rtconf = {'skip': {'outputs': outputs}} + itask = SimpleNamespace( + tdef=SimpleNamespace( + rtconfig=rtconf), + state=SimpleNamespace( + outputs=SimpleNamespace( + iter_required_messages=lambda exclude: iter(required), + _message_to_trigger={v: v for v in required} + ))) + + assert process_outputs(itask, rtconf) == ['submitted', 'started'] + expect diff --git a/tests/unit/scripts/test_lint.py b/tests/unit/scripts/test_lint.py index 20ff738a1ac..1486765e68b 100644 --- a/tests/unit/scripts/test_lint.py +++ b/tests/unit/scripts/test_lint.py @@ -182,7 +182,10 @@ [[[directives]]] -l walltime = 666 [[baz]] + run mode = skip platform = `no backticks` + [[[skip]]] + outputs = succeeded, failed """ + ( '\nscript = the quick brown fox jumps over the lazy dog until it becomes ' 'clear that this line is longer than the default 130 character limit.' 
diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py index 4a820fdb126..0da543b6fac 100644 --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -17,8 +17,8 @@ import os import sys from optparse import Values -from typing import Any, Callable, Dict, List, Optional, Tuple, Type -from pathlib import Path +from typing import ( + TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Type) import pytest import logging from types import SimpleNamespace @@ -47,8 +47,9 @@ from cylc.flow.cycling.iso8601 import ISO8601Point - -Fixture = Any +if TYPE_CHECKING: + from pathlib import Path + Fixture = Any def _tmp_flow_config(tmp_run_dir: Callable): @@ -60,8 +61,8 @@ def _tmp_flow_config(tmp_run_dir: Callable): Returns the path to the flow file. """ - def __tmp_flow_config(id_: str, config: str) -> Path: - run_dir: Path = tmp_run_dir(id_) + def __tmp_flow_config(id_: str, config: str) -> 'Path': + run_dir: 'Path' = tmp_run_dir(id_) flow_file = run_dir / WorkflowFiles.FLOW_FILE flow_file.write_text(config) return flow_file @@ -82,7 +83,7 @@ class TestWorkflowConfig: """Test class for the Cylc WorkflowConfig object.""" def test_xfunction_imports( - self, mock_glbl_cfg: Fixture, tmp_path: Path): + self, mock_glbl_cfg: 'Fixture', tmp_path: 'Path'): """Test for a workflow configuration with valid xtriggers""" mock_glbl_cfg( 'cylc.flow.platforms.glbl_cfg', @@ -175,7 +176,8 @@ def test_xfunction_attribute_error(self, mock_glbl_cfg, tmp_path): with pytest.raises(XtriggerConfigError) as excinfo: WorkflowConfig(workflow="capybara_workflow", fpath=flow_file, options=SimpleNamespace()) - assert "module 'capybara' has no attribute 'capybara'" in str(excinfo.value) + assert "module 'capybara' has no attribute 'capybara'" in str( + excinfo.value) def test_xfunction_not_callable(self, mock_glbl_cfg, tmp_path): """Test for error when a xtrigger function is not callable.""" @@ -358,7 +360,7 @@ def test_process_icp( expected_icp: Optional[str], expected_eval_icp: 
Optional[str], expected_err: Optional[Tuple[Type[Exception], str]], - monkeypatch: pytest.MonkeyPatch, set_cycling_type: Fixture + monkeypatch: pytest.MonkeyPatch, set_cycling_type: 'Fixture' ) -> None: """Test WorkflowConfig.process_initial_cycle_point(). @@ -445,7 +447,7 @@ def test_process_startcp( starttask: Optional[str], expected: str, expected_err: Optional[Tuple[Type[Exception], str]], - monkeypatch: pytest.MonkeyPatch, set_cycling_type: Fixture + monkeypatch: pytest.MonkeyPatch, set_cycling_type: 'Fixture' ) -> None: """Test WorkflowConfig.process_start_cycle_point(). @@ -648,7 +650,7 @@ def test_process_fcp( options_fcp: Optional[str], expected_fcp: Optional[str], expected_err: Optional[Tuple[Type[Exception], str]], - set_cycling_type: Fixture + set_cycling_type: 'Fixture' ) -> None: """Test WorkflowConfig.process_final_cycle_point(). @@ -671,7 +673,7 @@ def test_process_fcp( initial_point=loader.get_point( scheduling_cfg['initial cycle point'] ).standardise(), - final_point = None, + final_point=None, options=SimpleNamespace(fcp=options_fcp), ) @@ -812,7 +814,7 @@ def test_stopcp_after_fcp( cycle point is handled correctly.""" caplog.set_level(logging.WARNING, CYLC_LOG) id_ = 'cassini' - flow_file: Path = tmp_flow_config(id_, f""" + flow_file: 'Path' = tmp_flow_config(id_, f""" [scheduler] allow implicit tasks = True [scheduling] @@ -1366,7 +1368,7 @@ def test_implicit_tasks( """ # Setup id_ = 'rincewind' - flow_file: Path = tmp_flow_config(id_, f""" + flow_file: 'Path' = tmp_flow_config(id_, f""" [scheduler] { f'allow implicit tasks = {allow_implicit_tasks}' @@ -1470,7 +1472,7 @@ def test_zero_interval( """Test that a zero-duration recurrence with >1 repetition gets an appropriate warning.""" id_ = 'ordinary' - flow_file: Path = tmp_flow_config(id_, f""" + flow_file: 'Path' = tmp_flow_config(id_, f""" [scheduler] UTC mode = True allow implicit tasks = True @@ -1514,7 +1516,7 @@ def test_chain_expr( Note the order matters when "nominal" units (years, 
months) are used. """ id_ = 'osgiliath' - flow_file: Path = tmp_flow_config(id_, f""" + flow_file: 'Path' = tmp_flow_config(id_, f""" [scheduler] UTC mode = True allow implicit tasks = True @@ -1693,7 +1695,7 @@ def test__warn_if_queues_have_implicit_tasks(caplog): ] ) def test_cylc_env_at_parsing( - tmp_path: Path, + tmp_path: 'Path', monkeypatch: pytest.MonkeyPatch, installed, run_dir, diff --git a/tests/unit/test_platforms.py b/tests/unit/test_platforms.py index 68d08cb8d16..3167afabf70 100644 --- a/tests/unit/test_platforms.py +++ b/tests/unit/test_platforms.py @@ -34,6 +34,7 @@ PlatformLookupError, GlobalConfigError ) +from cylc.flow.task_state import RunMode PLATFORMS = { @@ -470,9 +471,9 @@ def test_get_install_target_to_platforms_map( for install_target in _map: _map[install_target] = sorted(_map[install_target], key=lambda k: k['name']) + result.pop('localhost') assert result == expected_map - @pytest.mark.parametrize( 'platform, job, remote, expect', [ diff --git a/tests/unit/test_task_outputs.py b/tests/unit/test_task_outputs.py index 2dbe684f04e..28abb27dbb1 100644 --- a/tests/unit/test_task_outputs.py +++ b/tests/unit/test_task_outputs.py @@ -274,7 +274,7 @@ def test_iter_required_outputs(): assert set(outputs.iter_required_messages()) == set() # the preconditions expiry/submitted are excluded from this logic when - # defined as optional + # defined as optional: outputs = TaskOutputs( tdef( {TASK_OUTPUT_SUCCEEDED, 'x', 'y'}, @@ -288,6 +288,32 @@ def test_iter_required_outputs(): 'y', } + # Get all outputs required for success path (excluding failure, what + # is still required): + outputs = TaskOutputs( + tdef( + {}, + {'a', 'succeeded', 'b', 'y', 'failed', 'x'}, + '(x and y and failed) or (a and b and succeeded)' + ) + ) + + # Excluding succeeded leaves us with failure required outputs: + assert set(outputs.iter_required_messages( + exclude=TASK_OUTPUT_SUCCEEDED)) == { + TASK_OUTPUT_FAILED, 'x', 'y',} + + # Excluding failed leaves us with succeeded 
required outputs: + assert set(outputs.iter_required_messages( + exclude=TASK_OUTPUT_FAILED)) == { + TASK_OUTPUT_SUCCEEDED, 'a', 'b',} + + # Excluding an abitrary output leaves us with required outputs + # from another branch: + assert set(outputs.iter_required_messages( + exclude='a')) == { + TASK_OUTPUT_FAILED, 'x', 'y',} + def test_get_trigger_completion_variable_maps(): """It should return a bi-map of triggers to compvars.""" diff --git a/tests/unit/test_task_remote_mgr.py b/tests/unit/test_task_remote_mgr.py index c41e415eba3..61cdcce2bc5 100644 --- a/tests/unit/test_task_remote_mgr.py +++ b/tests/unit/test_task_remote_mgr.py @@ -14,6 +14,7 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . +from contextlib import suppress from pathlib import Path from time import sleep import pytest @@ -229,6 +230,9 @@ def flatten_install_targets_map(itm): install_targets_map = TaskRemoteMgr._get_remote_tidy_targets( set(platform_names), set(install_targets)) + with suppress(KeyError): + install_targets_map.pop('localhost') + assert ( expect['targets'] == flatten_install_targets_map(install_targets_map)) diff --git a/tests/unit/test_task_state.py b/tests/unit/test_task_state.py index 4b5d796a043..1a2041fcba5 100644 --- a/tests/unit/test_task_state.py +++ b/tests/unit/test_task_state.py @@ -15,11 +15,13 @@ # along with this program. If not, see . 
import pytest +from types import SimpleNamespace from cylc.flow.taskdef import TaskDef from cylc.flow.cycling.integer import IntegerSequence, IntegerPoint from cylc.flow.task_trigger import Dependency, TaskTrigger from cylc.flow.task_state import ( + RunMode, TaskState, TASK_STATUS_PREPARING, TASK_STATUS_SUBMIT_FAILED, @@ -118,3 +120,31 @@ def test_task_state_order(): assert tstate.is_gte(TASK_STATUS_SUBMITTED) assert not tstate.is_gt(TASK_STATUS_RUNNING) assert not tstate.is_gte(TASK_STATUS_RUNNING) + + +@pytest.mark.parametrize( + 'itask_run_mode, disable_handlers, expect', + ( + ('live', True, False), + ('live', False, False), + ('dummy', True, False), + ('dummy', False, False), + ('simulation', True, True), + ('simulation', False, True), + ('skip', True, True), + ('skip', False, False), + ) +) +def test_disable_task_event_handlers(itask_run_mode, disable_handlers, expect): + """Conditions under which task event handlers should not be used. + """ + # Construct a fake itask object: + itask = SimpleNamespace( + run_mode=itask_run_mode, + platform={'disable task event handlers': disable_handlers}, + tdef=SimpleNamespace( + rtconfig={ + 'skip': {'disable task event handlers': disable_handlers}}) + ) + # Check method: + assert RunMode.disable_task_event_handlers(itask) is expect From c34bc640887d2c043abc0e31f80bc6ddf0405e96 Mon Sep 17 00:00:00 2001 From: Tim Pillinger <26465611+wxtim@users.noreply.github.com> Date: Thu, 19 Sep 2024 13:39:09 +0100 Subject: [PATCH 02/29] Suggestions from review. 
Co-authored-by: Oliver Sanders --- cylc/flow/cfgspec/workflow.py | 7 +- cylc/flow/commands.py | 3 +- cylc/flow/config.py | 11 +- cylc/flow/data_store_mgr.py | 4 +- cylc/flow/etc/syntax/cylc.lang | 7 +- cylc/flow/etc/syntax/cylc.xml | 7 +- cylc/flow/network/schema.py | 16 +- cylc/flow/platforms.py | 10 +- cylc/flow/prerequisite.py | 1 + cylc/flow/run_modes/__init__.py | 141 ++++++++++++++++++ cylc/flow/run_modes/dummy.py | 2 +- cylc/flow/run_modes/nonlive.py | 2 +- cylc/flow/run_modes/simulation.py | 16 +- cylc/flow/run_modes/skip.py | 13 +- cylc/flow/scheduler.py | 6 +- cylc/flow/scheduler_cli.py | 9 +- cylc/flow/scripts/validate.py | 2 +- cylc/flow/task_events_mgr.py | 7 +- cylc/flow/task_job_mgr.py | 77 ++++++---- cylc/flow/task_outputs.py | 1 - cylc/flow/task_pool.py | 7 +- cylc/flow/task_proxy.py | 3 +- cylc/flow/task_state.py | 70 --------- cylc/flow/unicode_rules.py | 3 +- .../cylc-config/00-simple/section2.stdout | 26 ++-- .../run_modes/06-run-mode-overrides.t | 23 +-- tests/integration/conftest.py | 42 ++++++ .../run_modes/test_mode_overrides.py | 129 +++++++++------- tests/integration/run_modes/test_nonlive.py | 120 +++++++++++---- .../integration/run_modes/test_simulation.py | 65 ++++---- tests/integration/run_modes/test_skip.py | 81 ++++------ tests/integration/test_dbstatecheck.py | 10 +- tests/integration/utils/flow_tools.py | 8 - ...{test_nonlive.py => test_nonlive_units.py} | 0 ...simulation.py => test_simulation_units.py} | 0 .../{test_skip.py => test_skip_units.py} | 0 tests/unit/test_platforms.py | 1 - tests/unit/test_task_state.py | 4 +- 38 files changed, 545 insertions(+), 389 deletions(-) create mode 100644 cylc/flow/run_modes/__init__.py rename tests/unit/run_modes/{test_nonlive.py => test_nonlive_units.py} (100%) rename tests/unit/run_modes/{test_simulation.py => test_simulation_units.py} (100%) rename tests/unit/run_modes/{test_skip.py => test_skip_units.py} (100%) diff --git a/cylc/flow/cfgspec/workflow.py b/cylc/flow/cfgspec/workflow.py 
index dce1b0316a0..1da7401d2d5 100644 --- a/cylc/flow/cfgspec/workflow.py +++ b/cylc/flow/cfgspec/workflow.py @@ -56,8 +56,9 @@ from cylc.flow.platforms import ( fail_if_platform_and_host_conflict, get_platform_deprecated_settings, is_platform_definition_subshell) +from cylc.flow.run_modes import RunMode from cylc.flow.task_events_mgr import EventData -from cylc.flow.task_state import RunMode +from cylc.flow.run_modes import TASK_CONFIG_RUN_MODES # Regex to check whether a string is a command @@ -1338,8 +1339,8 @@ def get_script_common_text(this: str, example: Optional[str] = None): ) Conf( 'run mode', VDR.V_STRING, - options=list(RunMode.OVERRIDING_MODES.value) + [''], - default='', + options=list(TASK_CONFIG_RUN_MODES), + default=RunMode.LIVE.value, desc=f''' For a workflow run in live mode run this task in skip mode. diff --git a/cylc/flow/commands.py b/cylc/flow/commands.py index d7f3ffc5b4e..75ed5169112 100644 --- a/cylc/flow/commands.py +++ b/cylc/flow/commands.py @@ -76,9 +76,10 @@ from cylc.flow.log_level import log_level_to_verbosity from cylc.flow.network.schema import WorkflowStopMode from cylc.flow.parsec.exceptions import ParsecError +from cylc.flow.run_modes import RunMode from cylc.flow.task_id import TaskID from cylc.flow.task_state import ( - TASK_STATUSES_ACTIVE, TASK_STATUS_FAILED, RunMode) + TASK_STATUSES_ACTIVE, TASK_STATUS_FAILED) from cylc.flow.workflow_status import StopMode from metomi.isodatetime.parsers import TimePointParser diff --git a/cylc/flow/config.py b/cylc/flow/config.py index ef6300b7d43..73d39e2e6a8 100644 --- a/cylc/flow/config.py +++ b/cylc/flow/config.py @@ -99,7 +99,7 @@ get_trigger_completion_variable_maps, trigger_to_completion_variable, ) -from cylc.flow.task_state import RunMode +from cylc.flow.run_modes import RunMode from cylc.flow.task_trigger import TaskTrigger, Dependency from cylc.flow.taskdef import TaskDef from cylc.flow.unicode_rules import ( @@ -1739,10 +1739,6 @@ def process_config_env(self): ] ) - def 
run_mode(self) -> str: - """Return the run mode.""" - return RunMode.get(self.options) - def _check_task_event_handlers(self): """Check custom event handler templates can be expanded. @@ -2494,7 +2490,10 @@ def _get_taskdef(self, name: str) -> TaskDef: # Get the taskdef object for generating the task proxy class taskd = TaskDef( - name, rtcfg, self.run_mode(), self.start_point, + name, + rtcfg, + RunMode.get(self.options), + self.start_point, self.initial_point) # TODO - put all taskd.foo items in a single config dict diff --git a/cylc/flow/data_store_mgr.py b/cylc/flow/data_store_mgr.py index a9eee3f175c..f25a6bb11ac 100644 --- a/cylc/flow/data_store_mgr.py +++ b/cylc/flow/data_store_mgr.py @@ -85,6 +85,7 @@ pdeepcopy, poverride ) +from cylc.flow.run_modes import RunMode from cylc.flow.workflow_status import ( get_workflow_status, get_workflow_status_msg, @@ -700,8 +701,7 @@ def generate_definition_elements(self): time_zone_info = TIME_ZONE_LOCAL_INFO for key, val in time_zone_info.items(): setbuff(workflow.time_zone_info, key, val) - - workflow.run_mode = config.run_mode() + workflow.run_mode = RunMode.get(config.options) workflow.cycling_mode = config.cfg['scheduling']['cycling mode'] workflow.workflow_log_dir = self.schd.workflow_log_dir workflow.job_log_names.extend(list(JOB_LOG_OPTS.values())) diff --git a/cylc/flow/etc/syntax/cylc.lang b/cylc/flow/etc/syntax/cylc.lang index c3f43da2c95..6179a18f750 100644 --- a/cylc/flow/etc/syntax/cylc.lang +++ b/cylc/flow/etc/syntax/cylc.lang @@ -93,7 +93,6 @@ work sub-directory warning handlers verbose mode - user to title time limit buffer @@ -116,15 +115,17 @@ stall handlers speedup factor special tasks + skip simulation shutdown handlers + sequential xtriggers sequential script scheduling scheduler runtime runahead limit - run-dir + run mode retry handlers retrieve job logs retry delays retrieve job logs max size @@ -198,10 +199,12 @@ cycle point format custom handlers critical handlers + completion clock-trigger 
clock-expire batch system batch submit command template + alt-cylc-run-dir allow implicit tasks abort on workflow timeout abort on stall timeout diff --git a/cylc/flow/etc/syntax/cylc.xml b/cylc/flow/etc/syntax/cylc.xml index da11c1215e1..be74c2fa4ad 100644 --- a/cylc/flow/etc/syntax/cylc.xml +++ b/cylc/flow/etc/syntax/cylc.xml @@ -20,7 +20,6 @@ - @@ -43,15 +42,17 @@ + + - + @@ -125,10 +126,12 @@ + + diff --git a/cylc/flow/network/schema.py b/cylc/flow/network/schema.py index 97886545b37..3ee37892f1e 100644 --- a/cylc/flow/network/schema.py +++ b/cylc/flow/network/schema.py @@ -49,9 +49,10 @@ ) from cylc.flow.flow_mgr import FLOW_ALL, FLOW_NEW, FLOW_NONE from cylc.flow.id import Tokens +from cylc.flow.run_modes import ( + TASK_CONFIG_RUN_MODES, WORKFLOW_RUN_MODES, RunMode) from cylc.flow.task_outputs import SORT_ORDERS from cylc.flow.task_state import ( - RunMode, TASK_STATUSES_ORDERED, TASK_STATUS_DESC, TASK_STATUS_WAITING, @@ -605,20 +606,19 @@ def describe_run_mode(run_mode: Optional['Enum']) -> str: return getattr(RunMode, run_mode.value.upper()).__doc__ +# The run mode for the workflow. WorkflowRunMode = graphene.Enum( 'WorkflowRunMode', - [(m.capitalize(), m) for m in RunMode.WORKFLOW_MODES.value], - description=describe_run_mode, + [(m.capitalize(), m) for m in WORKFLOW_RUN_MODES], + description=lambda x: RunMode(x.value).describe() if x else None, ) -"""The run mode for the workflow.""" - +# The run mode for the task. 
TaskRunMode = graphene.Enum( 'TaskRunMode', - [(m.capitalize(), m) for m in RunMode.WORKFLOW_MODES.value], - description=describe_run_mode, + [(m.capitalize(), m) for m in TASK_CONFIG_RUN_MODES], + description=lambda x: RunMode(x.value).describe() if x else None, ) -"""The run mode for tasks.""" class Workflow(ObjectType): diff --git a/cylc/flow/platforms.py b/cylc/flow/platforms.py index fa49e598ec2..02ff15c5462 100644 --- a/cylc/flow/platforms.py +++ b/cylc/flow/platforms.py @@ -31,7 +31,7 @@ PlatformLookupError, CylcError, NoHostsError, NoPlatformsError) from cylc.flow.cfgspec.glbl_cfg import glbl_cfg from cylc.flow.hostuserutil import is_remote_host -from cylc.flow.task_state import RunMode +from cylc.flow.run_modes import JOBLESS_MODES if TYPE_CHECKING: from cylc.flow.parsec.OrderedDict import OrderedDictWithDefaults @@ -267,7 +267,7 @@ def platform_from_name( return platform_data # If platform name in run mode and not otherwise defined: - if platform_name in RunMode.JOBLESS_MODES.value: + if platform_name in JOBLESS_MODES: return platforms['localhost'] raise PlatformLookupError( @@ -652,7 +652,7 @@ def get_install_target_to_platforms_map( Return {install_target_1: [platform_1_dict, platform_2_dict, ...], ...} """ ret: Dict[str, List[Dict[str, Any]]] = {} - for p_name in set(platform_names) - set(RunMode.JOBLESS_MODES.value): + for p_name in set(platform_names) - set(JOBLESS_MODES): try: platform = platform_from_name(p_name) except PlatformLookupError as exc: @@ -665,10 +665,10 @@ def get_install_target_to_platforms_map( # Map jobless modes to localhost. 
if 'localhost' in ret: ret['localhost'] += [ - {'name': mode} for mode in RunMode.JOBLESS_MODES.value] + {'name': mode} for mode in JOBLESS_MODES] else: ret['localhost'] = [ - {'name': mode} for mode in RunMode.JOBLESS_MODES.value] + {'name': mode} for mode in JOBLESS_MODES] return ret diff --git a/cylc/flow/prerequisite.py b/cylc/flow/prerequisite.py index 04ea4596c09..ba9300bd75d 100644 --- a/cylc/flow/prerequisite.py +++ b/cylc/flow/prerequisite.py @@ -272,6 +272,7 @@ def satisfy_me( """ satisfied_message: SatisfiedState + if mode != 'live': satisfied_message = self.DEP_STATE_SATISFIED_BY.format( mode) # type: ignore diff --git a/cylc/flow/run_modes/__init__.py b/cylc/flow/run_modes/__init__.py new file mode 100644 index 00000000000..529be513c6c --- /dev/null +++ b/cylc/flow/run_modes/__init__.py @@ -0,0 +1,141 @@ +# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. +# Copyright (C) NIWA & British Crown (Met Office) & Contributors. + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
from enum import Enum
from typing import TYPE_CHECKING, Callable, Optional, Tuple

if TYPE_CHECKING:
    from optparse import Values
    from cylc.flow.task_job_mgr import TaskJobManager
    from cylc.flow.task_proxy import TaskProxy

    # The interface for submitting jobs
    SubmissionInterface = Callable[
        [  # Args:
            # the task job manager instance
            'TaskJobManager',
            # the task to submit
            'TaskProxy',
            # the task's runtime config (with broadcasts applied)
            dict,
            # the workflow ID
            str,
            # the current time as (float_unix_time, str_ISO8601)
            Tuple[float, str]
        ],
        # Return False if the job requires live-mode submission
        # (dummy mode does this), else return True.
        bool
    ]


class RunMode(Enum):
    """The possible run modes of a task/workflow."""

    LIVE = 'live'
    """Task will run normally."""

    SIMULATION = 'simulation'
    """Simulates job submission with configurable execution time
    and succeeded/failed outcomes (but does not submit real jobs)."""

    DUMMY = 'dummy'
    """Submits real jobs with empty scripts."""

    SKIP = 'skip'
    """Skips job submission; sets required outputs (by default) or
    configured outputs."""

    def describe(self) -> str:
        """Return a user friendly description of this run mode.

        For use by the configuration spec documenter.

        Raises:
            KeyError: If no description is defined for this mode.
        """
        # Keep these in sync with the per-member docstrings above.
        descriptions = {
            RunMode.LIVE: "Task will run normally.",
            RunMode.SKIP: (
                "Skips job submission; sets required outputs"
                " (by default) or configured outputs."),
            RunMode.DUMMY: "Submits real jobs with empty scripts.",
            RunMode.SIMULATION: (
                "Simulates job submission with configurable"
                " execution time and succeeded/failed outcomes"
                " (but does not submit real jobs)."),
        }
        try:
            return descriptions[self]
        except KeyError:
            raise KeyError(f'No description for {self}.') from None

    @staticmethod
    def get(options: 'Values') -> str:
        """Return the workflow run mode from CLI options.

        Defaults to 'live' when no mode was given.
        """
        # getattr covers both "attribute missing" and "attribute falsy":
        return getattr(options, 'run_mode', None) or RunMode.LIVE.value

    def get_submit_method(self) -> 'Optional[SubmissionInterface]':
        """Return the job submission method for this run mode.

        Returns None for live-mode jobs as these use a
        different code pathway for job submission.
        """
        # Imports are deferred to avoid import cycles and to keep
        # module import cheap for the common (live) case.
        if self == RunMode.DUMMY:
            from cylc.flow.run_modes.dummy import (
                submit_task_job as dummy_submit_task_job)
            return dummy_submit_task_job
        elif self == RunMode.SIMULATION:
            from cylc.flow.run_modes.simulation import (
                submit_task_job as simulation_submit_task_job)
            return simulation_submit_task_job
        elif self == RunMode.SKIP:
            from cylc.flow.run_modes.skip import (
                submit_task_job as skip_submit_task_job)
            return skip_submit_task_job
        return None


def disable_task_event_handlers(itask: 'TaskProxy') -> bool:
    """Should we disable event handlers for this task?

    No event handlers in simulation mode, or in skip mode
    if we don't deliberately enable them.
    """
    mode = itask.run_mode
    return (
        mode == RunMode.SIMULATION.value
        or (
            mode == RunMode.SKIP.value
            and itask.platform.get(
                'disable task event handlers', False)
        )
    )


# Modes available for running a whole workflow:
WORKFLOW_RUN_MODES = frozenset(i.value for i in {
    RunMode.LIVE, RunMode.DUMMY, RunMode.SIMULATION})

# Modes which can be set in task config:
TASK_CONFIG_RUN_MODES = frozenset(
    i.value for i in (RunMode.LIVE, RunMode.SKIP))
# And those only available to the workflow:
WORKFLOW_ONLY_MODES = frozenset(
    i.value for i in RunMode) - TASK_CONFIG_RUN_MODES

# Modes which completely ignore the standard submission path:
JOBLESS_MODES = frozenset(i.value for i in {
    RunMode.SKIP, RunMode.SIMULATION})
RunMode +from cylc.flow.run_modes import RunMode if TYPE_CHECKING: @@ -73,8 +73,12 @@ def submit_task_job( itask.submit_num += 1 itask.platform = { - 'name': RunMode.SIMULATION.value, 'install target': 'localhost'} - itask.platform['name'] = RunMode.SIMULATION.value + 'name': RunMode.SIMULATION.value, + 'install target': 'localhost', + 'hosts': ['localhost'], + 'disable task event handlers': + rtconfig['simulation']['disable task event handlers'], + } itask.summary['job_runner_name'] = RunMode.SIMULATION.value itask.summary[task_job_mgr.KEY_EXECUTE_TIME_LIMIT] = ( itask.mode_settings.simulated_run_length @@ -88,6 +92,7 @@ def submit_task_job( task_job_mgr.workflow_db_mgr.put_insert_task_jobs( itask, { 'time_submit': now[1], + 'time_run': now[1], 'try_num': itask.get_try_num(), 'flow_nums': str(list(itask.flow_nums)), 'is_manual_submit': itask.is_manual_submit, @@ -311,7 +316,10 @@ def sim_time_check( for itask in itasks: if ( itask.state.status != TASK_STATUS_RUNNING - or itask.run_mode and itask.run_mode != RunMode.SIMULATION.value + or ( + itask.run_mode + and itask.run_mode != RunMode.SIMULATION.value + ) ): continue diff --git a/cylc/flow/run_modes/skip.py b/cylc/flow/run_modes/skip.py index 960301bfabc..9a05aa4729b 100644 --- a/cylc/flow/run_modes/skip.py +++ b/cylc/flow/run_modes/skip.py @@ -26,7 +26,7 @@ TASK_OUTPUT_FAILED, TASK_OUTPUT_STARTED ) -from cylc.flow.task_state import RunMode +from cylc.flow.run_modes import RunMode if TYPE_CHECKING: from cylc.flow.taskdef import TaskDef @@ -39,6 +39,7 @@ def submit_task_job( task_job_mgr: 'TaskJobManager', itask: 'TaskProxy', rtconfig: Dict, + _workflow: str, now: Tuple[float, str] ) -> 'Literal[True]': """Submit a task in skip mode. @@ -46,10 +47,6 @@ def submit_task_job( Returns: True - indicating that TaskJobManager need take no further action. 
""" - # Don't do anything if task is held: - if itask.state.is_held: - return True - task_job_mgr._set_retry_timers(itask, rtconfig) itask.summary['started_time'] = now[0] itask.waiting_on_job_prep = False @@ -63,7 +60,6 @@ def submit_task_job( rtconfig['skip']['disable task event handlers'], 'execution polling intervals': [] } - itask.platform['name'] = RunMode.SKIP.value itask.summary['job_runner_name'] = RunMode.SKIP.value itask.run_mode = RunMode.SKIP.value task_job_mgr.workflow_db_mgr.put_insert_task_jobs( @@ -72,11 +68,12 @@ def submit_task_job( 'try_num': itask.get_try_num(), 'flow_nums': str(list(itask.flow_nums)), 'is_manual_submit': itask.is_manual_submit, - 'job_runner_name': RunMode.SIMULATION.value, - 'platform_name': RunMode.SIMULATION.value, + 'job_runner_name': RunMode.SKIP.value, + 'platform_name': RunMode.SKIP.value, 'submit_status': 0 # Submission has succeeded } ) + task_job_mgr.workflow_db_mgr.put_update_task_state(itask) for output in process_outputs(itask, rtconfig): task_job_mgr.task_events_mgr.process_message(itask, INFO, output) diff --git a/cylc/flow/scheduler.py b/cylc/flow/scheduler.py index d4b5d0310a5..d73ab947f98 100644 --- a/cylc/flow/scheduler.py +++ b/cylc/flow/scheduler.py @@ -114,6 +114,7 @@ from cylc.flow.workflow_db_mgr import WorkflowDatabaseManager from cylc.flow.workflow_events import WorkflowEventHandler from cylc.flow.workflow_status import StopMode, AutoRestartMode +from cylc.flow.run_modes import RunMode, WORKFLOW_ONLY_MODES from cylc.flow.taskdef import TaskDef from cylc.flow.task_events_mgr import TaskEventsManager from cylc.flow.task_job_mgr import TaskJobManager @@ -132,8 +133,7 @@ TASK_STATUS_PREPARING, TASK_STATUS_RUNNING, TASK_STATUS_SUBMITTED, - TASK_STATUS_WAITING, - RunMode) + TASK_STATUS_WAITING) from cylc.flow.templatevars import get_template_vars from cylc.flow.timer import Timer from cylc.flow.util import cli_format @@ -1221,7 +1221,7 @@ def run_event_handlers(self, event, reason=""): Run workflow events 
only in live mode or skip mode. """ - if self.get_run_mode() in RunMode.NON_OVERRIDABLE_MODES.value: + if self.get_run_mode() in WORKFLOW_ONLY_MODES: return self.workflow_event_handler.handle(self, event, str(reason)) diff --git a/cylc/flow/scheduler_cli.py b/cylc/flow/scheduler_cli.py index aa5a821558c..739f26892c6 100644 --- a/cylc/flow/scheduler_cli.py +++ b/cylc/flow/scheduler_cli.py @@ -54,7 +54,7 @@ from cylc.flow.remote import cylc_server_cmd from cylc.flow.scheduler import Scheduler, SchedulerError from cylc.flow.scripts.common import cylc_header -from cylc.flow.task_state import RunMode +from cylc.flow.run_modes import WORKFLOW_RUN_MODES from cylc.flow.workflow_db_mgr import WorkflowDatabaseManager from cylc.flow.workflow_files import ( SUITERC_DEPR_MSG, @@ -130,14 +130,15 @@ RUN_MODE = OptionSettings( ["-m", "--mode"], help=( - f"Run mode: {RunMode.WORKFLOW_MODES.value} (default live)." - " Live mode executes the tasks as defined in the runtime section." + f"Run mode: {WORKFLOW_RUN_MODES} (default live)." + " Live mode executes the tasks as defined in the runtime" + " section." " Simulation, skip and dummy modes ignore part of tasks'" " runtime configurations. Simulation and dummy modes are" " designed for testing, and skip mode is for flow control." 
), metavar="STRING", action='store', dest="run_mode", - choices=list(RunMode.WORKFLOW_MODES.value), + choices=list(WORKFLOW_RUN_MODES), ) PLAY_RUN_MODE = deepcopy(RUN_MODE) diff --git a/cylc/flow/scripts/validate.py b/cylc/flow/scripts/validate.py index 9e4f8f7cb89..443557375cd 100755 --- a/cylc/flow/scripts/validate.py +++ b/cylc/flow/scripts/validate.py @@ -54,7 +54,7 @@ from cylc.flow.task_proxy import TaskProxy from cylc.flow.templatevars import get_template_vars from cylc.flow.terminal import cli_function -from cylc.flow.task_state import RunMode +from cylc.flow.run_modes import RunMode if TYPE_CHECKING: from cylc.flow.option_parsers import Values diff --git a/cylc/flow/task_events_mgr.py b/cylc/flow/task_events_mgr.py index 8285ef68e50..03452d9a4cd 100644 --- a/cylc/flow/task_events_mgr.py +++ b/cylc/flow/task_events_mgr.py @@ -68,6 +68,8 @@ JOB_LOG_OUT, JOB_LOG_ERR, ) +from cylc.flow.run_modes import ( + JOBLESS_MODES, RunMode, disable_task_event_handlers) from cylc.flow.task_message import ( ABORT_MESSAGE_PREFIX, FAIL_MESSAGE_PREFIX, VACATION_MESSAGE_PREFIX) from cylc.flow.task_state import ( @@ -80,7 +82,6 @@ TASK_STATUS_EXPIRED, TASK_STATUS_SUCCEEDED, TASK_STATUS_WAITING, - RunMode, ) from cylc.flow.task_outputs import ( TASK_OUTPUT_EXPIRED, @@ -940,7 +941,7 @@ def _process_message_check( def setup_event_handlers(self, itask, event, message): """Set up handlers for a task event.""" - if RunMode.disable_task_event_handlers(itask): + if disable_task_event_handlers(itask): return msg = "" if message != f"job {event}": @@ -1541,7 +1542,7 @@ def _insert_task_job( # do not submit jobs. 
if ( not itask.run_mode - or itask.run_mode in RunMode.JOBLESS_MODES.value + or itask.run_mode in JOBLESS_MODES or forced ): job_conf = {"submit_num": itask.submit_num} diff --git a/cylc/flow/task_job_mgr.py b/cylc/flow/task_job_mgr.py index 675fad0c6ce..40ac119f897 100644 --- a/cylc/flow/task_job_mgr.py +++ b/cylc/flow/task_job_mgr.py @@ -35,7 +35,7 @@ ) from shutil import rmtree from time import time -from typing import TYPE_CHECKING, Any, List, Tuple, Union, Optional +from typing import TYPE_CHECKING, Any, Dict, List, Tuple, Union, Optional from cylc.flow import LOG from cylc.flow.job_runner_mgr import JobPollContext @@ -59,14 +59,9 @@ get_platform, ) from cylc.flow.remote import construct_ssh_cmd -from cylc.flow.run_modes.simulation import ( - submit_task_job as simulation_submit_task_job) -from cylc.flow.run_modes.skip import ( - submit_task_job as skip_submit_task_job) -from cylc.flow.run_modes.dummy import ( - submit_task_job as dummy_submit_task_job) from cylc.flow.subprocctx import SubProcContext from cylc.flow.subprocpool import SubProcPool +from cylc.flow.run_modes import RunMode, WORKFLOW_ONLY_MODES from cylc.flow.task_action_timer import ( TaskActionTimer, TimerFlags @@ -105,7 +100,6 @@ TASK_STATUS_RUNNING, TASK_STATUS_WAITING, TASK_STATUSES_ACTIVE, - RunMode ) from cylc.flow.wallclock import ( get_current_time_string, @@ -249,7 +243,7 @@ def prep_submit_task_jobs(self, workflow, itasks, check_syntax=True): return [prepared_tasks, bad_tasks] def submit_task_jobs(self, workflow, itasks, curve_auth, - client_pub_key_dir, run_mode='live'): + client_pub_key_dir, run_mode=RunMode.LIVE): """Prepare for job submission and submit task jobs. Preparation (host selection, remote host init, and remote install) @@ -264,28 +258,42 @@ def submit_task_jobs(self, workflow, itasks, curve_auth, Return (list): list of tasks that attempted submission. 
""" - itasks, nonlive_tasks = self._nonlive_submit_task_jobs( - itasks, workflow, run_mode) + # submit "simulation/skip" mode tasks, modify "dummy" task configs: + itasks, submitted_nonlive_tasks = self.submit_nonlive_task_jobs( + workflow, itasks, run_mode) + + # submit "live" mode tasks (and "dummy" mode tasks) + submitted_live_tasks = self.submit_livelike_task_jobs( + workflow, itasks, curve_auth, client_pub_key_dir) + + return submitted_nonlive_tasks + submitted_live_tasks + + def submit_livelike_task_jobs( + self, workflow, itasks, curve_auth, client_pub_key_dir + ) -> 'List[TaskProxy]': + """Submission for live tasks and dummy tasks. + """ + done_tasks: 'List[TaskProxy]' = [] + # {platform: [itask, ...], ...} + auth_itasks: 'Dict[str, List[TaskProxy]]' = {} - # Prepare tasks for job submission prepared_tasks, bad_tasks = self.prep_submit_task_jobs( workflow, itasks) # Reset consumed host selection results self.task_remote_mgr.subshell_eval_reset() - if not prepared_tasks and not nonlive_tasks: + if not prepared_tasks: return bad_tasks - elif not prepared_tasks: - return nonlive_tasks - auth_itasks = {} # {platform: [itask, ...], ...} for itask in prepared_tasks: platform_name = itask.platform['name'] auth_itasks.setdefault(platform_name, []) auth_itasks[platform_name].append(itask) + # Submit task jobs for each platform - done_tasks = bad_tasks + nonlive_tasks + # Non-prepared tasks can be considered done for now: + done_tasks = bad_tasks for _, itasks in sorted(auth_itasks.items()): # Find the first platform where >1 host has not been tried and @@ -453,6 +461,7 @@ def submit_task_jobs(self, workflow, itasks, curve_auth, 'platform_name': itask.platform['name'], 'job_runner_name': itask.summary['job_runner_name'], }) + itask.is_manual_submit = False if ri_map[install_target] == REMOTE_FILE_INSTALL_255: @@ -1009,10 +1018,10 @@ def _set_retry_timers( except KeyError: itask.try_timers[key] = TaskActionTimer(delays=delays) - def _nonlive_submit_task_jobs( + def 
submit_nonlive_task_jobs( self: 'TaskJobManager', - itasks: 'List[TaskProxy]', workflow: str, + itasks: 'List[TaskProxy]', workflow_run_mode: str, ) -> 'Tuple[List[TaskProxy], List[TaskProxy]]': """Identify task mode and carry out alternative submission @@ -1043,15 +1052,15 @@ def _nonlive_submit_task_jobs( # Get task config with broadcasts applied: rtconfig = self.task_events_mgr.broadcast_mgr.get_updated_rtconfig( itask) - # Apply task run mode - if workflow_run_mode in RunMode.NON_OVERRIDABLE_MODES.value: + if workflow_run_mode in WORKFLOW_ONLY_MODES: # Task run mode cannot override workflow run-mode sim or dummy: run_mode = workflow_run_mode else: # If workflow mode is skip or live and task mode is set, # override workflow mode, else use workflow mode. run_mode = rtconfig.get('run mode', None) or workflow_run_mode + # Store the run mode of the this submission: itask.run_mode = run_mode @@ -1059,19 +1068,23 @@ def _nonlive_submit_task_jobs( # tasks to list of tasks to put through live # submission pipeline - We decide based on the output # of the submit method: - is_nonlive = False - if run_mode == RunMode.DUMMY.value: - is_nonlive = dummy_submit_task_job( - self, itask, rtconfig, workflow, now) - elif run_mode == RunMode.SIMULATION.value: - is_nonlive = simulation_submit_task_job( + submit_func = RunMode(run_mode).get_submit_method() + if not submit_func: + # Return to nonlive. 
+ nonlive_mode = False + else: + nonlive_mode = submit_func( self, itask, rtconfig, workflow, now) - elif run_mode == RunMode.SKIP.value: - is_nonlive = skip_submit_task_job( - self, itask, rtconfig, now) - # Assign task to list: - if is_nonlive: + if nonlive_mode: + self.workflow_db_mgr.put_insert_task_states( + itask, + { + 'submit_num': itask.submit_num, + 'flow_nums': serialise_set(itask.flow_nums), + 'time_created': itask.summary['submitted_time_string'] + } + ) nonlive_tasks.append(itask) else: lively_tasks.append(itask) diff --git a/cylc/flow/task_outputs.py b/cylc/flow/task_outputs.py index c9be4884152..1b8b4f2709a 100644 --- a/cylc/flow/task_outputs.py +++ b/cylc/flow/task_outputs.py @@ -642,7 +642,6 @@ def iter_required_messages( set(self._message_to_compvar.values()), force_optional=exclude ).items(): - # breakpoint(header=f"=== {compvar=}, {is_optional=} ===") if is_optional is False: for message, _compvar in self._message_to_compvar.items(): if _compvar == compvar: diff --git a/cylc/flow/task_pool.py b/cylc/flow/task_pool.py index a554310f645..7aea9055137 100644 --- a/cylc/flow/task_pool.py +++ b/cylc/flow/task_pool.py @@ -42,6 +42,7 @@ from cylc.flow.id import Tokens, detokenise from cylc.flow.id_cli import contains_fnmatch from cylc.flow.id_match import filter_ids +from cylc.flow.run_modes import RunMode from cylc.flow.workflow_status import StopMode from cylc.flow.task_action_timer import TaskActionTimer, TimerFlags from cylc.flow.task_events_mgr import ( @@ -53,7 +54,6 @@ from cylc.flow.task_id import TaskID from cylc.flow.task_proxy import TaskProxy from cylc.flow.task_state import ( - RunMode, TASK_STATUSES_ACTIVE, TASK_STATUSES_FINAL, TASK_STATUS_WAITING, @@ -1433,9 +1433,10 @@ def spawn_on_output(self, itask: TaskProxy, output: str) -> None: tasks = [c_task] for t in tasks: + t.satisfy_me( [itask.tokens.duplicate(task_sel=output)], - getattr(itask.tdef, 'run_mode', RunMode.LIVE.value) + mode=itask.run_mode ) 
self.data_store_mgr.delta_task_prerequisite(t) if not in_pool: @@ -1564,7 +1565,7 @@ def spawn_on_all_outputs( if completed_only: c_task.satisfy_me( [itask.tokens.duplicate(task_sel=message)], - itask.run_mode + mode=itask.run_mode ) self.data_store_mgr.delta_task_prerequisite(c_task) self.add_to_pool(c_task) diff --git a/cylc/flow/task_proxy.py b/cylc/flow/task_proxy.py index b3c6e891da1..7219fafef18 100644 --- a/cylc/flow/task_proxy.py +++ b/cylc/flow/task_proxy.py @@ -38,9 +38,9 @@ from cylc.flow import LOG from cylc.flow.flow_mgr import stringify_flow_nums from cylc.flow.platforms import get_platform +from cylc.flow.run_modes import RunMode from cylc.flow.task_action_timer import TimerFlags from cylc.flow.task_state import ( - RunMode, TaskState, TASK_STATUS_WAITING, TASK_STATUS_EXPIRED, @@ -566,6 +566,7 @@ def satisfy_me( Return a set of unmatched task messages. """ + used = self.state.satisfy_me(task_messages, mode) return set(task_messages) - used diff --git a/cylc/flow/task_state.py b/cylc/flow/task_state.py index 838414198f6..8447a7bed6d 100644 --- a/cylc/flow/task_state.py +++ b/cylc/flow/task_state.py @@ -16,8 +16,6 @@ """Task state related logic.""" - -from enum import Enum from typing import ( TYPE_CHECKING, Dict, @@ -41,7 +39,6 @@ if TYPE_CHECKING: from cylc.flow.cycling import PointBase - from cylc.flow.option_parsers import Values from cylc.flow.id import Tokens from cylc.flow.prerequisite import PrereqMessage from cylc.flow.taskdef import TaskDef @@ -179,73 +176,6 @@ } -class RunMode(Enum): - """The possible run modes of a task/workflow.""" - - LIVE = 'live' - """Task will run normally.""" - - SIMULATION = 'simulation' - """Simulates job submission with configurable exection time - and succeeded/failed outcomes(does not submit real jobs).""" - - DUMMY = 'dummy' - """Submits real jobs with empty scripts.""" - - SKIP = 'skip' - """Skips job submission; sets required outputs (by default) or - configured outputs.""" - - WORKFLOW_MODES = (LIVE, DUMMY, 
SIMULATION, SKIP) - """Workflow mode not sensible mode for workflow. - - n.b. not using a set to ensure ordering in CLI - """ - - OVERRIDING_MODES = frozenset({LIVE, SKIP}) - """Modes which can be set in task config.""" - - NON_OVERRIDABLE_MODES = frozenset({SIMULATION, DUMMY}) - - JOBLESS_MODES = frozenset({SKIP, SIMULATION}) - """Modes which completely ignore the standard submission path.""" - - def describe(self): - """Return user friendly description of run mode. - - For use by configuration spec documenter. - """ - if self == self.LIVE: - return "Task will run normally." - if self == self.SKIP: - return ( - "Skips job submission; sets required outputs" - " (by default) or configured outputs.") - raise KeyError(f'No description for {self}.') - - @staticmethod - def get(options: 'Values') -> str: - """Return the workflow run mode from the options.""" - return getattr(options, 'run_mode', None) or RunMode.LIVE.value - - @staticmethod - def disable_task_event_handlers(itask): - """Should we disable event handlers for this task? 
- - No event handlers in simulation mode, or in skip mode - if we don't deliberately enable them: - """ - mode = itask.run_mode - return ( - mode == RunMode.SIMULATION.value - or ( - mode == RunMode.SKIP.value - and itask.platform.get( - 'disable task event handlers', False) - ) - ) - - def status_leq(status_a, status_b): """"Return True if status_a <= status_b""" return (TASK_STATUSES_ORDERED.index(status_a) <= diff --git a/cylc/flow/unicode_rules.py b/cylc/flow/unicode_rules.py index 0dbb5aa22f9..b24d576332d 100644 --- a/cylc/flow/unicode_rules.py +++ b/cylc/flow/unicode_rules.py @@ -22,8 +22,9 @@ _TASK_NAME_CHARACTERS, _TASK_NAME_PREFIX, ) +from cylc.flow.run_modes import RunMode from cylc.flow.task_qualifiers import TASK_QUALIFIERS -from cylc.flow.task_state import TASK_STATUSES_ORDERED, RunMode +from cylc.flow.task_state import TASK_STATUSES_ORDERED ENGLISH_REGEX_MAP = { r'\w': 'alphanumeric', diff --git a/tests/functional/cylc-config/00-simple/section2.stdout b/tests/functional/cylc-config/00-simple/section2.stdout index 049db739435..559d1c2556c 100644 --- a/tests/functional/cylc-config/00-simple/section2.stdout +++ b/tests/functional/cylc-config/00-simple/section2.stdout @@ -15,7 +15,7 @@ execution time limit = submission polling intervals = submission retry delays = - run mode = + run mode = live [[[meta]]] title = description = @@ -94,7 +94,7 @@ execution time limit = submission polling intervals = submission retry delays = - run mode = + run mode = live [[[meta]]] title = description = @@ -173,7 +173,7 @@ execution time limit = submission polling intervals = submission retry delays = - run mode = + run mode = live [[[meta]]] title = description = @@ -252,7 +252,7 @@ execution time limit = submission polling intervals = submission retry delays = - run mode = + run mode = live [[[directives]]] job_type = serial [[[meta]]] @@ -332,7 +332,7 @@ execution time limit = submission polling intervals = submission retry delays = - run mode = + run mode = live 
[[[directives]]] job_type = parallel [[[meta]]] @@ -412,7 +412,7 @@ execution time limit = submission polling intervals = submission retry delays = - run mode = + run mode = live [[[directives]]] job_type = serial [[[meta]]] @@ -492,7 +492,7 @@ execution time limit = submission polling intervals = submission retry delays = - run mode = + run mode = live [[[directives]]] job_type = serial [[[meta]]] @@ -572,7 +572,7 @@ execution time limit = submission polling intervals = submission retry delays = - run mode = + run mode = live [[[directives]]] job_type = parallel [[[meta]]] @@ -652,7 +652,7 @@ execution time limit = submission polling intervals = submission retry delays = - run mode = + run mode = live [[[directives]]] job_type = parallel [[[meta]]] @@ -732,7 +732,7 @@ execution time limit = submission polling intervals = submission retry delays = - run mode = + run mode = live [[[directives]]] job_type = serial [[[meta]]] @@ -812,7 +812,7 @@ execution time limit = submission polling intervals = submission retry delays = - run mode = + run mode = live [[[directives]]] job_type = serial [[[meta]]] @@ -892,7 +892,7 @@ execution time limit = submission polling intervals = submission retry delays = - run mode = + run mode = live [[[directives]]] job_type = parallel [[[meta]]] @@ -972,7 +972,7 @@ execution time limit = submission polling intervals = submission retry delays = - run mode = + run mode = live [[[directives]]] job_type = parallel [[[meta]]] diff --git a/tests/functional/run_modes/06-run-mode-overrides.t b/tests/functional/run_modes/06-run-mode-overrides.t index f6d4faafb30..c7fc3325b8f 100644 --- a/tests/functional/run_modes/06-run-mode-overrides.t +++ b/tests/functional/run_modes/06-run-mode-overrides.t @@ -18,7 +18,7 @@ # Testing Skip mode functionality. . "$(dirname "$0")/test_header" -set_test_number 11 +set_test_number 6 # Install and run the workflow in live mode (default). 
# Check that tasks with run mode unset and run mode = live @@ -41,26 +41,5 @@ done JOB_LOGS="${WORKFLOW_RUN_DIR}/log/job/1001" run_fail "${TEST_NAME}:broadcast run mode=skip" ls "${JOB_LOGS}/default_/" -purge - -# Install and run the workflow in skip mode. -# Check that tasks with run mode unset and run mode = skip -# don't leave log files, and that skip mode tasks does. -TEST_NAME="${TEST_NAME_BASE}:skip-workflow" -install_workflow "${TEST_NAME_BASE}" "${TEST_NAME_BASE}" -workflow_run_ok "${TEST_NAME}:run" \ - cylc play "${WORKFLOW_NAME}" \ - --no-detach \ - --mode skip \ - --set='changemode="live"' \ - --final-cycle-point=1000 - -JOB_LOGS="${WORKFLOW_RUN_DIR}/log/job/1000" -run_ok "${TEST_NAME}:run mode=live" ls "${JOB_LOGS}/live_" -run_fail "${TEST_NAME}:run mode=default" ls "${JOB_LOGS}/default_" -run_fail "${TEST_NAME}:run mode=skip" ls "${JOB_LOGS}/skip_" -JOB_LOGS="${WORKFLOW_RUN_DIR}/log/job/1000" -named_grep_ok "${TEST_NAME}:run mode=live" "===.*===" "${JOB_LOGS}/live_/NN/job.out" - purge exit 0 diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index bce6ea64e9f..2f0aa5afab4 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -33,6 +33,7 @@ install as cylc_install, get_option_parser as install_gop ) +from cylc.flow.task_state import TASK_STATUS_SUBMITTED, TASK_STATUS_SUCCEEDED from cylc.flow.util import serialise_set from cylc.flow.wallclock import get_current_time_string from cylc.flow.workflow_files import infer_latest_run_from_id @@ -47,6 +48,7 @@ _start_flow, ) + if TYPE_CHECKING: from cylc.flow.network.client import WorkflowRuntimeClient from cylc.flow.scheduler import Scheduler @@ -672,3 +674,43 @@ async def _reftest( return triggers return _reftest + + +@pytest.fixture +def capture_live_submissions(capcall, monkeypatch): + """Capture live submission attempts. + + This prevents real jobs from being submitted to the system. 
+ + If you call this fixture from a test, it will return a set of tasks that + would have been submitted had this fixture not been used. + """ + def fake_submit(self, _workflow, itasks, *_): + self.submit_nonlive_task_jobs(_workflow, itasks, 'simulation') + for itask in itasks: + for status in (TASK_STATUS_SUBMITTED, TASK_STATUS_SUCCEEDED): + self.task_events_mgr.process_message( + itask, + 'INFO', + status, + '2000-01-01T00:00:00Z', + '(received)', + ) + return itasks + + # suppress and capture live submissions + submit_live_calls = capcall( + 'cylc.flow.task_job_mgr.TaskJobManager.submit_livelike_task_jobs', + fake_submit) + + + + def get_submissions(): + nonlocal submit_live_calls + return { + itask.identity + for ((_self, _workflow, itasks, *_), _kwargs) in submit_live_calls + for itask in itasks + } + + return get_submissions diff --git a/tests/integration/run_modes/test_mode_overrides.py b/tests/integration/run_modes/test_mode_overrides.py index f9ab318e0e6..7c75c7c69f1 100644 --- a/tests/integration/run_modes/test_mode_overrides.py +++ b/tests/integration/run_modes/test_mode_overrides.py @@ -30,48 +30,42 @@ import pytest +from cylc.flow.cycling.iso8601 import ISO8601Point +from cylc.flow.run_modes import WORKFLOW_RUN_MODES -@pytest.mark.parametrize( - 'workflow_run_mode', [('live'), ('skip')]) + +@pytest.mark.parametrize('workflow_run_mode', sorted(WORKFLOW_RUN_MODES)) async def test_run_mode_override_from_config( - workflow_run_mode, flow, scheduler, run, complete, log_filter + capture_live_submissions, + flow, + scheduler, + run, + complete, + workflow_run_mode ): - """Test that ``[runtime][TASK]run mode`` overrides workflow modes. 
- """ - cfg = { - "scheduler": {"cycle point format": "%Y"}, - "scheduling": { - "initial cycle point": "1000", - "final cycle point": "1000", - "graph": {"P1Y": "live_\nskip_\ndefault_"}}, - "runtime": { - "skip_": {"run mode": "skip"}, - "live_": {"run mode": "live"} + """Test that `[runtime][]run mode` overrides workflow modes.""" + id_ = flow({ + 'scheduling': { + 'graph': { + 'R1': 'live & skip', + }, + }, + 'runtime': { + 'live': {'run mode': 'live'}, + 'skip': {'run mode': 'skip'}, } - } - id_ = flow(cfg) + }) schd = scheduler(id_, run_mode=workflow_run_mode, paused_start=False) - expect_template = ( - '[1000/{}_/01:preparing] submitted to localhost:background') - - async with run(schd) as log: + async with run(schd): await complete(schd) - # Live task has been really submitted: - assert log_filter(log, contains=expect_template.format('live')) - - # Default is the same as workflow: - if workflow_run_mode == 'live': - assert log_filter(log, contains=expect_template.format('default')) - else: - assert log_filter( - log, contains='[1000/default_/01:running] => succeeded') - assert not log_filter( - log, contains=expect_template.format('default')) - - # Skip task has run, but not actually been submitted: - assert log_filter(log, contains='[1000/skip_/01:running] => succeeded') - assert not log_filter(log, contains=expect_template.format('skip')) + if workflow_run_mode == 'live': + assert capture_live_submissions() == {'1/live'} + elif workflow_run_mode == 'dummy': + # Skip mode doesn't override dummy mode: + assert capture_live_submissions() == {'1/live', '1/skip'} + else: + assert capture_live_submissions() == set() async def test_force_trigger_does_not_override_run_mode( @@ -81,34 +75,19 @@ async def test_force_trigger_does_not_override_run_mode( ): """Force-triggering a task will not override the run mode. - Tasks with run mode = skip will continue to abide by - the is_held flag as normal. 
- Taken from spec at - https://github.com/cylc/cylc-admin/blob/master/ - docs/proposal-skip-mode.md#proposal + https://github.com/cylc/cylc-admin/blob/master/docs/proposal-skip-mode.md#proposal """ wid = flow({ 'scheduling': {'graph': {'R1': 'foo'}}, 'runtime': {'foo': {'run mode': 'skip'}} }) - schd = scheduler(wid) + schd = scheduler(wid, run_mode="live") async with start(schd): - # Check that task isn't held at first foo = schd.pool.get_tasks()[0] - assert foo.state.is_held is False - # Hold task, check that it's held: - schd.pool.hold_tasks('1/foo') - assert foo.state.is_held is True - - # Trigger task, check that it's _still_ held: + # Force trigger task: schd.pool.force_trigger_tasks('1/foo', [1]) - assert foo.state.is_held is True - - # run_mode will always be simulation from test - # workflow before submit routine... - assert not foo.run_mode # ... but job submission will always change this to the correct mode: schd.task_job_mgr.submit_task_jobs( @@ -116,11 +95,45 @@ async def test_force_trigger_does_not_override_run_mode( [foo], schd.server.curve_auth, schd.server.client_pub_key_dir) + assert foo.run_mode == 'skip' +async def test_run_mode_skip_abides_by_held( + flow, + scheduler, + run, + complete +): + """Tasks with run mode = skip will continue to abide by the + is_held flag as normal. 
+ + Taken from spec at + https://github.com/cylc/cylc-admin/blob/master/docs/proposal-skip-mode.md#proposal + """ + wid = flow({ + 'scheduling': {'graph': {'R1': 'foo'}}, + 'runtime': {'foo': {'run mode': 'skip'}} + }) + schd = scheduler(wid, run_mode="live", paused_start=False) + async with run(schd): + foo = schd.pool.get_tasks()[0] + assert foo.state.is_held is False + + # Hold task, check that it's held: + schd.pool.hold_tasks('1/foo') + assert foo.state.is_held is True + + # Run to completion, should happen if task isn't held: + with pytest.raises( + Exception, + match="Timeout waiting for workflow to shut down" + ): + await complete(schd, timeout=5) + + async def test_run_mode_override_from_broadcast( - flow, scheduler, run, complete, log_filter + flow, scheduler, start, complete, log_filter, capture_live_submissions ): """Test that run_mode modifications only apply to one task. """ @@ -136,17 +149,17 @@ async def test_run_mode_override_from_broadcast( id_ = flow(cfg) schd = scheduler(id_, run_mode='live', paused_start=False) - async with run(schd): + async with start(schd): schd.broadcast_mgr.put_broadcast( ['1000'], ['foo'], [{'run mode': 'skip'}]) - foo_1000, foo_1001 = schd.pool.get_tasks() + foo_1000 = schd.pool.get_task(ISO8601Point('1000'), 'foo') + foo_1001 = schd.pool.get_task(ISO8601Point('1001'), 'foo') schd.task_job_mgr.submit_task_jobs( schd.workflow, [foo_1000, foo_1001], schd.server.curve_auth, schd.server.client_pub_key_dir) - assert foo_1000.run_mode == 'skip' - assert foo_1001.run_mode == 'live' + assert capture_live_submissions() == {'1001/foo'} diff --git a/tests/integration/run_modes/test_nonlive.py b/tests/integration/run_modes/test_nonlive.py index 42ddca128ce..eca02cf5018 100644 --- a/tests/integration/run_modes/test_nonlive.py +++ b/tests/integration/run_modes/test_nonlive.py @@ -14,19 +14,24 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
+import pytest from typing import Any, Dict +from cylc.flow.cycling.integer import IntegerPoint +from cylc.flow.cycling.iso8601 import ISO8601Point + + # Define here to ensure test doesn't just mirror code: KGO = { 'live': { 'flow_nums': '[1]', 'is_manual_submit': 0, 'try_num': 1, - 'submit_status': None, + 'submit_status': 0, 'run_signal': None, - 'run_status': None, - 'platform_name': 'localhost', - 'job_runner_name': 'background', + 'run_status': 0, + 'platform_name': 'simulation', + 'job_runner_name': 'simulation', 'job_id': None}, 'skip': { 'flow_nums': '[1]', @@ -36,7 +41,7 @@ 'run_signal': None, 'run_status': 0, 'platform_name': 'skip', - 'job_runner_name': 'simulation', + 'job_runner_name': 'skip', 'job_id': None}, } @@ -47,33 +52,78 @@ def not_time(data: Dict[str, Any]): return {k: v for k, v in data.items() if 'time' not in k} -async def test_task_jobs(flow, scheduler, start): - """Ensure that task job data is added to the database correctly - for each run mode. +@pytest.fixture +def submit_and_check_db(): + """Wraps up testing that we want to do repeatedly in + test_db_task_jobs. 
""" - schd = scheduler(flow({ - 'scheduling': {'graph': { - 'R1': '&'.join(KGO)}}, - 'runtime': { - mode: {'run mode': mode} for mode in KGO} - })) - async with start(schd): + def _inner(schd): + # Submit task jobs: schd.task_job_mgr.submit_task_jobs( schd.workflow, schd.pool.get_tasks(), schd.server.curve_auth, schd.server.client_pub_key_dir ) + # Make sure that db changes are enacted: schd.workflow_db_mgr.process_queued_ops() for mode, kgo in KGO.items(): - taskdata = not_time( - schd.workflow_db_mgr.pub_dao.select_task_job(1, mode)) - assert taskdata == kgo, ( + task_jobs = schd.workflow_db_mgr.pub_dao.select_task_job(1, mode) + + # Check all non-datetime items against KGO: + assert not_time(task_jobs) == kgo, ( f'Mode {mode}: incorrect db entries.') + # Check that timestamps have been created: + for timestamp in [ + 'time_submit', 'time_submit_exit', 'time_run', 'time_run_exit' + ]: + assert task_jobs[timestamp] is not None + return _inner + + +async def test_db_task_jobs( + flow, scheduler, start, capture_live_submissions, + submit_and_check_db +): + """Ensure that task job data is added to the database correctly + for each run mode. + """ + schd = scheduler(flow({ + 'scheduling': {'graph': { + 'R1': '&'.join(KGO)}}, + 'runtime': { + mode: {'run mode': mode} for mode in KGO} + })) + async with start(schd): + # Reference all task proxies so we can examine them + # at the end of the test: + itask_skip = schd.pool.get_task(IntegerPoint('1'), 'skip') + itask_live = schd.pool.get_task(IntegerPoint('1'), 'live') + + + submit_and_check_db(schd) + + # Set outputs to failed: schd.pool.set_prereqs_and_outputs('*', ['failed'], [], []) + submit_and_check_db(schd) + + assert itask_live.run_mode == 'simulation' + assert itask_skip.run_mode == 'skip' + + +async def test_db_task_states( + one_conf, flow, scheduler, start +): + """Test that tasks will have the same information entered into the task + state database whichever mode is used. 
+ """ + conf = one_conf + conf['runtime'] = {'one': {'run mode': 'skip'}} + schd = scheduler(flow(conf)) + async with start(schd): schd.task_job_mgr.submit_task_jobs( schd.workflow, schd.pool.get_tasks(), @@ -81,15 +131,18 @@ async def test_task_jobs(flow, scheduler, start): schd.server.client_pub_key_dir ) schd.workflow_db_mgr.process_queued_ops() + result = schd.workflow_db_mgr.pri_dao.connect().execute( + 'SELECT * FROM task_states').fetchone() - for mode, kgo in KGO.items(): - taskdata = not_time( - schd.workflow_db_mgr.pub_dao.select_task_job(1, mode)) - assert taskdata == kgo, ( - f'Mode {mode}: incorrect db entries.') + # Submit number has been added to the table: + assert result[5] == 1 + # time_created added to the table + assert result[3] -async def test_mean_task_time(flow, scheduler, run, complete): +async def test_mean_task_time( + flow, scheduler, start, complete, capture_live_submissions +): """Non-live tasks are not added to the list of task times, so skipping tasks will not affect how long Cylc expects tasks to run. 
""" @@ -100,21 +153,26 @@ async def test_mean_task_time(flow, scheduler, run, complete): 'graph': {'P1Y': 'foo'}} }), run_mode='live') - async with run(schd): - tasks = schd.pool.get_tasks() - tdef = tasks[0].tdef - assert list(tdef.elapsed_times) == [] + async with start(schd): + itask = schd.pool.get_task(ISO8601Point('10000101T0000Z'), 'foo') + assert list(itask.tdef.elapsed_times) == [] # Make the task run in skip mode at one cycle: schd.broadcast_mgr.put_broadcast( ['1000'], ['foo'], [{'run mode': 'skip'}]) + # Fake adding some other examples of the task: + itask.tdef.elapsed_times.extend([133.0, 132.4]) + # Submit two tasks: schd.task_job_mgr.submit_task_jobs( schd.workflow, - tasks[:2], + [itask], schd.server.curve_auth, schd.server.client_pub_key_dir ) - await complete(schd, '10010101T0000Z/foo') - assert len(tdef.elapsed_times) == 1 + + # Ensure that the skipped task has succeeded, and that the + # number of items in the elapsed_times has not changed. + assert itask.state.status == 'succeeded' + assert len(itask.tdef.elapsed_times) == 2 diff --git a/tests/integration/run_modes/test_simulation.py b/tests/integration/run_modes/test_simulation.py index 4c48a572b15..debbf117d7a 100644 --- a/tests/integration/run_modes/test_simulation.py +++ b/tests/integration/run_modes/test_simulation.py @@ -62,8 +62,8 @@ def _run_simjob(schd, point, task): itask = schd.pool.get_task(point, task) itask.state.is_queued = False monkeytime(0) - schd.task_job_mgr._nonlive_submit_task_jobs( - [itask], schd.workflow, 'simulation') + schd.task_job_mgr.submit_nonlive_task_jobs( + schd.workflow, [itask], 'simulation') monkeytime(itask.mode_settings.timeout + 1) # Run Time Check @@ -170,8 +170,8 @@ def test_fail_once(sim_time_check_setup, itask, point, results, monkeypatch): for i, result in enumerate(results): itask.try_timers['execution-retry'].num = i - schd.task_job_mgr._nonlive_submit_task_jobs( - [itask], schd.workflow, 'simulation') + schd.task_job_mgr.submit_nonlive_task_jobs( 
+ schd.workflow, [itask], 'simulation') assert itask.mode_settings.sim_task_fails is result @@ -190,11 +190,11 @@ def test_task_finishes(sim_time_check_setup, monkeytime, caplog): fail_all_1066 = schd.pool.get_task(ISO8601Point('1066'), 'fail_all') fail_all_1066.state.status = 'running' fail_all_1066.state.is_queued = False - schd.task_job_mgr._nonlive_submit_task_jobs( - [fail_all_1066], schd.workflow, 'simulation') + schd.task_job_mgr.submit_nonlive_task_jobs( + schd.workflow, [fail_all_1066], 'simulation') # For the purpose of the test delete the started time set by - # _nonlive_submit_task_jobs. + # submit_nonlive_task_jobs. fail_all_1066.summary['started_time'] = 0 # Before simulation time is up: @@ -220,8 +220,8 @@ def test_task_sped_up(sim_time_check_setup, monkeytime): # Run the job submission method: monkeytime(0) - schd.task_job_mgr._nonlive_submit_task_jobs( - [fast_forward_1066], schd.workflow, 'simulation') + schd.task_job_mgr.submit_nonlive_task_jobs( + schd.workflow, [fast_forward_1066], 'simulation') fast_forward_1066.state.is_queued = False result = sim_time_check(schd.task_events_mgr, [fast_forward_1066], '') @@ -274,8 +274,8 @@ async def test_settings_restart( async with start(schd): og_timeouts = {} for itask in schd.pool.get_tasks(): - schd.task_job_mgr._nonlive_submit_task_jobs( - [itask], schd.workflow, 'simulation') + schd.task_job_mgr.submit_nonlive_task_jobs( + schd.workflow, [itask], 'simulation') og_timeouts[itask.identity] = itask.mode_settings.timeout @@ -288,12 +288,8 @@ async def test_settings_restart( # Stop and restart the scheduler: schd = scheduler(id_) async with start(schd): - # Get our tasks and fix wallclock: - itasks = schd.pool.get_tasks() - for itask in itasks: - - # Check that we haven't got started time & mode settings back: - assert itask.summary['started_time'] is None + for itask in schd.pool.get_tasks(): + # Check that we haven't got mode settings back: assert itask.mode_settings is None if itask.identity == 
'1066/two': @@ -316,11 +312,8 @@ async def test_settings_restart( ) is False # Check that the itask.mode_settings is now re-created - assert itask.mode_settings.__dict__ == { - 'simulated_run_length': 60.0, - 'sim_task_fails': True, - 'timeout': expected_timeout - } + assert itask.mode_settings.simulated_run_length == 60.0 + assert itask.mode_settings.sim_task_fails is True async def test_settings_reload( @@ -351,7 +344,7 @@ async def test_settings_reload( schd = scheduler(id_) async with start(schd): # Submit first psuedo-job and "run" to failure: - one_1066 = schd.pool.get_tasks()[0] + one_1066 = schd.pool.get_task(ISO8601Point('1066'), 'one') itask = run_simjob(schd, one_1066.point, 'one') assert itask.state.outputs.is_message_complete('failed') is False @@ -395,12 +388,12 @@ async def test_settings_broadcast( }, defaults=False) schd = scheduler(id_, paused_start=False, run_mode='simulation') async with start(schd) as log: - itask = schd.pool.get_tasks()[0] + itask = schd.pool.get_task(ISO8601Point('1066'), 'one') itask.state.is_queued = False # Submit the first - the sim task will fail: - schd.task_job_mgr._nonlive_submit_task_jobs( - [itask], schd.workflow, 'simulation') + schd.task_job_mgr.submit_nonlive_task_jobs( + schd.workflow, [itask], 'simulation') assert itask.mode_settings.sim_task_fails is True # Let task finish. 
@@ -418,14 +411,14 @@ async def test_settings_broadcast( 'simulation': {'fail cycle points': ''} }]) # Submit again - result is different: - schd.task_job_mgr._nonlive_submit_task_jobs( - [itask], schd.workflow, 'simulation') + schd.task_job_mgr.submit_nonlive_task_jobs( + schd.workflow, [itask], 'simulation') assert itask.mode_settings.sim_task_fails is False # Assert Clearing the broadcast works schd.broadcast_mgr.clear_broadcast() - schd.task_job_mgr._nonlive_submit_task_jobs( - [itask], schd.workflow, 'simulation') + schd.task_job_mgr.submit_nonlive_task_jobs( + schd.workflow, [itask], 'simulation') assert itask.mode_settings.sim_task_fails is True # Assert that list of broadcasts doesn't change if we submit @@ -435,8 +428,8 @@ async def test_settings_broadcast( ['1066'], ['one'], [{ 'simulation': {'fail cycle points': 'higadfuhasgiurguj'} }]) - schd.task_job_mgr._nonlive_submit_task_jobs( - [itask], schd.workflow, 'simulation') + schd.task_job_mgr.submit_nonlive_task_jobs( + schd.workflow, [itask], 'simulation') assert ( 'Invalid ISO 8601 date representation: higadfuhasgiurguj' in log.messages[-1]) @@ -449,8 +442,8 @@ async def test_settings_broadcast( ['1066'], ['one'], [{ 'simulation': {'fail cycle points': '1'} }]) - schd.task_job_mgr._nonlive_submit_task_jobs( - [itask], schd.workflow, 'simulation') + schd.task_job_mgr.submit_nonlive_task_jobs( + schd.workflow, [itask], 'simulation') assert ( 'Invalid ISO 8601 date representation: 1' in log.messages[-1]) @@ -461,8 +454,8 @@ async def test_settings_broadcast( 'simulation': {'fail cycle points': '1945, 1977, 1066'}, 'execution retry delays': '3*PT2S' }]) - schd.task_job_mgr._nonlive_submit_task_jobs( - [itask], schd.workflow, 'simulation') + schd.task_job_mgr.submit_nonlive_task_jobs( + schd.workflow, [itask], 'simulation') assert itask.mode_settings.sim_task_fails is True assert itask.try_timers['execution-retry'].delays == [2.0, 2.0, 2.0] # n.b. 
rtconfig should remain unchanged, lest we cancel broadcasts: diff --git a/tests/integration/run_modes/test_skip.py b/tests/integration/run_modes/test_skip.py index bc9f29116f2..b4c5cfdc2fb 100644 --- a/tests/integration/run_modes/test_skip.py +++ b/tests/integration/run_modes/test_skip.py @@ -16,6 +16,8 @@ """Test for skip mode integration. """ +from cylc.flow.cycling.integer import IntegerPoint + async def test_settings_override_from_broadcast( flow, scheduler, start, complete, log_filter @@ -78,15 +80,16 @@ async def test_broadcast_changes_set_skip_outputs( | The skip keyword should not be allowed in custom outputs. """ wid = flow({ - 'scheduling': {'graph': {'R1': 'foo:expect_this'}}, - 'runtime': {'foo': {'outputs': {'expect_this': 'some message'}}} + 'scheduling': {'graph': {'R1': 'foo:x?\nfoo:y?'}}, + 'runtime': {'foo': {'outputs': { + 'x': 'some message', 'y': 'another message'}}} }) schd = scheduler(wid, run_mode='live') async with start(schd): schd.broadcast_mgr.put_broadcast( ['1'], ['foo'], - [{'skip': {'outputs': 'expect_this'}}], + [{'skip': {'outputs': 'x'}}], ) foo, = schd.pool.get_tasks() schd.pool.set_prereqs_and_outputs( @@ -94,14 +97,18 @@ async def test_broadcast_changes_set_skip_outputs( foo_outputs = foo.state.outputs.get_completed_outputs() - assert 'expect_this' in foo_outputs - assert foo_outputs['expect_this'] == '(manually completed)' + assert foo_outputs == { + 'submitted': '(manually completed)', + 'started': '(manually completed)', + 'succeeded': '(manually completed)', + 'x': '(manually completed)'} async def test_skip_mode_outputs( flow, scheduler, reftest, ): - """Nearly a functional test of the output emission of skip mode tasks + """Skip mode can be configured by the `[runtime][][skip]` + section. 
Skip mode proposal point 2 https://github.com/cylc/cylc-admin/blob/master/docs/proposal-skip-mode.md @@ -110,7 +117,7 @@ async def test_skip_mode_outputs( # By default, all required outputs will be generated # plus succeeded if success is optional: foo? & foo:required_out => success_if_optional & required_outs - + # The outputs submitted and started are always produced # and do not need to be defined in outputs: foo:submitted => submitted_always @@ -159,7 +166,7 @@ async def test_skip_mode_outputs( async def test_doesnt_release_held_tasks( - one_conf, flow, scheduler, start, log_filter + one_conf, flow, scheduler, start, log_filter, capture_live_submissions ): """Point 5 of the proposal https://github.com/cylc/cylc-admin/blob/master/docs/proposal-skip-mode.md @@ -168,59 +175,30 @@ async def test_doesnt_release_held_tasks( | flag as normal. """ - schd = scheduler(flow(one_conf), run_mode='skip') + one_conf['runtime'] = {'one': {'run mode': 'skip'}} + schd = scheduler(flow(one_conf), run_mode='live', paused_start=False) async with start(schd) as log: - itask = schd.pool.get_tasks()[0] + itask, = schd.pool.get_tasks() msg = 'held tasks shoudn\'t {}' # Set task to held and check submission in skip mode doesn't happen: itask.state.is_held = True - schd.task_job_mgr.submit_task_jobs( - schd.workflow, - [itask], - schd.server.curve_auth, - schd.server.client_pub_key_dir, - run_mode=schd.get_run_mode() - ) + + # Relinquish contol to the main loop. 
+ schd.release_queued_tasks() + assert not log_filter(log, contains='=> running'), msg.format('run') assert not log_filter(log, contains='=> succeeded'), msg.format( 'succeed') # Release held task and assert that it now skips successfully: schd.pool.release_held_tasks(['1/one']) - schd.task_job_mgr.submit_task_jobs( - schd.workflow, - [itask], - schd.server.curve_auth, - schd.server.client_pub_key_dir, - run_mode=schd.get_run_mode() - ) + schd.release_queued_tasks() + assert log_filter(log, contains='=> running'), msg.format('run') assert log_filter(log, contains='=> succeeded'), msg.format('succeed') -async def test_force_trigger_doesnt_change_mode( - flow, scheduler, run, complete -): - """Point 6 from the skip mode proposal - https://github.com/cylc/cylc-admin/blob/master/docs/proposal-skip-mode.md - - | Force-triggering a task will not override the run mode. - """ - wid = flow({ - 'scheduling': {'graph': {'R1': 'slow => skip'}}, - 'runtime': { - 'slow': {'script': 'sleep 6'}, - 'skip': {'script': 'exit 1', 'run mode': 'skip'} - } - }) - schd = scheduler(wid, run_mode='live', paused_start=False) - async with run(schd): - schd.pool.force_trigger_tasks(['1/skip'], [1]) - # This will timeout if the skip task has become live on triggering: - await complete(schd, '1/skip', timeout=6) - - async def test_prereqs_marked_satisfied_by_skip_mode( flow, scheduler, start, log_filter, complete ): @@ -232,11 +210,12 @@ async def test_prereqs_marked_satisfied_by_skip_mode( | rather than "satisfied naturally" for provenance reasons. 
""" schd = scheduler(flow({ - 'scheduling': {'graph': {'R1': 'foo => bar'}} - }), run_mode='skip') + 'scheduling': {'graph': {'R1': 'foo => bar'}}, + 'runtime': {'foo': {'run mode': 'skip'}} + }), run_mode='live') - async with start(schd) as log: - foo, = schd.pool.get_tasks() + async with start(schd): + foo = schd.pool.get_task(IntegerPoint(1), 'foo') schd.task_job_mgr.submit_task_jobs( schd.workflow, [foo], @@ -244,6 +223,6 @@ async def test_prereqs_marked_satisfied_by_skip_mode( schd.server.client_pub_key_dir, run_mode=schd.get_run_mode() ) - bar, = schd.pool.get_tasks() + bar = schd.pool.get_task(IntegerPoint(1), 'bar') satisfied_message, = bar.state.prerequisites[0]._satisfied.values() assert satisfied_message == 'satisfied by skip mode' diff --git a/tests/integration/test_dbstatecheck.py b/tests/integration/test_dbstatecheck.py index 94de81fbef0..16f4a7bb460 100644 --- a/tests/integration/test_dbstatecheck.py +++ b/tests/integration/test_dbstatecheck.py @@ -79,27 +79,27 @@ def test_basic(checker): ['output', '10010101T0000Z', 'succeeded'], ['good', '10000101T0000Z', 'waiting', '(flows=2)'], ['good', '10010101T0000Z', 'waiting', '(flows=2)'], ] - assert result == expect + assert sorted(result) == sorted(expect) def test_task(checker): """Filter by task name""" result = checker.workflow_state_query(task='bad') - assert result == [ + assert sorted(result) == ([ ['bad', '10000101T0000Z', 'failed'], ['bad', '10010101T0000Z', 'succeeded'] - ] + ]) def test_point(checker): """Filter by point""" result = checker.workflow_state_query(cycle='10000101T0000Z') - assert result == [ + assert sorted(result) == sorted([ ['bad', '10000101T0000Z', 'failed'], ['good', '10000101T0000Z', 'succeeded'], ['output', '10000101T0000Z', 'succeeded'], ['good', '10000101T0000Z', 'waiting', '(flows=2)'], - ] + ]) def test_status(checker): diff --git a/tests/integration/utils/flow_tools.py b/tests/integration/utils/flow_tools.py index 10e879b560d..34b80a25882 100644 --- 
a/tests/integration/utils/flow_tools.py +++ b/tests/integration/utils/flow_tools.py @@ -116,10 +116,6 @@ def __make_scheduler(id_: str, **opts: Any) -> Scheduler: schd.workflow_db_mgr.on_workflow_shutdown() -def caplogprinter(caplog): - _ = [print(i) for i in caplog.messages] - - @asynccontextmanager async def _start_flow( caplog: Optional[pytest.LogCaptureFixture], @@ -129,8 +125,6 @@ async def _start_flow( """Start a scheduler but don't set the main loop running.""" if caplog: caplog.set_level(level, CYLC_LOG) - # Debug functionality - caplog.print = lambda: caplogprinter(caplog) await schd.install() @@ -161,8 +155,6 @@ async def _run_flow( """Start a scheduler and set the main loop running.""" if caplog: caplog.set_level(level, CYLC_LOG) - # Debug functionality - caplog.print = lambda: caplogprinter(caplog) await schd.install() diff --git a/tests/unit/run_modes/test_nonlive.py b/tests/unit/run_modes/test_nonlive_units.py similarity index 100% rename from tests/unit/run_modes/test_nonlive.py rename to tests/unit/run_modes/test_nonlive_units.py diff --git a/tests/unit/run_modes/test_simulation.py b/tests/unit/run_modes/test_simulation_units.py similarity index 100% rename from tests/unit/run_modes/test_simulation.py rename to tests/unit/run_modes/test_simulation_units.py diff --git a/tests/unit/run_modes/test_skip.py b/tests/unit/run_modes/test_skip_units.py similarity index 100% rename from tests/unit/run_modes/test_skip.py rename to tests/unit/run_modes/test_skip_units.py diff --git a/tests/unit/test_platforms.py b/tests/unit/test_platforms.py index 3167afabf70..89e5d2f19f8 100644 --- a/tests/unit/test_platforms.py +++ b/tests/unit/test_platforms.py @@ -34,7 +34,6 @@ PlatformLookupError, GlobalConfigError ) -from cylc.flow.task_state import RunMode PLATFORMS = { diff --git a/tests/unit/test_task_state.py b/tests/unit/test_task_state.py index 1a2041fcba5..2854bd60c29 100644 --- a/tests/unit/test_task_state.py +++ b/tests/unit/test_task_state.py @@ -19,9 +19,9 @@ 
from cylc.flow.taskdef import TaskDef from cylc.flow.cycling.integer import IntegerSequence, IntegerPoint +from cylc.flow.run_modes import RunMode, disable_task_event_handlers from cylc.flow.task_trigger import Dependency, TaskTrigger from cylc.flow.task_state import ( - RunMode, TaskState, TASK_STATUS_PREPARING, TASK_STATUS_SUBMIT_FAILED, @@ -147,4 +147,4 @@ def test_disable_task_event_handlers(itask_run_mode, disable_handlers, expect): 'skip': {'disable task event handlers': disable_handlers}}) ) # Check method: - assert RunMode.disable_task_event_handlers(itask) is expect + assert disable_task_event_handlers(itask) is expect From cb73b5988ef7359aeb359dac5a87ac46812cea8b Mon Sep 17 00:00:00 2001 From: Tim Pillinger Date: Thu, 3 Oct 2024 16:38:18 +0100 Subject: [PATCH 03/29] Ensure that platforms for skip and simulation contain default values for ___retry_delays, as the lack of these causes failure on task retry or re-triggering. --- cylc/flow/run_modes/simulation.py | 2 ++ cylc/flow/run_modes/skip.py | 4 ++- tests/integration/run_modes/test_skip.py | 32 ++++++++++++++++++++++++ 3 files changed, 37 insertions(+), 1 deletion(-) diff --git a/cylc/flow/run_modes/simulation.py b/cylc/flow/run_modes/simulation.py index 4eaaf48ac56..e9dbbc26f17 100644 --- a/cylc/flow/run_modes/simulation.py +++ b/cylc/flow/run_modes/simulation.py @@ -78,6 +78,8 @@ def submit_task_job( 'hosts': ['localhost'], 'disable task event handlers': rtconfig['simulation']['disable task event handlers'], + 'submission retry delays': [], + 'execution retry delays': [] } itask.summary['job_runner_name'] = RunMode.SIMULATION.value itask.summary[task_job_mgr.KEY_EXECUTE_TIME_LIMIT] = ( diff --git a/cylc/flow/run_modes/skip.py b/cylc/flow/run_modes/skip.py index 9a05aa4729b..ae4703dfc0d 100644 --- a/cylc/flow/run_modes/skip.py +++ b/cylc/flow/run_modes/skip.py @@ -58,7 +58,9 @@ def submit_task_job( 'hosts': ['localhost'], 'disable task event handlers': rtconfig['skip']['disable task event handlers'], - 
'execution polling intervals': [] + 'execution polling intervals': [], + 'submission retry delays': [], + 'execution retry delays': [] } itask.summary['job_runner_name'] = RunMode.SKIP.value itask.run_mode = RunMode.SKIP.value diff --git a/tests/integration/run_modes/test_skip.py b/tests/integration/run_modes/test_skip.py index b4c5cfdc2fb..afb4b91707e 100644 --- a/tests/integration/run_modes/test_skip.py +++ b/tests/integration/run_modes/test_skip.py @@ -226,3 +226,35 @@ async def test_prereqs_marked_satisfied_by_skip_mode( bar = schd.pool.get_task(IntegerPoint(1), 'bar') satisfied_message, = bar.state.prerequisites[0]._satisfied.values() assert satisfied_message == 'satisfied by skip mode' + + +async def test_outputs_can_be_changed(one_conf, flow, start, scheduler, validate): + + schd = scheduler(flow(one_conf), run_mode='live') + async with start(schd) as log: + # Broadcast the task into skip mode, output failed and submit it: + schd.broadcast_mgr.put_broadcast( + ["1"], + ["one"], + [ + {"run mode": "skip"}, + {"skip": {"outputs": "failed"}}, + ], + ) + schd.task_job_mgr.submit_task_jobs( + schd.workflow, + schd.pool.get_tasks(), + None, + None + ) + + # Broadcast the task into skip mode, output succeeded and submit it: + schd.broadcast_mgr.put_broadcast( + ['1'], ['one'], [{'skip': {'outputs': 'succeeded'}}] + ) + schd.task_job_mgr.submit_task_jobs( + schd.workflow, + schd.pool.get_tasks(), + None, + None + ) From 24aef083eafc051a7206d63f1c2db44fd60f26a7 Mon Sep 17 00:00:00 2001 From: Tim Pillinger <26465611+wxtim@users.noreply.github.com> Date: Wed, 16 Oct 2024 10:16:16 +0100 Subject: [PATCH 04/29] Simpler Suggestions from @metronnie Co-authored-by: Ronnie Dutta <61982285+MetRonnie@users.noreply.github.com> --- changes.d/6039.feat.md | 2 +- cylc/flow/cfgspec/workflow.py | 3 +-- cylc/flow/network/schema.py | 7 ------- cylc/flow/run_modes/__init__.py | 11 ++++------- cylc/flow/run_modes/skip.py | 2 +- cylc/flow/scripts/lint.py | 6 ++---- 
cylc/flow/scripts/set.py | 6 +++--- 7 files changed, 12 insertions(+), 25 deletions(-) diff --git a/changes.d/6039.feat.md b/changes.d/6039.feat.md index d3a39814e2c..96f46a5829c 100644 --- a/changes.d/6039.feat.md +++ b/changes.d/6039.feat.md @@ -1 +1 @@ -Add a new mode task run mode "skip" which overrides workflow live mode task submission. \ No newline at end of file +Added a new task run mode "skip", which allows skipping of tasks in a running workflow as well as creation of dummy tasks as part of workflow design. \ No newline at end of file diff --git a/cylc/flow/cfgspec/workflow.py b/cylc/flow/cfgspec/workflow.py index 1da7401d2d5..15a5a634b25 100644 --- a/cylc/flow/cfgspec/workflow.py +++ b/cylc/flow/cfgspec/workflow.py @@ -1342,8 +1342,7 @@ def get_script_common_text(this: str, example: Optional[str] = None): options=list(TASK_CONFIG_RUN_MODES), default=RunMode.LIVE.value, desc=f''' - For a workflow run in live mode run this task in skip - mode. + Run the task in one of the following modes: {RunMode.LIVE.value}: {RunMode.LIVE.describe()} diff --git a/cylc/flow/network/schema.py b/cylc/flow/network/schema.py index 3ee37892f1e..2e7bd789dbf 100644 --- a/cylc/flow/network/schema.py +++ b/cylc/flow/network/schema.py @@ -599,13 +599,6 @@ class Meta: string_extended = String() -def describe_run_mode(run_mode: Optional['Enum']) -> str: - """Returns description for a workflow/task run mode.""" - if not run_mode: - return "" - return getattr(RunMode, run_mode.value.upper()).__doc__ - - # The run mode for the workflow. 
WorkflowRunMode = graphene.Enum( 'WorkflowRunMode', diff --git a/cylc/flow/run_modes/__init__.py b/cylc/flow/run_modes/__init__.py index 529be513c6c..08761780b1b 100644 --- a/cylc/flow/run_modes/__init__.py +++ b/cylc/flow/run_modes/__init__.py @@ -46,11 +46,11 @@ class RunMode(Enum): """The possible run modes of a task/workflow.""" LIVE = 'live' - """Task will run normally.""" + """Tasks will run normally.""" SIMULATION = 'simulation' """Simulates job submission with configurable exection time - and succeeded/failed outcomes(but does not submit real jobs).""" + and succeeded/failed outcomes (but does not submit real jobs).""" DUMMY = 'dummy' """Submits real jobs with empty scripts.""" @@ -76,16 +76,13 @@ def describe(self): return ( "Simulates job submission with configurable" " exection time and succeeded/failed outcomes" - "(but does not submit real jobs).") + " (but does not submit real jobs).") raise KeyError(f'No description for {self}.') @staticmethod def get(options: 'Values') -> str: """Return the workflow run mode from the options.""" - if hasattr(options, 'run_mode') and options.run_mode: - return options.run_mode - else: - return RunMode.LIVE.value + return getattr(options, 'run_mode', None) or RunMode.LIVE.value def get_submit_method(self) -> 'Optional[SubmissionInterface]': """Return the job submission method for this run mode. diff --git a/cylc/flow/run_modes/skip.py b/cylc/flow/run_modes/skip.py index ae4703dfc0d..d89cdc7887f 100644 --- a/cylc/flow/run_modes/skip.py +++ b/cylc/flow/run_modes/skip.py @@ -131,7 +131,7 @@ def process_outputs(itask: 'TaskProxy', rtconfig: Dict) -> List[str]: # Send succeeded/failed last. 
if TASK_OUTPUT_FAILED in conf_outputs: result.append(TASK_OUTPUT_FAILED) - elif TASK_OUTPUT_SUCCEEDED and TASK_OUTPUT_SUCCEEDED not in result: + elif TASK_OUTPUT_SUCCEEDED not in result: result.append(TASK_OUTPUT_SUCCEEDED) return result diff --git a/cylc/flow/scripts/lint.py b/cylc/flow/scripts/lint.py index bdccd6c4722..2dd3c30d36a 100755 --- a/cylc/flow/scripts/lint.py +++ b/cylc/flow/scripts/lint.py @@ -387,10 +387,8 @@ def check_for_deprecated_task_event_template_vars( def check_skip_mode_outputs(line: str) -> Dict: - """Ensure skip mode output setting doesn't include: - - * succeeded _and_ failed: Mutually exclusive. - * submitted and started: These are emitted by skip mode anyway. + """Ensure skip mode output setting doesn't include + succeeded _and_ failed, as they are mutually exclusive. n.b. diff --git a/cylc/flow/scripts/set.py b/cylc/flow/scripts/set.py index 888ba20890e..adc3cf8449a 100755 --- a/cylc/flow/scripts/set.py +++ b/cylc/flow/scripts/set.py @@ -158,9 +158,9 @@ def get_option_parser() -> COP: help=( "Complete task outputs. For multiple outputs re-use the" " option, or give a comma-separated list of outputs." - ' Use "--out=required" to complete all required outputs.' - ' Use "--out=skip" to complete outputs defined in the tasks.' - ' [skip] configuration.' + " Use '--out=required' to complete all required outputs." + " Use '--out=skip' to complete outputs defined in the task's" + " [skip] configuration." " OUTPUT format: trigger names as used in the graph." ), action="append", default=None, dest="outputs" From 3e516a97a8d442816819b368aebddb579c8a5b20 Mon Sep 17 00:00:00 2001 From: Tim Pillinger <26465611+wxtim@users.noreply.github.com> Date: Wed, 16 Oct 2024 11:24:01 +0100 Subject: [PATCH 05/29] * Remove nonlive.py and move it's only function into skip.py * Rename generalized run mode validation function to only checking skip mode validation. 
--- cylc/flow/config.py | 4 +- cylc/flow/run_modes/nonlive.py | 55 ---------------------- cylc/flow/run_modes/skip.py | 22 +++++++++ cylc/flow/task_proxy.py | 2 +- tests/unit/run_modes/test_nonlive_units.py | 51 -------------------- tests/unit/run_modes/test_skip_units.py | 36 +++++++++++++- 6 files changed, 60 insertions(+), 110 deletions(-) delete mode 100644 cylc/flow/run_modes/nonlive.py delete mode 100644 tests/unit/run_modes/test_nonlive_units.py diff --git a/cylc/flow/config.py b/cylc/flow/config.py index 73d39e2e6a8..4196568e783 100644 --- a/cylc/flow/config.py +++ b/cylc/flow/config.py @@ -82,7 +82,7 @@ ) from cylc.flow.print_tree import print_tree from cylc.flow.task_qualifiers import ALT_QUALIFIERS -from cylc.flow.run_modes.nonlive import run_mode_validate_checks +from cylc.flow.run_modes.skip import skip_mode_validate from cylc.flow.subprocctx import SubFuncContext from cylc.flow.task_events_mgr import ( EventData, @@ -563,7 +563,7 @@ def __init__( self.mem_log("config.py: end init config") - run_mode_validate_checks(self.taskdefs) + skip_mode_validate(self.taskdefs) @staticmethod def _warn_if_queues_have_implicit_tasks( diff --git a/cylc/flow/run_modes/nonlive.py b/cylc/flow/run_modes/nonlive.py deleted file mode 100644 index 0add79cba4f..00000000000 --- a/cylc/flow/run_modes/nonlive.py +++ /dev/null @@ -1,55 +0,0 @@ -# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. -# Copyright (C) NIWA & British Crown (Met Office) & Contributors. -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -"""Utilities supporting all nonlive modes -""" -from typing import TYPE_CHECKING, Dict, List - -from cylc.flow import LOG -from cylc.flow.run_modes.skip import check_task_skip_config -from cylc.flow.run_modes import RunMode - -if TYPE_CHECKING: - from cylc.flow.taskdef import TaskDef - - -def run_mode_validate_checks(taskdefs: 'Dict[str, TaskDef]') -> None: - """Warn user if any tasks have "run mode" set to skip. - """ - warn_nonlive: Dict[str, List[str]] = { - RunMode.SKIP.value: [], - } - - # Run through taskdefs looking for those with nonlive modes - for taskdef in taskdefs.values(): - # Add to list of tasks to be run in non-live modes: - if ( - taskdef.rtconfig.get('run mode', None) - in { - RunMode.SIMULATION.value, - RunMode.SKIP.value, - RunMode.DUMMY.value - } - ): - warn_nonlive[taskdef.rtconfig['run mode']].append(taskdef.name) - - # Run any mode specific validation checks: - check_task_skip_config(taskdef) - - if any(warn_nonlive.values()): - message = 'The following tasks are set to run in skip mode:' - for taskname in warn_nonlive[RunMode.SKIP.value]: - message += f'\n * {taskname}' - LOG.warning(message) diff --git a/cylc/flow/run_modes/skip.py b/cylc/flow/run_modes/skip.py index d89cdc7887f..d782b613302 100644 --- a/cylc/flow/run_modes/skip.py +++ b/cylc/flow/run_modes/skip.py @@ -19,6 +19,7 @@ from typing import ( TYPE_CHECKING, Dict, List, Tuple) +from cylc.flow import LOG from cylc.flow.exceptions import WorkflowConfigError from cylc.flow.task_outputs import ( TASK_OUTPUT_SUBMITTED, @@ -158,3 +159,24 @@ def check_task_skip_config(tdef: 'TaskDef') -> None: raise WorkflowConfigError( f'Skip mode settings for task {tdef.name} has' ' mutually exclusive outputs: succeeded AND failed.') + + +def skip_mode_validate(taskdefs: 'Dict[str, TaskDef]') -> None: + """Warn user if any tasks have "run mode" set to skip. 
+ """ + warn_nonlive: Dict[str, List[str]] = {RunMode.SKIP.value: []} + + # Run through taskdefs looking for those with nonlive modes + for taskdef in taskdefs.values(): + # Add to list of tasks to be run in non-live modes: + if (taskdef.rtconfig.get('run mode', None) == RunMode.SKIP.value): + warn_nonlive[taskdef.rtconfig['run mode']].append(taskdef.name) + + # Run any mode specific validation checks: + check_task_skip_config(taskdef) + + if any(warn_nonlive.values()): + message = 'The following tasks are set to run in skip mode:' + for taskname in warn_nonlive[RunMode.SKIP.value]: + message += f'\n * {taskname}' + LOG.warning(message) diff --git a/cylc/flow/task_proxy.py b/cylc/flow/task_proxy.py index 7219fafef18..f16696b6c0f 100644 --- a/cylc/flow/task_proxy.py +++ b/cylc/flow/task_proxy.py @@ -57,7 +57,7 @@ from cylc.flow.flow_mgr import FlowNums from cylc.flow.id import Tokens from cylc.flow.prerequisite import PrereqMessage, SatisfiedState - from cylc.flow.simulation import ModeSettings + from cylc.flow.run_modes.simulation import ModeSettings from cylc.flow.task_action_timer import TaskActionTimer from cylc.flow.taskdef import TaskDef diff --git a/tests/unit/run_modes/test_nonlive_units.py b/tests/unit/run_modes/test_nonlive_units.py deleted file mode 100644 index 71695f2c96b..00000000000 --- a/tests/unit/run_modes/test_nonlive_units.py +++ /dev/null @@ -1,51 +0,0 @@ -# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE. -# Copyright (C) NIWA & British Crown (Met Office) & Contributors. -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -"""Unit tests for utilities supporting all nonlive modes -""" - -from types import SimpleNamespace - -from cylc.flow.run_modes.nonlive import run_mode_validate_checks - - -def test_run_mode_validate_checks(monkeypatch, caplog): - """It warns us if we've set a task config to nonlive mode. - - (And not otherwise) - - Point 3 from the skip mode proposal - https://github.com/cylc/cylc-admin/blob/master/docs/proposal-skip-mode.md - - | If the run mode is set to simulation or skip in the workflow - | configuration, then cylc validate and cylc lint should produce - | warning (similar to development features in other languages / systems). - """ - taskdefs = { - f'{run_mode}_task': SimpleNamespace( - rtconfig={'run mode': run_mode}, - name=f'{run_mode}_task' - ) - for run_mode - in ['live', 'skip'] - } - - run_mode_validate_checks(taskdefs) - - message = caplog.messages[0] - - assert 'skip mode:\n * skip_task' in message - assert ' live mode' not in message # Avoid matching "non-live mode" - assert 'workflow mode' not in message diff --git a/tests/unit/run_modes/test_skip_units.py b/tests/unit/run_modes/test_skip_units.py index f5ad89381d7..9c580117739 100644 --- a/tests/unit/run_modes/test_skip_units.py +++ b/tests/unit/run_modes/test_skip_units.py @@ -20,7 +20,11 @@ from types import SimpleNamespace from cylc.flow.exceptions import WorkflowConfigError -from cylc.flow.run_modes.skip import check_task_skip_config, process_outputs +from cylc.flow.run_modes.skip import ( + check_task_skip_config, + process_outputs, + skip_mode_validate, +) @pytest.mark.parametrize( @@ -99,3 +103,33 @@ def test_process_outputs(outputs, required, expect): ))) assert process_outputs(itask, rtconf) == ['submitted', 'started'] + expect + + +def test_skip_mode_validate(monkeypatch, caplog): + """It warns us if we've set a task 
config to nonlive mode. + + (And not otherwise) + + Point 3 from the skip mode proposal + https://github.com/cylc/cylc-admin/blob/master/docs/proposal-skip-mode.md + + | If the run mode is set to simulation or skip in the workflow + | configuration, then cylc validate and cylc lint should produce + | warning (similar to development features in other languages / systems). + """ + taskdefs = { + f'{run_mode}_task': SimpleNamespace( + rtconfig={'run mode': run_mode}, + name=f'{run_mode}_task' + ) + for run_mode + in ['live', 'skip'] + } + + skip_mode_validate(taskdefs) + + message = caplog.messages[0] + + assert 'skip mode:\n * skip_task' in message + assert ' live mode' not in message # Avoid matching "non-live mode" + assert 'workflow mode' not in message From d751f0dfa95057c3eefded519244268648e377c1 Mon Sep 17 00:00:00 2001 From: Tim Pillinger <26465611+wxtim@users.noreply.github.com> Date: Wed, 16 Oct 2024 14:32:24 +0100 Subject: [PATCH 06/29] Unit tests and improved docs for parse_fail_cycle_points --- cylc/flow/run_modes/simulation.py | 47 ++++++++++--------- tests/unit/conftest.py | 12 +++-- tests/unit/run_modes/test_simulation_units.py | 41 +++++++++++++--- 3 files changed, 68 insertions(+), 32 deletions(-) diff --git a/cylc/flow/run_modes/simulation.py b/cylc/flow/run_modes/simulation.py index e9dbbc26f17..196ce10fa16 100644 --- a/cylc/flow/run_modes/simulation.py +++ b/cylc/flow/run_modes/simulation.py @@ -25,7 +25,6 @@ from metomi.isodatetime.parsers import DurationParser from cylc.flow import LOG -from cylc.flow.cycling import PointBase from cylc.flow.cycling.loader import get_point from cylc.flow.exceptions import PointParsingError from cylc.flow.platforms import FORBIDDEN_WITH_PLATFORM @@ -40,6 +39,7 @@ if TYPE_CHECKING: + from cylc.flow.cycling import PointBase from cylc.flow.task_events_mgr import TaskEventsManager from cylc.flow.task_job_mgr import TaskJobManager from cylc.flow.task_proxy import TaskProxy @@ -189,7 +189,7 @@ def __init__( self.timeout 
= started_time + self.simulated_run_length -def configure_sim_mode(rtc, fallback): +def configure_sim_mode(rtc, fail_at_points_config): """Adjust task defs for simulation mode. Example: @@ -220,7 +220,7 @@ def configure_sim_mode(rtc, fallback): "fail cycle points" ] = parse_fail_cycle_points( rtc["simulation"]["fail cycle points"], - fallback + fail_at_points_config ) @@ -263,13 +263,21 @@ def disable_platforms( def parse_fail_cycle_points( - f_pts_orig: List[str], fallback + fail_at_points_updated: List[str], + fail_at_points_config, ) -> 'Union[None, List[PointBase]]': """Parse `[simulation][fail cycle points]`. - None for "fail all points". - Else a list of cycle point objects. + Args: + fail_at_points_updated: Fail cycle points from a broadcast. + fail_at_points_config: + Fail cycle points from original workflow config, which would + have caused the scheduler to fail on config parsing. This check is + designed to prevent broadcasts from taking the scheduler down. + Examples: >>> this = parse_fail_cycle_points >>> this(['all'], ['42']) is None @@ -279,26 +287,21 @@ def parse_fail_cycle_points( >>> this(None, ['42']) is None True """ - f_pts: 'Optional[List[PointBase]]' = [] + fail_at_points: 'Optional[List[PointBase]]' = [] if ( - f_pts_orig is None - or f_pts_orig and 'all' in f_pts_orig + fail_at_points_updated is None + or fail_at_points_updated + and 'all' in fail_at_points_updated ): - f_pts = None - elif f_pts_orig: - f_pts = [] - for point_str in f_pts_orig: - if isinstance(point_str, PointBase): - f_pts.append(point_str) - else: - try: - f_pts.append(get_point(point_str).standardise()) - except PointParsingError: - LOG.warning( - f'Invalid ISO 8601 date representation: {point_str}' - ) - return fallback - return f_pts + return None + elif fail_at_points_updated: + for point_str in fail_at_points_updated: + try: + fail_at_points.append(get_point(point_str).standardise()) + except PointParsingError as exc: + LOG.warning(exc.args[0]) + return 
fail_at_points_config + return fail_at_points def sim_time_check( diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 924b1295998..f1f17101fd5 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -172,20 +172,26 @@ def set_cycling_type(monkeypatch: pytest.MonkeyPatch): custom time zone to use. dump_format: If using ISO8601, specify custom dump format. """ + def _set_cycling_type( ctype: str = INTEGER_CYCLING_TYPE, - time_zone: Optional[str] = None, + time_zone: Optional[str] = 'Z', dump_format: Optional[str] = None, ) -> None: class _DefaultCycler: TYPE = ctype + monkeypatch.setattr( - 'cylc.flow.cycling.loader.DefaultCycler', _DefaultCycler) + 'cylc.flow.cycling.loader.DefaultCycler', _DefaultCycler + ) if ctype == ISO8601_CYCLING_TYPE: monkeypatch.setattr( 'cylc.flow.cycling.iso8601.WorkflowSpecifics', - iso8601_init(time_zone=time_zone, custom_dump_format=dump_format) + iso8601_init( + time_zone=time_zone, custom_dump_format=dump_format + ), ) + return _set_cycling_type diff --git a/tests/unit/run_modes/test_simulation_units.py b/tests/unit/run_modes/test_simulation_units.py index 109174c8b43..78ef2bd2f05 100644 --- a/tests/unit/run_modes/test_simulation_units.py +++ b/tests/unit/run_modes/test_simulation_units.py @@ -21,9 +21,9 @@ from cylc.flow.cycling.integer import IntegerPoint from cylc.flow.cycling.iso8601 import ISO8601Point from cylc.flow.run_modes.simulation import ( - parse_fail_cycle_points, disable_platforms, get_simulated_run_len, + parse_fail_cycle_points, sim_task_failed, ) @@ -75,12 +75,39 @@ def test_disable_platforms(rtc, expect): assert val is None -def test_parse_fail_cycle_points(set_cycling_type): - before = ['2', '4'] - set_cycling_type() - assert parse_fail_cycle_points(before, ['']) == [ - IntegerPoint(i) for i in before - ] +@pytest.mark.parametrize( + 'args, cycling, fallback', + ( + param((['2', '4'], ['']), 'integer', False, id='int.valid'), + param((['garbage'], []), 'integer', True, id='int.invalid'), 
+ param((['20200101T0000Z'], []), 'iso8601', False, id='iso.valid'), + param((['garbage'], []), 'iso8601', True, id='iso.invalid'), + ), +) +def test_parse_fail_cycle_points( + caplog, set_cycling_type, args, cycling, fallback +): + """Tests for parse_fail_cycle points. + """ + set_cycling_type(cycling) + if fallback: + expect = args[1] + check_log = True + else: + expect = args[0] + check_log = False + + if cycling == 'integer': + assert parse_fail_cycle_points(*args) == [ + IntegerPoint(i) for i in expect + ] + else: + assert parse_fail_cycle_points(*args) == [ + ISO8601Point(i) for i in expect + ] + if check_log: + assert "Incompatible" in caplog.messages[0] + assert cycling in caplog.messages[0].lower() @pytest.mark.parametrize( From c71ade2b02c65d3dfc9ed53936ed570f5ec9866e Mon Sep 17 00:00:00 2001 From: Tim Pillinger <26465611+wxtim@users.noreply.github.com> Date: Wed, 16 Oct 2024 14:52:51 +0100 Subject: [PATCH 07/29] Apply suggestions from code review Co-authored-by: Ronnie Dutta <61982285+MetRonnie@users.noreply.github.com> --- cylc/flow/platforms.py | 9 +++------ cylc/flow/run_modes/skip.py | 16 +++++----------- 2 files changed, 8 insertions(+), 17 deletions(-) diff --git a/cylc/flow/platforms.py b/cylc/flow/platforms.py index 02ff15c5462..110d135edeb 100644 --- a/cylc/flow/platforms.py +++ b/cylc/flow/platforms.py @@ -663,12 +663,9 @@ def get_install_target_to_platforms_map( ret.setdefault(install_target, []).append(platform) # Map jobless modes to localhost. 
- if 'localhost' in ret: - ret['localhost'] += [ - {'name': mode} for mode in JOBLESS_MODES] - else: - ret['localhost'] = [ - {'name': mode} for mode in JOBLESS_MODES] + ret.setdefault('localhost', []).extend( + {'name': mode} for mode in JOBLESS_MODES + ) return ret diff --git a/cylc/flow/run_modes/skip.py b/cylc/flow/run_modes/skip.py index d782b613302..6c10fbc7378 100644 --- a/cylc/flow/run_modes/skip.py +++ b/cylc/flow/run_modes/skip.py @@ -144,10 +144,7 @@ def check_task_skip_config(tdef: 'TaskDef') -> None: Raises: * Error if outputs include succeeded and failed. """ - skip_config = tdef.rtconfig.get('skip', {}) - if not skip_config: - return - skip_outputs = skip_config.get('outputs', {}) + skip_outputs = tdef.rtconfig.get('skip', {}).get('outputs', {}) if not skip_outputs: return @@ -164,19 +161,16 @@ def check_task_skip_config(tdef: 'TaskDef') -> None: def skip_mode_validate(taskdefs: 'Dict[str, TaskDef]') -> None: """Warn user if any tasks have "run mode" set to skip. """ - warn_nonlive: Dict[str, List[str]] = {RunMode.SKIP.value: []} - - # Run through taskdefs looking for those with nonlive modes + skip_mode_tasks: List[str] = [] for taskdef in taskdefs.values(): - # Add to list of tasks to be run in non-live modes: if (taskdef.rtconfig.get('run mode', None) == RunMode.SKIP.value): - warn_nonlive[taskdef.rtconfig['run mode']].append(taskdef.name) + skip_mode_tasks.append(taskdef.name) # Run any mode specific validation checks: check_task_skip_config(taskdef) - if any(warn_nonlive.values()): + if skip_mode_tasks: message = 'The following tasks are set to run in skip mode:' - for taskname in warn_nonlive[RunMode.SKIP.value]: + for taskname in skip_mode_tasks: message += f'\n * {taskname}' LOG.warning(message) From 658222582307553a00909af0e48262e659d989d2 Mon Sep 17 00:00:00 2001 From: Tim Pillinger <26465611+wxtim@users.noreply.github.com> Date: Wed, 16 Oct 2024 15:00:36 +0100 Subject: [PATCH 08/29] fix broken test --- tests/unit/test_platforms.py | 5 
++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/unit/test_platforms.py b/tests/unit/test_platforms.py index 89e5d2f19f8..4e0e0c0bbd9 100644 --- a/tests/unit/test_platforms.py +++ b/tests/unit/test_platforms.py @@ -470,9 +470,12 @@ def test_get_install_target_to_platforms_map( for install_target in _map: _map[install_target] = sorted(_map[install_target], key=lambda k: k['name']) - result.pop('localhost') + expected_map.update( + {'localhost': [{'name': 'simulation'}, {'name': 'skip'}]} + ) assert result == expected_map + @pytest.mark.parametrize( 'platform, job, remote, expect', [ From 4165ee4af5ab12bfe894e26c36e105e9705b21f7 Mon Sep 17 00:00:00 2001 From: Tim Pillinger <26465611+wxtim@users.noreply.github.com> Date: Wed, 16 Oct 2024 15:16:41 +0100 Subject: [PATCH 09/29] fix mypy failures --- cylc/flow/run_modes/simulation.py | 4 ++-- cylc/flow/task_job_mgr.py | 12 +++++++++--- cylc/flow/task_proxy.py | 3 ++- 3 files changed, 13 insertions(+), 6 deletions(-) diff --git a/cylc/flow/run_modes/simulation.py b/cylc/flow/run_modes/simulation.py index 196ce10fa16..ba2bc04b30e 100644 --- a/cylc/flow/run_modes/simulation.py +++ b/cylc/flow/run_modes/simulation.py @@ -19,7 +19,7 @@ from dataclasses import dataclass from logging import INFO from typing import ( - TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union) + TYPE_CHECKING, Any, Dict, List, Tuple, Union) from time import time from metomi.isodatetime.parsers import DurationParser @@ -287,7 +287,7 @@ def parse_fail_cycle_points( >>> this(None, ['42']) is None True """ - fail_at_points: 'Optional[List[PointBase]]' = [] + fail_at_points: 'List[PointBase]' = [] if ( fail_at_points_updated is None or fail_at_points_updated diff --git a/cylc/flow/task_job_mgr.py b/cylc/flow/task_job_mgr.py index 40ac119f897..0338c278e35 100644 --- a/cylc/flow/task_job_mgr.py +++ b/cylc/flow/task_job_mgr.py @@ -242,8 +242,14 @@ def prep_submit_task_jobs(self, workflow, itasks, check_syntax=True): 
bad_tasks.append(itask) return [prepared_tasks, bad_tasks] - def submit_task_jobs(self, workflow, itasks, curve_auth, - client_pub_key_dir, run_mode=RunMode.LIVE): + def submit_task_jobs( + self, + workflow, + itasks, + curve_auth, + client_pub_key_dir, + run_mode: Union[str, RunMode] = RunMode.LIVE, + ): """Prepare for job submission and submit task jobs. Preparation (host selection, remote host init, and remote install) @@ -1022,7 +1028,7 @@ def submit_nonlive_task_jobs( self: 'TaskJobManager', workflow: str, itasks: 'List[TaskProxy]', - workflow_run_mode: str, + workflow_run_mode: Union[str, RunMode], ) -> 'Tuple[List[TaskProxy], List[TaskProxy]]': """Identify task mode and carry out alternative submission paths if required: diff --git a/cylc/flow/task_proxy.py b/cylc/flow/task_proxy.py index f16696b6c0f..5ceb8c47f16 100644 --- a/cylc/flow/task_proxy.py +++ b/cylc/flow/task_proxy.py @@ -31,6 +31,7 @@ Optional, Set, Tuple, + Union, ) from metomi.isodatetime.timezone import get_local_time_zone @@ -298,7 +299,7 @@ def __init__( self.graph_children = generate_graph_children(tdef, self.point) self.mode_settings: Optional['ModeSettings'] = None - self.run_mode: Optional[str] = None + self.run_mode: Optional[Union[str, RunMode]] = None if self.tdef.expiration_offset is not None: self.expire_time = ( From 2a4b320611d106971baa03e43f08a97d5c2bc51c Mon Sep 17 00:00:00 2001 From: Tim Pillinger <26465611+wxtim@users.noreply.github.com> Date: Wed, 16 Oct 2024 15:39:20 +0100 Subject: [PATCH 10/29] restored missing code --- cylc/flow/run_modes/simulation.py | 19 +++++++++++-------- cylc/flow/run_modes/skip.py | 4 ++-- 2 files changed, 13 insertions(+), 10 deletions(-) diff --git a/cylc/flow/run_modes/simulation.py b/cylc/flow/run_modes/simulation.py index ba2bc04b30e..79b713f6647 100644 --- a/cylc/flow/run_modes/simulation.py +++ b/cylc/flow/run_modes/simulation.py @@ -25,6 +25,7 @@ from metomi.isodatetime.parsers import DurationParser from cylc.flow import LOG +from 
cylc.flow.cycling import PointBase from cylc.flow.cycling.loader import get_point from cylc.flow.exceptions import PointParsingError from cylc.flow.platforms import FORBIDDEN_WITH_PLATFORM @@ -39,7 +40,6 @@ if TYPE_CHECKING: - from cylc.flow.cycling import PointBase from cylc.flow.task_events_mgr import TaskEventsManager from cylc.flow.task_job_mgr import TaskJobManager from cylc.flow.task_proxy import TaskProxy @@ -189,7 +189,7 @@ def __init__( self.timeout = started_time + self.simulated_run_length -def configure_sim_mode(rtc, fail_at_points_config): +def configure_sim_mode(rtc, fallback): """Adjust task defs for simulation mode. Example: @@ -220,7 +220,7 @@ def configure_sim_mode(rtc, fail_at_points_config): "fail cycle points" ] = parse_fail_cycle_points( rtc["simulation"]["fail cycle points"], - fail_at_points_config + fallback ) @@ -296,11 +296,14 @@ def parse_fail_cycle_points( return None elif fail_at_points_updated: for point_str in fail_at_points_updated: - try: - fail_at_points.append(get_point(point_str).standardise()) - except PointParsingError as exc: - LOG.warning(exc.args[0]) - return fail_at_points_config + if isinstance(point_str, PointBase): + fail_at_points.append(point_str) + else: + try: + fail_at_points.append(get_point(point_str).standardise()) + except PointParsingError as exc: + LOG.warning(exc.args[0]) + return fail_at_points_config return fail_at_points diff --git a/cylc/flow/run_modes/skip.py b/cylc/flow/run_modes/skip.py index 6c10fbc7378..9bcb36a8ec8 100644 --- a/cylc/flow/run_modes/skip.py +++ b/cylc/flow/run_modes/skip.py @@ -166,8 +166,8 @@ def skip_mode_validate(taskdefs: 'Dict[str, TaskDef]') -> None: if (taskdef.rtconfig.get('run mode', None) == RunMode.SKIP.value): skip_mode_tasks.append(taskdef.name) - # Run any mode specific validation checks: - check_task_skip_config(taskdef) + # Run any mode specific validation checks: + check_task_skip_config(taskdef) if skip_mode_tasks: message = 'The following tasks are set to run in 
skip mode:' From c407e7ce3a6a95c3a56ef0ed669a54a4aa2a4080 Mon Sep 17 00:00:00 2001 From: Tim Pillinger <26465611+wxtim@users.noreply.github.com> Date: Wed, 16 Oct 2024 15:46:37 +0100 Subject: [PATCH 11/29] small ruff fixes --- tests/integration/test_task_events_mgr.py | 18 ++++++++++-------- tests/unit/test_task_state.py | 2 +- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/tests/integration/test_task_events_mgr.py b/tests/integration/test_task_events_mgr.py index 08ed816414d..868d42c75ca 100644 --- a/tests/integration/test_task_events_mgr.py +++ b/tests/integration/test_task_events_mgr.py @@ -14,7 +14,6 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -from itertools import product import logging from typing import Any as Fixture @@ -22,9 +21,7 @@ from cylc.flow.scheduler import Scheduler from cylc.flow.data_store_mgr import ( JOBS, - TASK_STATUSES_ORDERED, TASK_STATUS_WAITING, - TASK_STATUS_SUBMIT_FAILED, ) @@ -80,15 +77,20 @@ async def test__insert_task_job(flow, one_conf, scheduler, start, validate): """ conf = { 'scheduling': {'graph': {'R1': 'rhenas'}}, - 'runtime': {'rhenas': {'simulation': { - 'fail cycle points': '1', - 'fail try 1 only': False, - }}}} + 'runtime': { + 'rhenas': { + 'simulation': { + 'fail cycle points': '1', + 'fail try 1 only': False, + } + } + }, + } id_ = flow(conf) schd = scheduler(id_) async with start(schd): # Set task to running: - itask = schd.pool.get_tasks()[0] + itask = schd.pool.get_tasks()[0] itask.state.status = 'running' itask.submit_num += 1 diff --git a/tests/unit/test_task_state.py b/tests/unit/test_task_state.py index 2854bd60c29..1045a930423 100644 --- a/tests/unit/test_task_state.py +++ b/tests/unit/test_task_state.py @@ -19,7 +19,7 @@ from cylc.flow.taskdef import TaskDef from cylc.flow.cycling.integer import IntegerSequence, IntegerPoint -from cylc.flow.run_modes import RunMode, disable_task_event_handlers +from cylc.flow.run_modes import 
disable_task_event_handlers from cylc.flow.task_trigger import Dependency, TaskTrigger from cylc.flow.task_state import ( TaskState, From 67cd77623627718807ff394ff291f2d2d9f809b5 Mon Sep 17 00:00:00 2001 From: Tim Pillinger Date: Fri, 18 Oct 2024 11:49:46 +0100 Subject: [PATCH 12/29] re-added missing test --- tests/integration/run_modes/test_simulation.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/tests/integration/run_modes/test_simulation.py b/tests/integration/run_modes/test_simulation.py index debbf117d7a..c78f6e139dc 100644 --- a/tests/integration/run_modes/test_simulation.py +++ b/tests/integration/run_modes/test_simulation.py @@ -460,3 +460,15 @@ async def test_settings_broadcast( assert itask.try_timers['execution-retry'].delays == [2.0, 2.0, 2.0] # n.b. rtconfig should remain unchanged, lest we cancel broadcasts: assert itask.tdef.rtconfig['execution retry delays'] == [5.0, 5.0] + + +async def test_db_submit_num( + flow, one_conf, scheduler, run, complete, db_select +): + """Test simulation mode correctly increments the submit_num in the DB.""" + schd = scheduler(flow(one_conf), paused_start=False) + async with run(schd): + await complete(schd, '1/one') + assert db_select(schd, False, 'task_states', 'submit_num', 'status') == [ + (1, 'succeeded'), + ] From 9333731197a0c9dfda9c33fc91f534f7ca35633f Mon Sep 17 00:00:00 2001 From: Tim Pillinger Date: Sat, 19 Oct 2024 12:57:33 +0100 Subject: [PATCH 13/29] Type checking improved. 
--- cylc/flow/commands.py | 4 +-- cylc/flow/data_store_mgr.py | 2 +- cylc/flow/prerequisite.py | 7 ++-- cylc/flow/run_modes/__init__.py | 11 +++--- cylc/flow/run_modes/simulation.py | 3 +- cylc/flow/run_modes/skip.py | 5 ++- cylc/flow/scheduler.py | 15 ++++---- cylc/flow/task_events_mgr.py | 8 ++--- cylc/flow/task_job_mgr.py | 13 ++++--- cylc/flow/task_pool.py | 7 ++-- cylc/flow/task_proxy.py | 5 ++- cylc/flow/task_state.py | 3 +- cylc/flow/workflow_db_mgr.py | 12 ++++--- tests/integration/conftest.py | 3 +- .../run_modes/test_mode_overrides.py | 16 +++++---- tests/integration/run_modes/test_nonlive.py | 4 +-- .../integration/run_modes/test_simulation.py | 36 ++++++++++--------- tests/integration/utils/flow_tools.py | 1 + 18 files changed, 92 insertions(+), 63 deletions(-) diff --git a/cylc/flow/commands.py b/cylc/flow/commands.py index 75ed5169112..8f0fdd274f3 100644 --- a/cylc/flow/commands.py +++ b/cylc/flow/commands.py @@ -249,7 +249,7 @@ async def poll_tasks(schd: 'Scheduler', tasks: Iterable[str]): """Poll pollable tasks or a task or family if options are provided.""" validate.is_tasks(tasks) yield - if schd.get_run_mode() == RunMode.SIMULATION.value: + if schd.get_run_mode() == RunMode.SIMULATION: yield 0 itasks, _, bad_items = schd.pool.filter_task_proxies(tasks) schd.task_job_mgr.poll_task_jobs(schd.workflow, itasks) @@ -262,7 +262,7 @@ async def kill_tasks(schd: 'Scheduler', tasks: Iterable[str]): validate.is_tasks(tasks) yield itasks, _, bad_items = schd.pool.filter_task_proxies(tasks) - if schd.get_run_mode() == RunMode.SIMULATION.value: + if schd.get_run_mode() == RunMode.SIMULATION: for itask in itasks: if itask.state(*TASK_STATUSES_ACTIVE): itask.state_reset(TASK_STATUS_FAILED) diff --git a/cylc/flow/data_store_mgr.py b/cylc/flow/data_store_mgr.py index f25a6bb11ac..d1967985531 100644 --- a/cylc/flow/data_store_mgr.py +++ b/cylc/flow/data_store_mgr.py @@ -701,7 +701,7 @@ def generate_definition_elements(self): time_zone_info = TIME_ZONE_LOCAL_INFO 
for key, val in time_zone_info.items(): setbuff(workflow.time_zone_info, key, val) - workflow.run_mode = RunMode.get(config.options) + workflow.run_mode = RunMode.get(config.options).value workflow.cycling_mode = config.cfg['scheduling']['cycling mode'] workflow.workflow_log_dir = self.schd.workflow_log_dir workflow.job_log_names.extend(list(JOB_LOG_OPTS.values())) diff --git a/cylc/flow/prerequisite.py b/cylc/flow/prerequisite.py index ba9300bd75d..c58f54652ab 100644 --- a/cylc/flow/prerequisite.py +++ b/cylc/flow/prerequisite.py @@ -40,6 +40,7 @@ from cylc.flow.data_messages_pb2 import PbCondition, PbPrerequisite from cylc.flow.exceptions import TriggerExpressionError from cylc.flow.id import quick_relative_detokenise +from cylc.flow.run_modes import RunMode if TYPE_CHECKING: @@ -263,7 +264,7 @@ def _eval_satisfied(self) -> bool: def satisfy_me( self, outputs: Iterable['Tokens'], - mode: Literal['skip', 'live', 'simulation', 'skip'] = 'live' + mode: "RunMode" = RunMode.LIVE ) -> 'Set[Tokens]': """Attempt to satisfy me with given outputs. 
@@ -273,9 +274,9 @@ def satisfy_me( """ satisfied_message: SatisfiedState - if mode != 'live': + if mode != RunMode.LIVE: satisfied_message = self.DEP_STATE_SATISFIED_BY.format( - mode) # type: ignore + mode.value) # type: ignore else: satisfied_message = self.DEP_STATE_SATISFIED valid = set() diff --git a/cylc/flow/run_modes/__init__.py b/cylc/flow/run_modes/__init__.py index 08761780b1b..b2c7001bdb9 100644 --- a/cylc/flow/run_modes/__init__.py +++ b/cylc/flow/run_modes/__init__.py @@ -80,9 +80,12 @@ def describe(self): raise KeyError(f'No description for {self}.') @staticmethod - def get(options: 'Values') -> str: + def get(options: 'Values') -> "RunMode": """Return the workflow run mode from the options.""" - return getattr(options, 'run_mode', None) or RunMode.LIVE.value + run_mode = getattr(options, 'run_mode', None) + if run_mode: + return RunMode(run_mode) + return RunMode.LIVE def get_submit_method(self) -> 'Optional[SubmissionInterface]': """Return the job submission method for this run mode. 
@@ -113,9 +116,9 @@ def disable_task_event_handlers(itask: 'TaskProxy'): """ mode = itask.run_mode return ( - mode == RunMode.SIMULATION.value + mode == RunMode.SIMULATION or ( - mode == RunMode.SKIP.value + mode == RunMode.SKIP and itask.platform.get( 'disable task event handlers', False) ) diff --git a/cylc/flow/run_modes/simulation.py b/cylc/flow/run_modes/simulation.py index 79b713f6647..047a4526ed7 100644 --- a/cylc/flow/run_modes/simulation.py +++ b/cylc/flow/run_modes/simulation.py @@ -321,12 +321,13 @@ def sim_time_check( """ now = time() sim_task_state_changed: bool = False + for itask in itasks: if ( itask.state.status != TASK_STATUS_RUNNING or ( itask.run_mode - and itask.run_mode != RunMode.SIMULATION.value + and itask.run_mode != RunMode.SIMULATION ) ): continue diff --git a/cylc/flow/run_modes/skip.py b/cylc/flow/run_modes/skip.py index 9bcb36a8ec8..7e3f4310ee5 100644 --- a/cylc/flow/run_modes/skip.py +++ b/cylc/flow/run_modes/skip.py @@ -64,7 +64,10 @@ def submit_task_job( 'execution retry delays': [] } itask.summary['job_runner_name'] = RunMode.SKIP.value - itask.run_mode = RunMode.SKIP.value + itask.jobs.append( + task_job_mgr.get_simulation_job_conf(itask, _workflow) + ) + itask.run_mode = RunMode.SKIP task_job_mgr.workflow_db_mgr.put_insert_task_jobs( itask, { 'time_submit': now[1], diff --git a/cylc/flow/scheduler.py b/cylc/flow/scheduler.py index d73ab947f98..345be4b5335 100644 --- a/cylc/flow/scheduler.py +++ b/cylc/flow/scheduler.py @@ -433,8 +433,10 @@ async def configure(self, params): og_run_mode = self.get_run_mode() if run_mode != og_run_mode: raise InputError( - f'This workflow was originally run in {og_run_mode} mode:' - f' Will not restart in {run_mode} mode.') + "This workflow was originally run in " + f"{run_mode.value} mode:" + f" Will not restart in {run_mode.value} mode." 
+ ) self.profiler.log_memory("scheduler.py: before load_flow_file") try: @@ -1221,7 +1223,7 @@ def run_event_handlers(self, event, reason=""): Run workflow events only in live mode or skip mode. """ - if self.get_run_mode() in WORKFLOW_ONLY_MODES: + if self.get_run_mode().value in WORKFLOW_ONLY_MODES: return self.workflow_event_handler.handle(self, event, str(reason)) @@ -1346,7 +1348,7 @@ def timeout_check(self): """Check workflow and task timers.""" self.check_workflow_timers() # check submission and execution timeout and polling timers - if self.get_run_mode() != RunMode.SIMULATION.value: + if self.get_run_mode() != RunMode.SIMULATION: self.task_job_mgr.check_task_jobs(self.workflow, self.pool) async def workflow_shutdown(self): @@ -1544,8 +1546,9 @@ async def _main_loop(self) -> None: self.xtrigger_mgr.housekeep(self.pool.get_tasks()) self.pool.clock_expire_tasks() self.release_queued_tasks() + if ( - self.options.run_mode == RunMode.SIMULATION.value + self.get_run_mode() == RunMode.SIMULATION and sim_time_check( self.task_events_mgr, self.pool.get_tasks(), @@ -2016,7 +2019,7 @@ def _check_startup_opts(self) -> None: f"option --{opt}=reload is only valid for restart" ) - def get_run_mode(self) -> str: + def get_run_mode(self) -> RunMode: return RunMode.get(self.options) async def handle_exception(self, exc: BaseException) -> NoReturn: diff --git a/cylc/flow/task_events_mgr.py b/cylc/flow/task_events_mgr.py index 03452d9a4cd..59d3ff1bd2b 100644 --- a/cylc/flow/task_events_mgr.py +++ b/cylc/flow/task_events_mgr.py @@ -773,7 +773,7 @@ def process_message( # ... but either way update the job ID in the job proxy (it only # comes in via the submission message). 
- if itask.run_mode != RunMode.SIMULATION.value: + if itask.run_mode != RunMode.SIMULATION: job_tokens = itask.tokens.duplicate( job=str(itask.submit_num) ) @@ -896,7 +896,7 @@ def _process_message_check( if ( itask.state(TASK_STATUS_WAITING) # Polling in live mode only: - and itask.run_mode == RunMode.LIVE.value + and itask.run_mode == RunMode.LIVE and ( ( # task has a submit-retry lined up @@ -1470,7 +1470,7 @@ def _process_message_submitted( ) itask.set_summary_time('submitted', event_time) - if itask.run_mode == RunMode.SIMULATION.value: + if itask.run_mode == RunMode.SIMULATION: # Simulate job started as well. itask.set_summary_time('started', event_time) if itask.state_reset(TASK_STATUS_RUNNING, forced=forced): @@ -1507,7 +1507,7 @@ def _process_message_submitted( 'submitted', event_time, ) - if itask.run_mode == RunMode.SIMULATION.value: + if itask.run_mode == RunMode.SIMULATION: # Simulate job started as well. self.data_store_mgr.delta_job_time( job_tokens, diff --git a/cylc/flow/task_job_mgr.py b/cylc/flow/task_job_mgr.py index 0338c278e35..3d8db951890 100644 --- a/cylc/flow/task_job_mgr.py +++ b/cylc/flow/task_job_mgr.py @@ -248,7 +248,7 @@ def submit_task_jobs( itasks, curve_auth, client_pub_key_dir, - run_mode: Union[str, RunMode] = RunMode.LIVE, + run_mode: RunMode = RunMode.LIVE, ): """Prepare for job submission and submit task jobs. 
@@ -1028,7 +1028,7 @@ def submit_nonlive_task_jobs( self: 'TaskJobManager', workflow: str, itasks: 'List[TaskProxy]', - workflow_run_mode: Union[str, RunMode], + workflow_run_mode: RunMode, ) -> 'Tuple[List[TaskProxy], List[TaskProxy]]': """Identify task mode and carry out alternative submission paths if required: @@ -1058,14 +1058,19 @@ def submit_nonlive_task_jobs( # Get task config with broadcasts applied: rtconfig = self.task_events_mgr.broadcast_mgr.get_updated_rtconfig( itask) + # Apply task run mode - if workflow_run_mode in WORKFLOW_ONLY_MODES: + if workflow_run_mode.value in WORKFLOW_ONLY_MODES: # Task run mode cannot override workflow run-mode sim or dummy: run_mode = workflow_run_mode else: # If workflow mode is skip or live and task mode is set, # override workflow mode, else use workflow mode. - run_mode = rtconfig.get('run mode', None) or workflow_run_mode + run_mode = rtconfig.get('run mode', None) + if run_mode: + run_mode = RunMode(run_mode) + else: + run_mode = workflow_run_mode # Store the run mode of this submission: itask.run_mode = run_mode diff --git a/cylc/flow/task_pool.py b/cylc/flow/task_pool.py index 7aea9055137..fb9b34a28b9 100644 --- a/cylc/flow/task_pool.py +++ b/cylc/flow/task_pool.py @@ -1433,10 +1433,9 @@ def spawn_on_output(self, itask: TaskProxy, output: str) -> None: tasks = [c_task] for t in tasks: - t.satisfy_me( [itask.tokens.duplicate(task_sel=output)], - mode=itask.run_mode + mode=itask.run_mode # type: ignore ) self.data_store_mgr.delta_task_prerequisite(t) if not in_pool: @@ -1565,7 +1564,7 @@ def spawn_on_all_outputs( if completed_only: c_task.satisfy_me( [itask.tokens.duplicate(task_sel=message)], - mode=itask.run_mode + mode=itask.run_mode # type: ignore ) self.data_store_mgr.delta_task_prerequisite(c_task) self.add_to_pool(c_task) @@ -1996,7 +1995,7 @@ def _set_outputs_itask( rtconfig = bc_mgr.get_updated_rtconfig(itask) outputs.remove(RunMode.SKIP.value) skips = get_skip_mode_outputs(itask, rtconfig) - 
itask.run_mode = RunMode.SKIP.value + itask.run_mode = RunMode.SKIP outputs = self._standardise_outputs( itask.point, itask.tdef, outputs) outputs = list(set(outputs + skips)) diff --git a/cylc/flow/task_proxy.py b/cylc/flow/task_proxy.py index 5ceb8c47f16..8683ca7c6fd 100644 --- a/cylc/flow/task_proxy.py +++ b/cylc/flow/task_proxy.py @@ -31,7 +31,6 @@ Optional, Set, Tuple, - Union, ) from metomi.isodatetime.timezone import get_local_time_zone @@ -299,7 +298,7 @@ def __init__( self.graph_children = generate_graph_children(tdef, self.point) self.mode_settings: Optional['ModeSettings'] = None - self.run_mode: Optional[Union[str, RunMode]] = None + self.run_mode: Optional[RunMode] = None if self.tdef.expiration_offset is not None: self.expire_time = ( @@ -557,7 +556,7 @@ def state_reset( return False def satisfy_me( - self, task_messages: 'Iterable[Tokens]', mode=RunMode.LIVE.value + self, task_messages: 'Iterable[Tokens]', mode: "RunMode" = RunMode.LIVE ) -> 'Set[Tokens]': """Try to satisfy my prerequisites with given output messages. diff --git a/cylc/flow/task_state.py b/cylc/flow/task_state.py index 8447a7bed6d..bea07204cc4 100644 --- a/cylc/flow/task_state.py +++ b/cylc/flow/task_state.py @@ -41,6 +41,7 @@ from cylc.flow.cycling import PointBase from cylc.flow.id import Tokens from cylc.flow.prerequisite import PrereqMessage + from cylc.flow.run_modes import RunMode from cylc.flow.taskdef import TaskDef @@ -324,7 +325,7 @@ def __call__( def satisfy_me( self, outputs: Iterable['Tokens'], - mode, + mode: "RunMode", ) -> Set['Tokens']: """Try to satisfy my prerequisites with given outputs. 
diff --git a/cylc/flow/workflow_db_mgr.py b/cylc/flow/workflow_db_mgr.py index 5f1f673440c..b06367e37b3 100644 --- a/cylc/flow/workflow_db_mgr.py +++ b/cylc/flow/workflow_db_mgr.py @@ -344,11 +344,15 @@ def put_workflow_params(self, schd: 'Scheduler') -> None: value = getattr(schd.options, key, None) value = None if value == 'reload' else value self.put_workflow_params_1(key, value) - for key in ( + + self.put_workflow_params_1( + self.KEY_CYCLE_POINT_TIME_ZONE, + getattr(schd.options, self.KEY_CYCLE_POINT_TIME_ZONE, None), + ) + self.put_workflow_params_1( self.KEY_RUN_MODE, - self.KEY_CYCLE_POINT_TIME_ZONE - ): - self.put_workflow_params_1(key, getattr(schd.options, key, None)) + schd.get_run_mode().value, + ) def put_workflow_params_1( self, key: str, value: Union[AnyStr, float, None] diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 2f0aa5afab4..1bd06697cd0 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -28,6 +28,7 @@ from cylc.flow.option_parsers import Options from cylc.flow.pathutil import get_cylc_run_dir from cylc.flow.rundb import CylcWorkflowDAO +from cylc.flow.run_modes import RunMode from cylc.flow.scripts.validate import ValidateOptions from cylc.flow.scripts.install import ( install as cylc_install, @@ -686,7 +687,7 @@ def capture_live_submissions(capcall, monkeypatch): would have been submitted had this fixture not been used. 
""" def fake_submit(self, _workflow, itasks, *_): - self.submit_nonlive_task_jobs(_workflow, itasks, 'simulation') + self.submit_nonlive_task_jobs(_workflow, itasks, RunMode.SIMULATION) for itask in itasks: for status in (TASK_STATUS_SUBMITTED, TASK_STATUS_SUCCEEDED): self.task_events_mgr.process_message( diff --git a/tests/integration/run_modes/test_mode_overrides.py b/tests/integration/run_modes/test_mode_overrides.py index 7c75c7c69f1..ec426946244 100644 --- a/tests/integration/run_modes/test_mode_overrides.py +++ b/tests/integration/run_modes/test_mode_overrides.py @@ -31,17 +31,18 @@ import pytest from cylc.flow.cycling.iso8601 import ISO8601Point -from cylc.flow.run_modes import WORKFLOW_RUN_MODES +from cylc.flow.run_modes import WORKFLOW_RUN_MODES, RunMode -@pytest.mark.parametrize('workflow_run_mode', sorted(WORKFLOW_RUN_MODES)) +@pytest.mark.parametrize('workflow_run_mode', WORKFLOW_RUN_MODES) async def test_run_mode_override_from_config( capture_live_submissions, flow, scheduler, run, complete, - workflow_run_mode + workflow_run_mode, + validate ): """Test that `[runtime][]run mode` overrides workflow modes.""" id_ = flow({ @@ -51,11 +52,14 @@ async def test_run_mode_override_from_config( }, }, 'runtime': { + 'root': {'simulation': {'default run length': 'PT0S'}}, 'live': {'run mode': 'live'}, 'skip': {'run mode': 'skip'}, } }) - schd = scheduler(id_, run_mode=workflow_run_mode, paused_start=False) + run_mode = RunMode(workflow_run_mode) + validate(id_) + schd = scheduler(id_, run_mode=run_mode, paused_start=False) async with run(schd): await complete(schd) @@ -96,7 +100,7 @@ async def test_force_trigger_does_not_override_run_mode( schd.server.curve_auth, schd.server.client_pub_key_dir) - assert foo.run_mode == 'skip' + assert foo.run_mode.value == 'skip' async def test_run_mode_skip_abides_by_held( @@ -161,5 +165,5 @@ async def test_run_mode_override_from_broadcast( [foo_1000, foo_1001], schd.server.curve_auth, schd.server.client_pub_key_dir) - assert 
foo_1000.run_mode == 'skip' + assert foo_1000.run_mode.value == 'skip' assert capture_live_submissions() == {'1001/foo'} diff --git a/tests/integration/run_modes/test_nonlive.py b/tests/integration/run_modes/test_nonlive.py index eca02cf5018..90cefbf7701 100644 --- a/tests/integration/run_modes/test_nonlive.py +++ b/tests/integration/run_modes/test_nonlive.py @@ -110,8 +110,8 @@ async def test_db_task_jobs( submit_and_check_db(schd) - assert itask_live.run_mode == 'simulation' - assert itask_skip.run_mode == 'skip' + assert itask_live.run_mode.value == 'simulation' + assert itask_skip.run_mode.value == 'skip' async def test_db_task_states( diff --git a/tests/integration/run_modes/test_simulation.py b/tests/integration/run_modes/test_simulation.py index c78f6e139dc..100a30c46b1 100644 --- a/tests/integration/run_modes/test_simulation.py +++ b/tests/integration/run_modes/test_simulation.py @@ -22,6 +22,7 @@ from cylc.flow import commands from cylc.flow.cycling.iso8601 import ISO8601Point +from cylc.flow.run_modes import RunMode from cylc.flow.run_modes.simulation import sim_time_check @@ -63,7 +64,7 @@ def _run_simjob(schd, point, task): itask.state.is_queued = False monkeytime(0) schd.task_job_mgr.submit_nonlive_task_jobs( - schd.workflow, [itask], 'simulation') + schd.workflow, [itask], RunMode.SIMULATION) monkeytime(itask.mode_settings.timeout + 1) # Run Time Check @@ -171,7 +172,7 @@ def test_fail_once(sim_time_check_setup, itask, point, results, monkeypatch): for i, result in enumerate(results): itask.try_timers['execution-retry'].num = i schd.task_job_mgr.submit_nonlive_task_jobs( - schd.workflow, [itask], 'simulation') + schd.workflow, [itask], RunMode.SIMULATION) assert itask.mode_settings.sim_task_fails is result @@ -191,7 +192,7 @@ def test_task_finishes(sim_time_check_setup, monkeytime, caplog): fail_all_1066.state.status = 'running' fail_all_1066.state.is_queued = False schd.task_job_mgr.submit_nonlive_task_jobs( - schd.workflow, [fail_all_1066], 
'simulation') + schd.workflow, [fail_all_1066], RunMode.SIMULATION) # For the purpose of the test delete the started time set by # submit_nonlive_task_jobs. @@ -221,7 +222,7 @@ def test_task_sped_up(sim_time_check_setup, monkeytime): # Run the job submission method: monkeytime(0) schd.task_job_mgr.submit_nonlive_task_jobs( - schd.workflow, [fast_forward_1066], 'simulation') + schd.workflow, [fast_forward_1066], RunMode.SIMULATION) fast_forward_1066.state.is_queued = False result = sim_time_check(schd.task_events_mgr, [fast_forward_1066], '') @@ -235,7 +236,7 @@ def test_task_sped_up(sim_time_check_setup, monkeytime): async def test_settings_restart( - monkeytime, flow, scheduler, start + monkeytime, flow, scheduler, start,validate ): """Check that simulation mode settings are correctly restored upon restart. @@ -269,13 +270,12 @@ async def test_settings_restart( } }) schd = scheduler(id_) - # Start the workflow: async with start(schd): og_timeouts = {} for itask in schd.pool.get_tasks(): schd.task_job_mgr.submit_nonlive_task_jobs( - schd.workflow, [itask], 'simulation') + schd.workflow, [itask], RunMode.SIMULATION) og_timeouts[itask.identity] = itask.mode_settings.timeout @@ -285,9 +285,9 @@ async def test_settings_restart( schd.task_events_mgr, [itask], schd.workflow_db_mgr ) is False - # Stop and restart the scheduler: + # Stop and restart the scheduler: schd = scheduler(id_) - async with start(schd): + async with start(schd) as log: for itask in schd.pool.get_tasks(): # Check that we haven't got mode settings back: assert itask.mode_settings is None @@ -312,6 +312,7 @@ async def test_settings_restart( ) is False # Check that the itask.mode_settings is now re-created + assert itask.mode_settings.simulated_run_length == 60.0 assert itask.mode_settings.sim_task_fails is True @@ -393,7 +394,7 @@ async def test_settings_broadcast( # Submit the first - the sim task will fail: schd.task_job_mgr.submit_nonlive_task_jobs( - schd.workflow, [itask], 'simulation') + 
schd.workflow, [itask], RunMode.SIMULATION) assert itask.mode_settings.sim_task_fails is True # Let task finish. @@ -412,13 +413,13 @@ async def test_settings_broadcast( }]) # Submit again - result is different: schd.task_job_mgr.submit_nonlive_task_jobs( - schd.workflow, [itask], 'simulation') + schd.workflow, [itask], RunMode.SIMULATION) assert itask.mode_settings.sim_task_fails is False # Assert Clearing the broadcast works schd.broadcast_mgr.clear_broadcast() schd.task_job_mgr.submit_nonlive_task_jobs( - schd.workflow, [itask], 'simulation') + schd.workflow, [itask], RunMode.SIMULATION) assert itask.mode_settings.sim_task_fails is True # Assert that list of broadcasts doesn't change if we submit @@ -429,7 +430,7 @@ async def test_settings_broadcast( 'simulation': {'fail cycle points': 'higadfuhasgiurguj'} }]) schd.task_job_mgr.submit_nonlive_task_jobs( - schd.workflow, [itask], 'simulation') + schd.workflow, [itask], RunMode.SIMULATION) assert ( 'Invalid ISO 8601 date representation: higadfuhasgiurguj' in log.messages[-1]) @@ -443,7 +444,7 @@ async def test_settings_broadcast( 'simulation': {'fail cycle points': '1'} }]) schd.task_job_mgr.submit_nonlive_task_jobs( - schd.workflow, [itask], 'simulation') + schd.workflow, [itask], RunMode.SIMULATION) assert ( 'Invalid ISO 8601 date representation: 1' in log.messages[-1]) @@ -455,7 +456,7 @@ async def test_settings_broadcast( 'execution retry delays': '3*PT2S' }]) schd.task_job_mgr.submit_nonlive_task_jobs( - schd.workflow, [itask], 'simulation') + schd.workflow, [itask], RunMode.SIMULATION) assert itask.mode_settings.sim_task_fails is True assert itask.try_timers['execution-retry'].delays == [2.0, 2.0, 2.0] # n.b. 
rtconfig should remain unchanged, lest we cancel broadcasts: @@ -466,9 +467,12 @@ async def test_db_submit_num( flow, one_conf, scheduler, run, complete, db_select ): """Test simulation mode correctly increments the submit_num in the DB.""" + one_conf['runtime'] = { + 'one': {'simulation': {'default run length': 'PT0S'}} + } schd = scheduler(flow(one_conf), paused_start=False) async with run(schd): - await complete(schd, '1/one') + await complete(schd, '1/one', timeout=10) assert db_select(schd, False, 'task_states', 'submit_num', 'status') == [ (1, 'succeeded'), ] diff --git a/tests/integration/utils/flow_tools.py b/tests/integration/utils/flow_tools.py index 34b80a25882..7c94e2b38a4 100644 --- a/tests/integration/utils/flow_tools.py +++ b/tests/integration/utils/flow_tools.py @@ -31,6 +31,7 @@ from uuid import uuid1 from cylc.flow import CYLC_LOG +from cylc.flow.run_modes import RunMode from cylc.flow.workflow_files import WorkflowFiles from cylc.flow.scheduler import Scheduler, SchedulerStop from cylc.flow.scheduler_cli import RunOptions From b51ff068d4f0cc72c52c128d7dac794930144f38 Mon Sep 17 00:00:00 2001 From: Tim Pillinger <26465611+wxtim@users.noreply.github.com> Date: Mon, 21 Oct 2024 16:40:26 +0100 Subject: [PATCH 14/29] Update cylc/flow/task_outputs.py --- cylc/flow/task_outputs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cylc/flow/task_outputs.py b/cylc/flow/task_outputs.py index 1b8b4f2709a..a66e5ade638 100644 --- a/cylc/flow/task_outputs.py +++ b/cylc/flow/task_outputs.py @@ -634,7 +634,7 @@ def iter_required_messages( e.g. "completion = succeeded or failed". Args: - exclude: Exclude one possible required messages, allowing + exclude: Exclude one possible required message, allowing specification of all required outputs if succeeded or failed. 
""" for compvar, is_optional in get_optional_outputs( From d051fd1675b5a7881acca6cd20978a60cf62eb31 Mon Sep 17 00:00:00 2001 From: Tim Pillinger Date: Wed, 23 Oct 2024 10:15:29 +0100 Subject: [PATCH 15/29] fix outstanding broken test --- tests/unit/test_task_state.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/unit/test_task_state.py b/tests/unit/test_task_state.py index 1045a930423..7fc1feadcc6 100644 --- a/tests/unit/test_task_state.py +++ b/tests/unit/test_task_state.py @@ -19,7 +19,7 @@ from cylc.flow.taskdef import TaskDef from cylc.flow.cycling.integer import IntegerSequence, IntegerPoint -from cylc.flow.run_modes import disable_task_event_handlers +from cylc.flow.run_modes import RunMode, disable_task_event_handlers from cylc.flow.task_trigger import Dependency, TaskTrigger from cylc.flow.task_state import ( TaskState, @@ -140,7 +140,7 @@ def test_disable_task_event_handlers(itask_run_mode, disable_handlers, expect): """ # Construct a fake itask object: itask = SimpleNamespace( - run_mode=itask_run_mode, + run_mode=RunMode(itask_run_mode), platform={'disable task event handlers': disable_handlers}, tdef=SimpleNamespace( rtconfig={ From 509db85e51a2079fcb3a8ab7f47d1b86a2d225f2 Mon Sep 17 00:00:00 2001 From: Tim Pillinger Date: Wed, 23 Oct 2024 10:52:05 +0100 Subject: [PATCH 16/29] undo some overzealous fixes to mypy. --- cylc/flow/prerequisite.py | 8 +++++--- tests/flakyfunctional/database/00-simple.t | 2 +- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/cylc/flow/prerequisite.py b/cylc/flow/prerequisite.py index c58f54652ab..4dbb4001dc7 100644 --- a/cylc/flow/prerequisite.py +++ b/cylc/flow/prerequisite.py @@ -264,7 +264,7 @@ def _eval_satisfied(self) -> bool: def satisfy_me( self, outputs: Iterable['Tokens'], - mode: "RunMode" = RunMode.LIVE + mode: "Optional[Union[RunMode, str]]" = RunMode.LIVE ) -> 'Set[Tokens]': """Attempt to satisfy me with given outputs. 
@@ -274,9 +274,11 @@ def satisfy_me( """ satisfied_message: SatisfiedState - if mode != RunMode.LIVE: + if mode and mode != RunMode.LIVE: + # RunMode.value actually actually restricts the results in + # SatisfiedState, but MyPy does not recognize this. satisfied_message = self.DEP_STATE_SATISFIED_BY.format( - mode.value) # type: ignore + RunMode(mode).value) # type: ignore else: satisfied_message = self.DEP_STATE_SATISFIED valid = set() diff --git a/tests/flakyfunctional/database/00-simple.t b/tests/flakyfunctional/database/00-simple.t index c3f1ad19faf..832c35e46ec 100644 --- a/tests/flakyfunctional/database/00-simple.t +++ b/tests/flakyfunctional/database/00-simple.t @@ -49,7 +49,7 @@ fcp| icp|1 is_paused|0 n_restart|0 -run_mode| +run_mode|live startcp| stop_clock_time| stop_task| From 31a753aa8ed359484e0f7b62ec976fd341b0f4d3 Mon Sep 17 00:00:00 2001 From: Tim Pillinger Date: Wed, 23 Oct 2024 11:18:49 +0100 Subject: [PATCH 17/29] sort test parameters: Prevents distribution clashes --- tests/integration/run_modes/test_mode_overrides.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/run_modes/test_mode_overrides.py b/tests/integration/run_modes/test_mode_overrides.py index ec426946244..c28a2ff050f 100644 --- a/tests/integration/run_modes/test_mode_overrides.py +++ b/tests/integration/run_modes/test_mode_overrides.py @@ -34,7 +34,7 @@ from cylc.flow.run_modes import WORKFLOW_RUN_MODES, RunMode -@pytest.mark.parametrize('workflow_run_mode', WORKFLOW_RUN_MODES) +@pytest.mark.parametrize('workflow_run_mode', sorted(WORKFLOW_RUN_MODES)) async def test_run_mode_override_from_config( capture_live_submissions, flow, From 1f39b2cd712d328bb6e38ce3a98e1e09549b0b88 Mon Sep 17 00:00:00 2001 From: Tim Pillinger Date: Wed, 23 Oct 2024 11:36:29 +0100 Subject: [PATCH 18/29] restore error message on restarting in a different mode --- cylc/flow/scheduler.py | 2 +- tests/integration/test_mode_on_restart.py | 5 +++-- 2 files changed, 4 
insertions(+), 3 deletions(-) diff --git a/cylc/flow/scheduler.py b/cylc/flow/scheduler.py index 345be4b5335..b8604c54cfd 100644 --- a/cylc/flow/scheduler.py +++ b/cylc/flow/scheduler.py @@ -434,7 +434,7 @@ async def configure(self, params): if run_mode != og_run_mode: raise InputError( "This workflow was originally run in " - f"{run_mode.value} mode:" + f"{og_run_mode.value} mode:" f" Will not restart in {run_mode.value} mode." ) diff --git a/tests/integration/test_mode_on_restart.py b/tests/integration/test_mode_on_restart.py index 80628933918..0eadf4b4a92 100644 --- a/tests/integration/test_mode_on_restart.py +++ b/tests/integration/test_mode_on_restart.py @@ -20,6 +20,7 @@ from cylc.flow.exceptions import InputError from cylc.flow.scheduler import Scheduler +from cylc.flow.run_modes import RunMode MODES = [('live'), ('simulation'), ('dummy')] @@ -42,7 +43,7 @@ async def test_restart_mode( async with start(schd): if not mode_before: mode_before = 'live' - assert schd.get_run_mode() == mode_before + assert schd.get_run_mode().value == mode_before schd = scheduler(id_, run_mode=mode_after) @@ -52,7 +53,7 @@ async def test_restart_mode( ): # Restarting in the same mode is fine. 
async with run(schd): - assert schd.get_run_mode() == mode_before + assert schd.get_run_mode().value == mode_before else: # Restarting in a new mode is not: errormsg = f'^This.*{mode_before} mode: Will.*{mode_after} mode.$' From 7c386903856dd96145bfbafc22d1cd7b99a055e5 Mon Sep 17 00:00:00 2001 From: Tim Pillinger Date: Wed, 23 Oct 2024 11:50:42 +0100 Subject: [PATCH 19/29] use Enum correctly in a test --- tests/integration/test_task_events_mgr.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/integration/test_task_events_mgr.py b/tests/integration/test_task_events_mgr.py index 868d42c75ca..9d6585896f8 100644 --- a/tests/integration/test_task_events_mgr.py +++ b/tests/integration/test_task_events_mgr.py @@ -17,6 +17,7 @@ import logging from typing import Any as Fixture +from cylc.flow.run_modes import RunMode from cylc.flow.task_events_mgr import TaskJobLogsRetrieveContext from cylc.flow.scheduler import Scheduler from cylc.flow.data_store_mgr import ( @@ -154,7 +155,7 @@ async def test__always_insert_task_job( schd.pool.get_tasks(), schd.server.curve_auth, schd.server.client_pub_key_dir, - run_mode='live' + run_mode=RunMode('live') ) # Both tasks are in a waiting state: From 0c5e6398de8e706d2c4aa6c5cf1b9f2dbb431591 Mon Sep 17 00:00:00 2001 From: Tim Pillinger <26465611+wxtim@users.noreply.github.com> Date: Thu, 31 Oct 2024 09:25:14 +0000 Subject: [PATCH 20/29] Review suggestions Co-authored-by: Ronnie Dutta <61982285+MetRonnie@users.noreply.github.com> --- cylc/flow/prerequisite.py | 2 +- cylc/flow/task_pool.py | 2 +- cylc/flow/task_proxy.py | 4 +++- cylc/flow/task_state.py | 3 ++- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/cylc/flow/prerequisite.py b/cylc/flow/prerequisite.py index 4dbb4001dc7..94221fd7904 100644 --- a/cylc/flow/prerequisite.py +++ b/cylc/flow/prerequisite.py @@ -264,7 +264,7 @@ def _eval_satisfied(self) -> bool: def satisfy_me( self, outputs: Iterable['Tokens'], - mode: "Optional[Union[RunMode, str]]" 
= RunMode.LIVE + mode: Optional[RunMode] = RunMode.LIVE ) -> 'Set[Tokens]': """Attempt to satisfy me with given outputs. diff --git a/cylc/flow/task_pool.py b/cylc/flow/task_pool.py index fb9b34a28b9..eb622a86ba5 100644 --- a/cylc/flow/task_pool.py +++ b/cylc/flow/task_pool.py @@ -1564,7 +1564,7 @@ def spawn_on_all_outputs( if completed_only: c_task.satisfy_me( [itask.tokens.duplicate(task_sel=message)], - mode=itask.run_mode # type: ignore + mode=itask.run_mode ) self.data_store_mgr.delta_task_prerequisite(c_task) self.add_to_pool(c_task) diff --git a/cylc/flow/task_proxy.py b/cylc/flow/task_proxy.py index 8683ca7c6fd..5e88b19f891 100644 --- a/cylc/flow/task_proxy.py +++ b/cylc/flow/task_proxy.py @@ -556,7 +556,9 @@ def state_reset( return False def satisfy_me( - self, task_messages: 'Iterable[Tokens]', mode: "RunMode" = RunMode.LIVE + self, + task_messages: 'Iterable[Tokens]', + mode: Optional[RunMode] = RunMode.LIVE, ) -> 'Set[Tokens]': """Try to satisfy my prerequisites with given output messages. diff --git a/cylc/flow/task_state.py b/cylc/flow/task_state.py index bea07204cc4..6314719a25a 100644 --- a/cylc/flow/task_state.py +++ b/cylc/flow/task_state.py @@ -19,6 +19,7 @@ from typing import ( TYPE_CHECKING, Dict, + Optional, Iterable, List, Set, @@ -325,7 +326,7 @@ def __call__( def satisfy_me( self, outputs: Iterable['Tokens'], - mode: "RunMode", + mode: "Optional[RunMode]", ) -> Set['Tokens']: """Try to satisfy my prerequisites with given outputs. 
From 2ec57e96e81b1e006873280f37d6d00ed09e7452 Mon Sep 17 00:00:00 2001 From: Tim Pillinger Date: Thu, 31 Oct 2024 10:16:06 +0000 Subject: [PATCH 21/29] remove un-needed check for itask.run_mode --- cylc/flow/task_events_mgr.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/cylc/flow/task_events_mgr.py b/cylc/flow/task_events_mgr.py index 59d3ff1bd2b..8ed2a89f25f 100644 --- a/cylc/flow/task_events_mgr.py +++ b/cylc/flow/task_events_mgr.py @@ -1541,8 +1541,7 @@ def _insert_task_job( # And transient tasks, used for setting outputs and spawning children, # do not submit jobs. if ( - not itask.run_mode - or itask.run_mode in JOBLESS_MODES + itask.run_mode in JOBLESS_MODES or forced ): job_conf = {"submit_num": itask.submit_num} From 51f6585dd26818fd6ac4610c344239188e588cce Mon Sep 17 00:00:00 2001 From: Tim Pillinger Date: Thu, 31 Oct 2024 12:14:46 +0000 Subject: [PATCH 22/29] restore validate --run-mode --- cylc/flow/config.py | 6 +++++ cylc/flow/run_modes/simulation.py | 20 ++++++++++++---- cylc/flow/scheduler_cli.py | 2 +- cylc/flow/scripts/validate.py | 5 ++++ cylc/flow/task_events_mgr.py | 2 +- tests/integration/test_config.py | 29 +++++++++++++++++++++++ tests/integration/test_task_events_mgr.py | 1 + 7 files changed, 59 insertions(+), 6 deletions(-) diff --git a/cylc/flow/config.py b/cylc/flow/config.py index 4196568e783..11fadf831c0 100644 --- a/cylc/flow/config.py +++ b/cylc/flow/config.py @@ -82,6 +82,7 @@ ) from cylc.flow.print_tree import print_tree from cylc.flow.task_qualifiers import ALT_QUALIFIERS +from cylc.flow.run_modes.simulation import configure_sim_mode from cylc.flow.run_modes.skip import skip_mode_validate from cylc.flow.subprocctx import SubFuncContext from cylc.flow.task_events_mgr import ( @@ -513,6 +514,11 @@ def __init__( self.process_runahead_limit() + run_mode = RunMode.get(self.options) + if run_mode in {RunMode.SIMULATION, RunMode.DUMMY}: + for taskdef in self.taskdefs.values(): + configure_sim_mode(taskdef.rtconfig, 
None, False) + self.configure_workflow_state_polling_tasks() self._check_task_event_handlers() diff --git a/cylc/flow/run_modes/simulation.py b/cylc/flow/run_modes/simulation.py index 047a4526ed7..47f234b0b2b 100644 --- a/cylc/flow/run_modes/simulation.py +++ b/cylc/flow/run_modes/simulation.py @@ -189,7 +189,7 @@ def __init__( self.timeout = started_time + self.simulated_run_length -def configure_sim_mode(rtc, fallback): +def configure_sim_mode(rtc, fallback, warnonly: bool = True): """Adjust task defs for simulation mode. Example: @@ -209,6 +209,13 @@ def configure_sim_mode(rtc, fallback): >>> rtc['platform'] 'localhost' """ + if not warnonly: + parse_fail_cycle_points( + rtc["simulation"]["fail cycle points"], + fallback, + warnonly + ) + return rtc['submission retry delays'] = [1] disable_platforms(rtc) @@ -220,7 +227,8 @@ def configure_sim_mode(rtc, fallback): "fail cycle points" ] = parse_fail_cycle_points( rtc["simulation"]["fail cycle points"], - fallback + fallback, + warnonly ) @@ -265,6 +273,7 @@ def disable_platforms( def parse_fail_cycle_points( fail_at_points_updated: List[str], fail_at_points_config, + warnonly: bool = True ) -> 'Union[None, List[PointBase]]': """Parse `[simulation][fail cycle points]`. @@ -302,8 +311,11 @@ def parse_fail_cycle_points( try: fail_at_points.append(get_point(point_str).standardise()) except PointParsingError as exc: - LOG.warning(exc.args[0]) - return fail_at_points_config + if warnonly: + LOG.warning(exc.args[0]) + return fail_at_points_config + else: + raise exc return fail_at_points diff --git a/cylc/flow/scheduler_cli.py b/cylc/flow/scheduler_cli.py index 739f26892c6..0406a63c441 100644 --- a/cylc/flow/scheduler_cli.py +++ b/cylc/flow/scheduler_cli.py @@ -130,7 +130,7 @@ RUN_MODE = OptionSettings( ["-m", "--mode"], help=( - f"Run mode: {WORKFLOW_RUN_MODES} (default live)." + f"Run mode: {sorted(WORKFLOW_RUN_MODES)} (default live)." " Live mode executes the tasks as defined in the runtime" " section." 
" Simulation, skip and dummy modes ignore part of tasks'" diff --git a/cylc/flow/scripts/validate.py b/cylc/flow/scripts/validate.py index 443557375cd..47bc5e89547 100755 --- a/cylc/flow/scripts/validate.py +++ b/cylc/flow/scripts/validate.py @@ -51,6 +51,7 @@ ICP_OPTION, ) from cylc.flow.profiler import Profiler +from cylc.flow.scheduler_cli import RUN_MODE from cylc.flow.task_proxy import TaskProxy from cylc.flow.templatevars import get_template_vars from cylc.flow.terminal import cli_function @@ -60,6 +61,8 @@ from cylc.flow.option_parsers import Values +VALIDATE_RUN_MODE = deepcopy(RUN_MODE) +VALIDATE_RUN_MODE.sources = {'validate'} VALIDATE_ICP_OPTION = deepcopy(ICP_OPTION) VALIDATE_ICP_OPTION.sources = {'validate'} VALIDATE_AGAINST_SOURCE_OPTION = deepcopy(AGAINST_SOURCE_OPTION) @@ -95,6 +98,7 @@ dest="profile_mode", sources={'validate'} ), + VALIDATE_RUN_MODE, VALIDATE_ICP_OPTION, ] @@ -149,6 +153,7 @@ async def run( src=True, constraint='workflows', ) + cfg = WorkflowConfig( workflow_id, flow_file, diff --git a/cylc/flow/task_events_mgr.py b/cylc/flow/task_events_mgr.py index 8ed2a89f25f..e60adc88f7e 100644 --- a/cylc/flow/task_events_mgr.py +++ b/cylc/flow/task_events_mgr.py @@ -1541,7 +1541,7 @@ def _insert_task_job( # And transient tasks, used for setting outputs and spawning children, # do not submit jobs. 
if ( - itask.run_mode in JOBLESS_MODES + itask.run_mode and itask.run_mode.value in JOBLESS_MODES or forced ): job_conf = {"submit_num": itask.submit_num} diff --git a/tests/integration/test_config.py b/tests/integration/test_config.py index e10ecf6b64f..4a1a2dc11c4 100644 --- a/tests/integration/test_config.py +++ b/tests/integration/test_config.py @@ -24,6 +24,7 @@ from cylc.flow.cfgspec.glbl_cfg import glbl_cfg from cylc.flow.cfgspec.globalcfg import GlobalConfig from cylc.flow.exceptions import ( + PointParsingError, ServiceFileError, WorkflowConfigError, XtriggerConfigError, @@ -628,3 +629,31 @@ def test_skip_forbidden_as_output(flow, validate): }) with pytest.raises(WorkflowConfigError, match='message for skip'): validate(wid) + + +def test_validate_workflow_run_mode(flow: Fixture, validate: Fixture, caplog: Fixture): + """Test that Cylc validate will only check simulation mode settings + if validate --mode simulation or dummy. + Discovered in: + https://github.com/cylc/cylc-flow/pull/6213#issuecomment-2225365825 + """ + wid = flow( + { + 'scheduling': {'graph': {'R1': 'mytask'}}, + 'runtime': { + 'mytask': { + 'simulation': {'fail cycle points': 'invalid'}, + } + }, + } + ) + + validate(wid) + + # It fails with run mode simulation: + with pytest.raises(PointParsingError, match='Incompatible value'): + validate(wid, run_mode='simulation') + + # It fails with run mode dummy: + with pytest.raises(PointParsingError, match='Incompatible value'): + validate(wid, run_mode='dummy') diff --git a/tests/integration/test_task_events_mgr.py b/tests/integration/test_task_events_mgr.py index 9d6585896f8..ac1fb2f9344 100644 --- a/tests/integration/test_task_events_mgr.py +++ b/tests/integration/test_task_events_mgr.py @@ -94,6 +94,7 @@ async def test__insert_task_job(flow, one_conf, scheduler, start, validate): itask = schd.pool.get_tasks()[0] itask.state.status = 'running' itask.submit_num += 1 + itask.run_mode = RunMode.SIMULATION # Not run _insert_task_job yet: assert 
not schd.data_store_mgr.added['jobs'].keys() From 40183c32402d8bad5b894175d6788febd30da317 Mon Sep 17 00:00:00 2001 From: Tim Pillinger Date: Thu, 31 Oct 2024 15:00:03 +0000 Subject: [PATCH 23/29] doc changes --- cylc/flow/scheduler_cli.py | 4 ++-- cylc/flow/task_outputs.py | 8 +++++--- cylc/flow/task_pool.py | 2 +- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/cylc/flow/scheduler_cli.py b/cylc/flow/scheduler_cli.py index 0406a63c441..9579eda011e 100644 --- a/cylc/flow/scheduler_cli.py +++ b/cylc/flow/scheduler_cli.py @@ -133,9 +133,9 @@ f"Run mode: {sorted(WORKFLOW_RUN_MODES)} (default live)." " Live mode executes the tasks as defined in the runtime" " section." - " Simulation, skip and dummy modes ignore part of tasks'" + " Simulation and dummy modes ignore part of tasks'" " runtime configurations. Simulation and dummy modes are" - " designed for testing, and skip mode is for flow control." + " designed for testing." ), metavar="STRING", action='store', dest="run_mode", choices=list(WORKFLOW_RUN_MODES), diff --git a/cylc/flow/task_outputs.py b/cylc/flow/task_outputs.py index a66e5ade638..c6867f0b090 100644 --- a/cylc/flow/task_outputs.py +++ b/cylc/flow/task_outputs.py @@ -37,6 +37,7 @@ if TYPE_CHECKING: from cylc.flow.taskdef import TaskDef + from typing_extensions import Literal # Standard task output strings, used for triggering. @@ -626,7 +627,7 @@ def _is_compvar_complete(self, compvar: str) -> Optional[bool]: def iter_required_messages( self, - exclude=None + exclude: 'Optional[Literal["succeeded", "failed"]]' = None ) -> Iterator[str]: """Yield task messages that are required for this task to be complete. @@ -634,8 +635,9 @@ def iter_required_messages( e.g. "completion = succeeded or failed". Args: - exclude: Exclude one possible required message, allowing - specification of all required outputs if succeeded or failed. 
+ exclude: Don't check wether this output is required for + completion - in skip mode we only want to check either + succeeded or failed, but not both. """ for compvar, is_optional in get_optional_outputs( self._completion_expression, diff --git a/cylc/flow/task_pool.py b/cylc/flow/task_pool.py index eb622a86ba5..4113c44beed 100644 --- a/cylc/flow/task_pool.py +++ b/cylc/flow/task_pool.py @@ -1435,7 +1435,7 @@ def spawn_on_output(self, itask: TaskProxy, output: str) -> None: for t in tasks: t.satisfy_me( [itask.tokens.duplicate(task_sel=output)], - mode=itask.run_mode # type: ignore + mode=itask.run_mode ) self.data_store_mgr.delta_task_prerequisite(t) if not in_pool: From f9e8a830514288a9475d2aaa262846fc86810f15 Mon Sep 17 00:00:00 2001 From: Ronnie Dutta <61982285+MetRonnie@users.noreply.github.com> Date: Tue, 5 Nov 2024 13:50:53 +0000 Subject: [PATCH 24/29] Partially revert changes to `Prerequisite` (#64) --- cylc/flow/prerequisite.py | 25 +++++++------------------ tests/integration/test_task_pool.py | 20 ++++++++------------ 2 files changed, 15 insertions(+), 30 deletions(-) diff --git a/cylc/flow/prerequisite.py b/cylc/flow/prerequisite.py index 94221fd7904..1edf7aa1dac 100644 --- a/cylc/flow/prerequisite.py +++ b/cylc/flow/prerequisite.py @@ -74,7 +74,6 @@ def coerce(tuple_: AnyPrereqMessage) -> 'PrereqMessage': 'satisfied naturally', 'satisfied from database', 'satisfied by skip mode', - 'satisfied by simulation mode', 'force satisfied', False ] @@ -104,12 +103,6 @@ class Prerequisite: SATISFIED_TEMPLATE = 'bool(self._satisfied[("%s", "%s", "%s")])' MESSAGE_TEMPLATE = r'%s/%s %s' - DEP_STATE_SATISFIED: SatisfiedState = 'satisfied naturally' - DEP_STATE_SATISFIED_BY = 'satisfied by {} mode' - DEP_STATE_OVERRIDDEN = 'force satisfied' - DEP_STATE_UNSATISFIED = False - SATISFIED_MODE_RE = re.compile(r'satisfied by .* mode') - def __init__(self, point: 'PointBase'): # The cycle point to which this prerequisite belongs. 
# cylc.flow.cycling.PointBase @@ -263,8 +256,9 @@ def _eval_satisfied(self) -> bool: return res def satisfy_me( - self, outputs: Iterable['Tokens'], - mode: Optional[RunMode] = RunMode.LIVE + self, + outputs: Iterable['Tokens'], + mode: Optional[RunMode] = None, ) -> 'Set[Tokens]': """Attempt to satisfy me with given outputs. @@ -272,15 +266,10 @@ def satisfy_me( Return outputs that match. """ - satisfied_message: SatisfiedState - - if mode and mode != RunMode.LIVE: - # RunMode.value actually actually restricts the results in - # SatisfiedState, but MyPy does not recognize this. - satisfied_message = self.DEP_STATE_SATISFIED_BY.format( - RunMode(mode).value) # type: ignore - else: - satisfied_message = self.DEP_STATE_SATISFIED + satisfied_message: SatisfiedState = ( + 'satisfied by skip mode' if mode == RunMode.SKIP + else 'satisfied naturally' + ) valid = set() for output in outputs: prereq = PrereqMessage( diff --git a/tests/integration/test_task_pool.py b/tests/integration/test_task_pool.py index 6af4f92813b..483b67cad20 100644 --- a/tests/integration/test_task_pool.py +++ b/tests/integration/test_task_pool.py @@ -645,8 +645,7 @@ def list_tasks(schd): ('1', 'z', 'waiting'), ], [ - {('1', 'a', 'succeeded'): - 'satisfied by simulation mode'}, + {('1', 'a', 'succeeded'): 'satisfied naturally'}, {('1', 'b', 'succeeded'): False}, {('1', 'c', 'succeeded'): False}, ], @@ -674,8 +673,7 @@ def list_tasks(schd): ('1', 'z', 'waiting'), ], [ - {('1', 'a', 'succeeded'): - 'satisfied by simulation mode'}, + {('1', 'a', 'succeeded'): 'satisfied naturally'}, {('1', 'b', 'succeeded'): False}, ], id='removed' @@ -770,8 +768,7 @@ async def test_restart_prereqs( ('1', 'z', 'waiting'), ], [ - {('1', 'a', 'succeeded'): - 'satisfied by simulation mode'}, + {('1', 'a', 'succeeded'): 'satisfied naturally'}, {('1', 'b', 'succeeded'): False}, {('1', 'c', 'succeeded'): False}, ], @@ -799,8 +796,7 @@ async def test_restart_prereqs( ('1', 'z', 'waiting'), ], [ - {('1', 'a', 'succeeded'): - 
'satisfied by simulation mode'}, + {('1', 'a', 'succeeded'): 'satisfied naturally'}, {('1', 'b', 'succeeded'): False}, ], id='removed' @@ -899,7 +895,7 @@ async def _test_restart_prereqs_sat(): for prereq in task_c.state.prerequisites for key, satisfied in prereq.items() ) == [ - ('1', 'a', 'succeeded', 'satisfied by simulation mode'), + ('1', 'a', 'succeeded', 'satisfied naturally'), ('1', 'b', 'succeeded', 'satisfied from database') ] @@ -916,7 +912,7 @@ async def _test_restart_prereqs_sat(): for prereq in task_c_prereqs for condition in prereq.conditions ) == [ - ('1/a', True, 'satisfied by simulation mode'), + ('1/a', True, 'satisfied naturally'), ('1/b', True, 'satisfied from database'), ] @@ -2255,7 +2251,7 @@ async def list_data_store(): 'c': 'wall_clock(trigger_time=946688400)', } - + async def test_trigger_unqueued(flow, scheduler, start): """Test triggering an unqueued active task. @@ -2320,7 +2316,7 @@ async def test_expire_dequeue_with_retries(flow, scheduler, start, expire_type): if expire_type == 'clock-expire': conf['scheduling']['special tasks'] = {'clock-expire': 'foo(PT0S)'} - method = lambda schd: schd.pool.clock_expire_tasks() + method = lambda schd: schd.pool.clock_expire_tasks() else: method = lambda schd: schd.pool.set_prereqs_and_outputs( ['2000/foo'], prereqs=[], outputs=['expired'], flow=['1'] From 48c69fbfe5f0392b6ad6e469caf66b54251d0cd4 Mon Sep 17 00:00:00 2001 From: Tim Pillinger <26465611+wxtim@users.noreply.github.com> Date: Tue, 12 Nov 2024 10:33:40 +0000 Subject: [PATCH 25/29] Apply suggestions from code review Co-authored-by: Ronnie Dutta <61982285+MetRonnie@users.noreply.github.com> --- cylc/flow/prerequisite.py | 9 ++++----- cylc/flow/task_job_mgr.py | 10 +++------- 2 files changed, 7 insertions(+), 12 deletions(-) diff --git a/cylc/flow/prerequisite.py b/cylc/flow/prerequisite.py index 1edf7aa1dac..cd1b17442e2 100644 --- a/cylc/flow/prerequisite.py +++ b/cylc/flow/prerequisite.py @@ -266,10 +266,6 @@ def satisfy_me( Return 
outputs that match. """ - satisfied_message: SatisfiedState = ( - 'satisfied by skip mode' if mode == RunMode.SKIP - else 'satisfied naturally' - ) valid = set() for output in outputs: prereq = PrereqMessage( @@ -278,7 +274,10 @@ def satisfy_me( if prereq not in self._satisfied: continue valid.add(output) - self[prereq] = satisfied_message + self[prereq] = ( + 'satisfied by skip mode' if mode == RunMode.SKIP + else 'satisfied naturally' + ) return valid def api_dump(self) -> Optional[PbPrerequisite]: diff --git a/cylc/flow/task_job_mgr.py b/cylc/flow/task_job_mgr.py index 3d8db951890..886e8d33b50 100644 --- a/cylc/flow/task_job_mgr.py +++ b/cylc/flow/task_job_mgr.py @@ -1062,15 +1062,11 @@ def submit_nonlive_task_jobs( # Apply task run mode if workflow_run_mode.value in WORKFLOW_ONLY_MODES: # Task run mode cannot override workflow run-mode sim or dummy: - run_mode = workflow_run_mode + run_mode: RunMode = workflow_run_mode else: # If workflow mode is skip or live and task mode is set, # override workflow mode, else use workflow mode. - run_mode = rtconfig.get('run mode', None) - if run_mode: - run_mode = RunMode(run_mode) - else: - run_mode = workflow_run_mode + run_mode = RunMode(rtconfig.get('run mode', workflow_run_mode)) # Store the run mode of the this submission: itask.run_mode = run_mode @@ -1079,7 +1075,7 @@ def submit_nonlive_task_jobs( # tasks to list of tasks to put through live # submission pipeline - We decide based on the output # of the submit method: - submit_func = RunMode(run_mode).get_submit_method() + submit_func = run_mode.get_submit_method() if not submit_func: # Return to nonlive. 
nonlive_mode = False From 0cc5b9ef0cfd53ff10e6a54ddc209ce6530a458e Mon Sep 17 00:00:00 2001 From: Tim Pillinger <26465611+wxtim@users.noreply.github.com> Date: Tue, 12 Nov 2024 11:00:48 +0000 Subject: [PATCH 26/29] fix merge --- cylc/flow/scripts/validate.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/cylc/flow/scripts/validate.py b/cylc/flow/scripts/validate.py index f9d570f9856..edabd6bdab2 100755 --- a/cylc/flow/scripts/validate.py +++ b/cylc/flow/scripts/validate.py @@ -57,9 +57,7 @@ from cylc.flow.templatevars import get_template_vars from cylc.flow.terminal import cli_function from cylc.flow.run_modes import RunMode -from cylc.flow.scheduler_cli import RUN_MODE from cylc.flow.workflow_files import get_workflow_run_dir -from cylc.flow.workflow_status import RunMode if TYPE_CHECKING: from cylc.flow.option_parsers import Values From 3f96ce702073bc81b7e169514a42503c7396f264 Mon Sep 17 00:00:00 2001 From: Tim Pillinger <26465611+wxtim@users.noreply.github.com> Date: Tue, 12 Nov 2024 11:08:50 +0000 Subject: [PATCH 27/29] remove now not needed taskdef.runmode infor --- cylc/flow/config.py | 1 - cylc/flow/taskdef.py | 5 ++--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/cylc/flow/config.py b/cylc/flow/config.py index 11fadf831c0..dcc45278dd7 100644 --- a/cylc/flow/config.py +++ b/cylc/flow/config.py @@ -2498,7 +2498,6 @@ def _get_taskdef(self, name: str) -> TaskDef: taskd = TaskDef( name, rtcfg, - RunMode.get(self.options), self.start_point, self.initial_point) diff --git a/cylc/flow/taskdef.py b/cylc/flow/taskdef.py index ec7ea0bdade..9dbecf1aa16 100644 --- a/cylc/flow/taskdef.py +++ b/cylc/flow/taskdef.py @@ -151,7 +151,7 @@ class TaskDef: # Memory optimization - constrain possible attributes to this list. 
__slots__ = [ - "run_mode", "rtconfig", "start_point", "initial_point", "sequences", + "rtconfig", "start_point", "initial_point", "sequences", "used_in_offset_trigger", "max_future_prereq_offset", "sequential", "is_coldstart", "workflow_polling_cfg", "expiration_offset", @@ -162,11 +162,10 @@ class TaskDef: # Store the elapsed times for a maximum of 10 cycles MAX_LEN_ELAPSED_TIMES = 10 - def __init__(self, name, rtcfg, run_mode, start_point, initial_point): + def __init__(self, name, rtcfg, start_point, initial_point): if not TaskID.is_valid_name(name): raise TaskDefError("Illegal task name: %s" % name) - self.run_mode = run_mode self.rtconfig = rtcfg self.start_point = start_point self.initial_point = initial_point From 995ab27a9e96154f1028e7facec77029b6c99b76 Mon Sep 17 00:00:00 2001 From: Tim Pillinger <26465611+wxtim@users.noreply.github.com> Date: Tue, 12 Nov 2024 11:15:48 +0000 Subject: [PATCH 28/29] Response to review --- cylc/flow/run_modes/simulation.py | 2 -- cylc/flow/scheduler.py | 4 ++-- cylc/flow/task_job_mgr.py | 11 +++-------- cylc/flow/task_pool.py | 6 +++--- cylc/flow/taskdef.py | 2 +- tests/unit/run_modes/test_skip_units.py | 17 +++++++++++------ tests/unit/test_task_state.py | 8 ++++---- tests/unit/test_xtrigger_mgr.py | 2 -- 8 files changed, 24 insertions(+), 28 deletions(-) diff --git a/cylc/flow/run_modes/simulation.py b/cylc/flow/run_modes/simulation.py index 47f234b0b2b..900a2c1fc4f 100644 --- a/cylc/flow/run_modes/simulation.py +++ b/cylc/flow/run_modes/simulation.py @@ -76,8 +76,6 @@ def submit_task_job( 'name': RunMode.SIMULATION.value, 'install target': 'localhost', 'hosts': ['localhost'], - 'disable task event handlers': - rtconfig['simulation']['disable task event handlers'], 'submission retry delays': [], 'execution retry delays': [] } diff --git a/cylc/flow/scheduler.py b/cylc/flow/scheduler.py index 7f924674c39..3632948fae3 100644 --- a/cylc/flow/scheduler.py +++ b/cylc/flow/scheduler.py @@ -114,7 +114,7 @@ from 
cylc.flow.workflow_db_mgr import WorkflowDatabaseManager from cylc.flow.workflow_events import WorkflowEventHandler from cylc.flow.workflow_status import StopMode, AutoRestartMode -from cylc.flow.run_modes import RunMode, WORKFLOW_ONLY_MODES +from cylc.flow.run_modes import RunMode from cylc.flow.taskdef import TaskDef from cylc.flow.task_events_mgr import TaskEventsManager from cylc.flow.task_job_mgr import TaskJobManager @@ -1264,7 +1264,7 @@ def run_event_handlers(self, event, reason=""): Run workflow events only in live mode or skip mode. """ - if self.get_run_mode().value in WORKFLOW_ONLY_MODES: + if self.get_run_mode() in {RunMode.SIMULATION, RunMode.DUMMY}: return self.workflow_event_handler.handle(self, event, str(reason)) diff --git a/cylc/flow/task_job_mgr.py b/cylc/flow/task_job_mgr.py index 1e5eff31161..ec8c84690dc 100644 --- a/cylc/flow/task_job_mgr.py +++ b/cylc/flow/task_job_mgr.py @@ -1074,14 +1074,9 @@ def submit_nonlive_task_jobs( # submission pipeline - We decide based on the output # of the submit method: submit_func = run_mode.get_submit_method() - if not submit_func: - # Return to nonlive. 
- nonlive_mode = False - else: - nonlive_mode = submit_func( - self, itask, rtconfig, workflow, now) - - if nonlive_mode: + if submit_func and submit_func( + self, itask, rtconfig, workflow, now + ): self.workflow_db_mgr.put_insert_task_states( itask, { diff --git a/cylc/flow/task_pool.py b/cylc/flow/task_pool.py index 0aab292b08a..8795e2814de 100644 --- a/cylc/flow/task_pool.py +++ b/cylc/flow/task_pool.py @@ -1993,12 +1993,12 @@ def _set_outputs_itask( # Check for broadcasts to task: bc_mgr = self.task_events_mgr.broadcast_mgr rtconfig = bc_mgr.get_updated_rtconfig(itask) - outputs.remove(RunMode.SKIP.value) skips = get_skip_mode_outputs(itask, rtconfig) itask.run_mode = RunMode.SKIP outputs = self._standardise_outputs( - itask.point, itask.tdef, outputs) - outputs = list(set(outputs + skips)) + itask.point, itask.tdef, outputs + ) + outputs = list(set(outputs + skips) - {RunMode.SKIP.value}) for output in sorted(outputs, key=itask.state.outputs.output_sort_key): if itask.state.outputs.is_message_complete(output): diff --git a/cylc/flow/taskdef.py b/cylc/flow/taskdef.py index 9dbecf1aa16..34461ca0d6f 100644 --- a/cylc/flow/taskdef.py +++ b/cylc/flow/taskdef.py @@ -409,7 +409,7 @@ def is_parentless(self, point): def __repr__(self) -> str: """ >>> TaskDef( - ... name='oliver', rtcfg={}, run_mode='fake', start_point='1', + ... name='oliver', rtcfg={}, start_point='1', ... initial_point='1' ... ) diff --git a/tests/unit/run_modes/test_skip_units.py b/tests/unit/run_modes/test_skip_units.py index 9c580117739..0b5316662ad 100644 --- a/tests/unit/run_modes/test_skip_units.py +++ b/tests/unit/run_modes/test_skip_units.py @@ -15,6 +15,7 @@ # along with this program. If not, see . 
"""Unit tests for utilities supporting skip modes """ +import logging import pytest from pytest import param, raises from types import SimpleNamespace @@ -105,7 +106,7 @@ def test_process_outputs(outputs, required, expect): assert process_outputs(itask, rtconf) == ['submitted', 'started'] + expect -def test_skip_mode_validate(monkeypatch, caplog): +def test_skip_mode_validate(caplog, log_filter): """It warns us if we've set a task config to nonlive mode. (And not otherwise) @@ -128,8 +129,12 @@ def test_skip_mode_validate(monkeypatch, caplog): skip_mode_validate(taskdefs) - message = caplog.messages[0] - - assert 'skip mode:\n * skip_task' in message - assert ' live mode' not in message # Avoid matching "non-live mode" - assert 'workflow mode' not in message + assert len(caplog.records) == 1 + assert log_filter( + level=logging.WARNING, + exact_match=( + "The following tasks are set to run in skip mode:\n" + " * skip_task" + ), + log=caplog + ) diff --git a/tests/unit/test_task_state.py b/tests/unit/test_task_state.py index 7fc1feadcc6..7350a9aed74 100644 --- a/tests/unit/test_task_state.py +++ b/tests/unit/test_task_state.py @@ -41,7 +41,7 @@ ) def test_state_comparison(state, is_held): """Test the __call__ method.""" - tdef = TaskDef('foo', {}, 'live', '123', '123') + tdef = TaskDef('foo', {}, '123', '123') tstate = TaskState(tdef, '123', state, is_held) assert tstate(state, is_held=is_held) @@ -72,7 +72,7 @@ def test_state_comparison(state, is_held): ) def test_reset(state, is_held, should_reset): """Test that tasks do or don't have their state changed.""" - tdef = TaskDef('foo', {}, 'live', '123', '123') + tdef = TaskDef('foo', {}, '123', '123') # create task state: # * status: waiting # * is_held: true @@ -96,7 +96,7 @@ def test_task_prereq_duplicates(set_cycling_type): dep = Dependency([trig], [trig], False) - tdef = TaskDef('foo', {}, 'live', IntegerPoint("1"), IntegerPoint("1")) + tdef = TaskDef('foo', {}, IntegerPoint("1"), IntegerPoint("1")) 
tdef.add_dependency(dep, seq1) tdef.add_dependency(dep, seq2) # duplicate! @@ -110,7 +110,7 @@ def test_task_prereq_duplicates(set_cycling_type): def test_task_state_order(): """Test is_gt and is_gte methods.""" - tdef = TaskDef('foo', {}, 'live', IntegerPoint("1"), IntegerPoint("1")) + tdef = TaskDef('foo', {}, IntegerPoint("1"), IntegerPoint("1")) tstate = TaskState(tdef, IntegerPoint("1"), TASK_STATUS_SUBMITTED, False) assert tstate.is_gt(TASK_STATUS_WAITING) diff --git a/tests/unit/test_xtrigger_mgr.py b/tests/unit/test_xtrigger_mgr.py index 276fd354a95..2ec207cf25d 100644 --- a/tests/unit/test_xtrigger_mgr.py +++ b/tests/unit/test_xtrigger_mgr.py @@ -178,7 +178,6 @@ def test_housekeeping_with_xtrigger_satisfied(xtrigger_mgr): tdef = TaskDef( name="foo", rtcfg={'completion': None}, - run_mode="live", start_point=1, initial_point=1, ) @@ -232,7 +231,6 @@ def test__call_xtriggers_async(xtrigger_mgr): tdef = TaskDef( name="foo", rtcfg={'completion': None}, - run_mode="live", start_point=1, initial_point=1 ) From 60a75b381d06ce5dd8ac19bb048b7208a66da900 Mon Sep 17 00:00:00 2001 From: Ronnie Dutta <61982285+MetRonnie@users.noreply.github.com> Date: Thu, 14 Nov 2024 15:26:06 +0000 Subject: [PATCH 29/29] Clear up terminology (#66) * f * f * Clear up terminology --------- Co-authored-by: Tim Pillinger <26465611+wxtim@users.noreply.github.com> --- cylc/flow/run_modes/skip.py | 7 ++--- cylc/flow/task_outputs.py | 28 ++++++++++++------- tests/unit/run_modes/test_skip_units.py | 4 +-- tests/unit/test_task_outputs.py | 36 ++++++++++++++++--------- 4 files changed, 48 insertions(+), 27 deletions(-) diff --git a/cylc/flow/run_modes/skip.py b/cylc/flow/run_modes/skip.py index 7e3f4310ee5..49736883911 100644 --- a/cylc/flow/run_modes/skip.py +++ b/cylc/flow/run_modes/skip.py @@ -109,9 +109,10 @@ def process_outputs(itask: 'TaskProxy', rtconfig: Dict) -> List[str]: # which we hold back, to prevent warnings about pre-requisites being # unmet being shown because a "finished" 
output happens to come first. for message in itask.state.outputs.iter_required_messages( - exclude=( - TASK_OUTPUT_SUCCEEDED if TASK_OUTPUT_FAILED - in conf_outputs else TASK_OUTPUT_FAILED + disable=( + TASK_OUTPUT_SUCCEEDED + if TASK_OUTPUT_FAILED in conf_outputs + else TASK_OUTPUT_FAILED ) ): trigger = itask.state.outputs._message_to_trigger[message] diff --git a/cylc/flow/task_outputs.py b/cylc/flow/task_outputs.py index c6867f0b090..8548ab405e4 100644 --- a/cylc/flow/task_outputs.py +++ b/cylc/flow/task_outputs.py @@ -195,7 +195,7 @@ def get_completion_expression(tdef: 'TaskDef') -> str: def get_optional_outputs( expression: str, outputs: Iterable[str], - force_optional: "Optional[str]" = None + disable: "Optional[str]" = None ) -> Dict[str, Optional[bool]]: """Determine which outputs in an expression are optional. @@ -204,8 +204,9 @@ def get_optional_outputs( The completion expression. outputs: All outputs that apply to this task. - force_optional: - Don't have the CompletionEvaluator consider this output. + disable: + Disable this output and any others it is joined with by `and` + (which will mean they are necessarily optional). Returns: dict: compvar: is_optional @@ -236,7 +237,14 @@ def get_optional_outputs( >>> sorted(get_optional_outputs( ... '(succeeded and towel) or (failed and bugblatter)', ... {'succeeded', 'towel', 'failed', 'bugblatter'}, - ... 'failed' + ... ).items()) + [('bugblatter', True), ('failed', True), + ('succeeded', True), ('towel', True)] + + >>> sorted(get_optional_outputs( + ... '(succeeded and towel) or (failed and bugblatter)', + ... {'succeeded', 'towel', 'failed', 'bugblatter'}, + ... disable='failed' ... 
).items()) [('bugblatter', True), ('failed', True), ('succeeded', False), ('towel', False)] @@ -249,7 +257,7 @@ def get_optional_outputs( all_compvars = {trigger_to_completion_variable(out) for out in outputs} # Allows exclusion of additional outcomes: - extra_excludes = {force_optional: False} if force_optional else {} + extra_excludes = {disable: False} if disable else {} return { # output: is_optional # the outputs that are used in the expression @@ -627,7 +635,7 @@ def _is_compvar_complete(self, compvar: str) -> Optional[bool]: def iter_required_messages( self, - exclude: 'Optional[Literal["succeeded", "failed"]]' = None + disable: 'Optional[Literal["succeeded", "failed"]]' = None ) -> Iterator[str]: """Yield task messages that are required for this task to be complete. @@ -635,14 +643,14 @@ def iter_required_messages( e.g. "completion = succeeded or failed". Args: - exclude: Don't check wether this output is required for - completion - in skip mode we only want to check either - succeeded or failed, but not both. + disable: Consider this output and any others it is joined with by + `and` to not exist. In skip mode we only want to check either + succeeded or failed, but not both. 
""" for compvar, is_optional in get_optional_outputs( self._completion_expression, set(self._message_to_compvar.values()), - force_optional=exclude + disable=disable ).items(): if is_optional is False: for message, _compvar in self._message_to_compvar.items(): diff --git a/tests/unit/run_modes/test_skip_units.py b/tests/unit/run_modes/test_skip_units.py index 0b5316662ad..bf5ffa7be60 100644 --- a/tests/unit/run_modes/test_skip_units.py +++ b/tests/unit/run_modes/test_skip_units.py @@ -99,7 +99,7 @@ def test_process_outputs(outputs, required, expect): rtconfig=rtconf), state=SimpleNamespace( outputs=SimpleNamespace( - iter_required_messages=lambda exclude: iter(required), + iter_required_messages=lambda *a, **k: iter(required), _message_to_trigger={v: v for v in required} ))) @@ -115,7 +115,7 @@ def test_skip_mode_validate(caplog, log_filter): https://github.com/cylc/cylc-admin/blob/master/docs/proposal-skip-mode.md | If the run mode is set to simulation or skip in the workflow - | configuration, then cylc validate and cylc lint should produce + | configuration, then cylc validate and cylc lint should produce | warning (similar to development features in other languages / systems). 
""" taskdefs = { diff --git a/tests/unit/test_task_outputs.py b/tests/unit/test_task_outputs.py index 28abb27dbb1..2306e412342 100644 --- a/tests/unit/test_task_outputs.py +++ b/tests/unit/test_task_outputs.py @@ -288,6 +288,8 @@ def test_iter_required_outputs(): 'y', } + +def test_iter_required_outputs__disable(): # Get all outputs required for success path (excluding failure, what # is still required): outputs = TaskOutputs( @@ -298,21 +300,31 @@ def test_iter_required_outputs(): ) ) - # Excluding succeeded leaves us with failure required outputs: - assert set(outputs.iter_required_messages( - exclude=TASK_OUTPUT_SUCCEEDED)) == { - TASK_OUTPUT_FAILED, 'x', 'y',} + assert set(outputs.iter_required_messages()) == set() - # Excluding failed leaves us with succeeded required outputs: - assert set(outputs.iter_required_messages( - exclude=TASK_OUTPUT_FAILED)) == { - TASK_OUTPUT_SUCCEEDED, 'a', 'b',} + # Disabling succeeded leaves us with failure required outputs: + assert set( + outputs.iter_required_messages(disable=TASK_OUTPUT_SUCCEEDED) + ) == { + TASK_OUTPUT_FAILED, + 'x', + 'y', + } - # Excluding an abitrary output leaves us with required outputs + # Disabling failed leaves us with succeeded required outputs: + assert set(outputs.iter_required_messages(disable=TASK_OUTPUT_FAILED)) == { + TASK_OUTPUT_SUCCEEDED, + 'a', + 'b', + } + + # Disabling an abitrary output leaves us with required outputs # from another branch: - assert set(outputs.iter_required_messages( - exclude='a')) == { - TASK_OUTPUT_FAILED, 'x', 'y',} + assert set(outputs.iter_required_messages(disable='a')) == { + TASK_OUTPUT_FAILED, + 'x', + 'y', + } def test_get_trigger_completion_variable_maps():