diff --git a/magma/magma_constants.py b/magma/magma_constants.py new file mode 100644 index 0000000..d178223 --- /dev/null +++ b/magma/magma_constants.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python3 + +################################################################# +# Vars +################################################################# +TITLE = "title" + +# MetaWorkflow Handler attributes +PROJECT = "project" +INSTITUTION = "institution" +UUID = "uuid" +META_WORKFLOWS = "meta_workflows" +ORDERED_META_WORKFLOWS = "ordered_meta_workflows" +META_WORKFLOW = "meta_workflow" +NAME = "name" +DEPENDENCIES = "dependencies" +ITEMS_FOR_CREATION_PROP_TRACE = "items_for_creation_property_trace" +ITEMS_FOR_CREATION_UUID = "items_for_creation_uuid" + +# MetaWorkflow Run Handler attributes +COST = "cost" +STATUS = "status" +FINAL_STATUS = "final_status" +ASSOCIATED_META_WORKFLOW_HANDLER = "meta_workflow_handler" +ASSOCIATED_ITEM = "associated_item" +META_WORKFLOW_RUN = "meta_workflow_run" +META_WORKFLOW_RUNS = "meta_workflow_runs" +ITEMS_FOR_CREATION = "items_for_creation" +ERROR = "error" +# statuses +PENDING = "pending" +RUNNING = "running" +COMPLETED = "completed" +FAILED = "failed" +STOPPED = "stopped" + +INACTIVE = "inactive" +QC_FAIL = "quality metric failed" + + +#TODO: the following is here in case dup flag is added in the future +#TODO: add back in +# MWFR_TO_HANDLER_STEP_STATUS_DICT = { +# "pending": "pending", +# "running": "running", +# "completed": "completed", +# "failed": "failed", +# "inactive": "pending", +# "stopped": "stopped", +# "quality metric failed": "failed" +# } diff --git a/magma/metawfl_handler.py b/magma/metawfl_handler.py new file mode 100644 index 0000000..31eec4a --- /dev/null +++ b/magma/metawfl_handler.py @@ -0,0 +1,182 @@ +#!/usr/bin/env python3 + +################################################ +# Libraries +################################################ +from copy import deepcopy + +from magma.validated_dictionary import ValidatedDictionary +from magma.topological_sort import TopologicalSortHandler +from magma.magma_constants import * +from dcicutils.misc_utils import CycleError + +################################################ +# Custom Exception classes +################################################ +class MetaWorkflowStepCycleError(CycleError): + """Custom exception for cycle error tracking.""" + pass + +class MetaWorkflowStepDuplicateError(ValueError): + """Custom ValueError when MetaWorkflows don't have unique name attributes.""" + pass + +class MetaWorkflowStepSelfDependencyError(ValueError): + """Custom ValueError when MetaWorkflow Step has a dependency on itself.""" + pass + +################################################ +# MetaWorkflowStep +################################################ +class MetaWorkflowStep(ValidatedDictionary): + """ + Class to represent a MetaWorkflow, + as a step within a MetaWorkflow Handler object + """ + + def __init__(self, input_dict): + """ + Constructor method, initialize object and attributes. + + :param input_dict: a dictionary of MetaWorkflow step metadata + :type input_dict: dict + """ + super().__init__(input_dict) + + # Validate presence of basic attributes of this MetaWorkflow step + self._validate_basic_attributes(META_WORKFLOW, NAME) + + self._check_self_dependency() + + def _validate_basic_attributes(self, *list_of_attributes): + """ + Validation of the input dictionary for the MetaWorkflow step. + Checks that necessary MetaWorkflow attributes are present for this MetaWorkflow step. 
+ + :param list_of_attributes: attributes that are checked + :type list_of_attributes: str(s) + :return: None, if all specified attributes are present + :raises ValueError: if this object doesn't have a specified attribute + :raises AttributeError: if not one (and only one) of items_for_creation attributes is present + """ + super()._validate_basic_attributes(*list_of_attributes) + + ## Check that one (and only one) of the following attributes is defined on this step: + ## ITEMS_FOR_CREATION_UUID or ITEMS_FOR_CREATION_PROP_TRACE + try: + # set None for [default] arg to not throw AttributeError + #TODO: handle this within ff instead? It is CGAP portal-specific + if not getattr(self, ITEMS_FOR_CREATION_UUID, None): + getattr(self, ITEMS_FOR_CREATION_PROP_TRACE) + except AttributeError as e: + raise AttributeError("Object validation error, {0}\n" + .format(e.args[0])) + + # for items for creation, this object can only have + # either the UUID or property trace, but not both + if hasattr(self, ITEMS_FOR_CREATION_PROP_TRACE) and hasattr(self, ITEMS_FOR_CREATION_UUID): + raise AttributeError("Object validation error, 'MetaWorkflowStep' object cannot have both of the following attributes: 'items_for_creation_property_trace' and 'items_for_creation_uuid'") + + def _check_self_dependency(self): + """ + Check that this MetaWorkflow Step object doesn't have a self-dependency. + + :return: None, if no self-dependencies present + :raises MetaWorkflowStepSelfDependencyError: if there is a self-dependency + """ + if hasattr(self, DEPENDENCIES): + dependencies = getattr(self, DEPENDENCIES) + for dependency in dependencies: + if dependency == getattr(self, NAME): + raise MetaWorkflowStepSelfDependencyError(f'"{dependency}" has a self dependency.') + + +################################################ +# MetaWorkflowHandler +################################################ +class MetaWorkflowHandler(ValidatedDictionary): + """ + Class representing a MetaWorkflow Handler object, + including a list of MetaWorkflows with specified dependencies & other metadata + """ + + def __init__(self, input_dict): + """ + Constructor method, initialize object and attributes. + + :param input_dict: MetaWorkflow Handler dict, defined by json file from CGAP portal + :type input_dict: dict + """ + ### Basic attributes ### + super().__init__(input_dict) + + super()._validate_basic_attributes(UUID) + + ### Calculated attributes ### + # set meta_workflows attribute + # Using meta_workflows array of dicts from CGAP MetaWorkflow Handler + # create dict of the form {meta_workflow_name: MetaWorkflow Step object} + self._set_meta_workflows_dict() + # TODO: NOTE: nowhere in magma is there a check that meta_workflows + # is an empty list. I am putting the burden of that on the user + # would y'all like me to add a check for an empty list? or NoneType? + # right now I only catch instances where meta_workflows doesn't exist, + # and I create an empty dict + + # Create ordered MetaWorkflows name list based on dependencies + # This ordered list is what's used to create the array of MetaWorkflow Runs in Run handler + setattr(self, ORDERED_META_WORKFLOWS, self._create_ordered_meta_workflows_list()) + + def _set_meta_workflows_dict(self): + """ + Checks for meta_workflows attribute (an array of MetaWorkflows and their metadata) from CGAP portal. 
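# A minimal sketch of the step validation described above, using made-up step names
# and meta_workflow uuids; these objects are only meant to illustrate the checks.
from magma.metawfl_handler import MetaWorkflowStep, MetaWorkflowStepSelfDependencyError

valid_step = MetaWorkflowStep({
    "meta_workflow": "mwf_uuid_1",
    "name": "align",
    "items_for_creation_uuid": ["sample_uuid_1"],
    "dependencies": [],
})

try:
    MetaWorkflowStep({
        "meta_workflow": "mwf_uuid_1",
        "name": "align",
        "items_for_creation_uuid": ["sample_uuid_1"],
        "dependencies": ["align"],  # a step cannot depend on itself
    })
except MetaWorkflowStepSelfDependencyError as err:
    print(err)  # "align" has a self dependency.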
+ + If nonexistent, set handler's meta_workflows attribute as an empty dictionary + If present, copy that list temporarily and redefine meta_workflows attribute + as a dictionary of the form {meta_workflow_name: MetaWorkflow Step object,....} + checking for duplicate steps in the process (i.e. non-unique MetaWorkflow names) + + :return: None, if all MetaWorkflowSteps are created successfully + :raises MetaWorkflowStepDuplicateError: if there are duplicate MetaWorkflows, by name + """ + if not hasattr(self, META_WORKFLOWS): + # if not present, set attribute as empty dictionary + setattr(self, META_WORKFLOWS, {}) + else: + orig_meta_workflow_list_copy = deepcopy(getattr(self, META_WORKFLOWS)) + + temp_meta_workflow_step_dict = {} + + for meta_workflow in orig_meta_workflow_list_copy: + # create MetaWorkflowStep object for this MetaWorkflow + meta_workflow_step = MetaWorkflowStep(meta_workflow) + + # then add to the meta_workflows dictionary + # of the form {meta_workflow["name"]: MetaWorkflowStep(meta_workflow)} + if temp_meta_workflow_step_dict.setdefault(meta_workflow["name"], meta_workflow_step) != meta_workflow_step: + raise MetaWorkflowStepDuplicateError(f'"{meta_workflow["name"]}" is a duplicate MetaWorkflow, \ + all MetaWorkflow names must be unique.') + + # redefine the "meta_workflows" attribute to this generated dictionary of MetaWorkflowStep objects + setattr(self, META_WORKFLOWS, temp_meta_workflow_step_dict) + + def _create_ordered_meta_workflows_list(self): + """ + Using dictionary of MetaWorkflow name and their corresponding MetaWorkflowStep objects, + generate ordered list of MetaWorkflows, by name. + Uses TopologicalSorter to order these steps based on their defined dependencies. + + :return: list of valid topological sorting of MetaWorkflows (by name) + :rtype: list[str] + :raises MetaWorkflowStepCycleError: if there are cyclic dependencies among MetaWorkflow steps + i.e. no valid topological sorting of steps + """ + meta_workflows_dict = getattr(self, META_WORKFLOWS) + + try: + # create "graph" that will be passed into the topological sorter + sorter = TopologicalSortHandler(meta_workflows_dict) + # now topologically sort the steps + return sorter.sorted_graph_list() + except CycleError: + raise MetaWorkflowStepCycleError() diff --git a/magma/metawflrun_handler.py b/magma/metawflrun_handler.py new file mode 100644 index 0000000..061069e --- /dev/null +++ b/magma/metawflrun_handler.py @@ -0,0 +1,256 @@ +#!/usr/bin/env python3 + +################################################ +# Libraries +################################################ +from magma.validated_dictionary import ValidatedDictionary +from magma.magma_constants import * + +################################################ +# MetaWorkflowRunStep +################################################ +class MetaWorkflowRunStep(ValidatedDictionary): + """ + Class to represent a MetaWorkflow Run object, + as a step within a MetaWorkflow Run Handler object. + Assumption that this is based on ordered_meta_workflows (name) list + from a MetaWorkflow Handler. + """ + + def __init__(self, input_dict): + """ + Constructor method, initialize object and attributes. 
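# A minimal sketch of how the handler above orders its steps, with made-up uuids;
# "B" depends on "A" and "C" depends on "B", so the only valid ordering is A, B, C.
from magma.metawfl_handler import MetaWorkflowHandler

handler = MetaWorkflowHandler({
    "uuid": "test_handler_uuid",
    "meta_workflows": [
        {"meta_workflow": "mwf_b", "name": "B", "items_for_creation_uuid": ["u1"], "dependencies": ["A"]},
        {"meta_workflow": "mwf_a", "name": "A", "items_for_creation_uuid": ["u1"], "dependencies": []},
        {"meta_workflow": "mwf_c", "name": "C", "items_for_creation_uuid": ["u1"], "dependencies": ["B"]},
    ],
})
print(handler.ordered_meta_workflows)  # ["A", "B", "C"]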
+ + :param input_dict: dictionary representing a MetaWorkflow step (object) and accompanying info within handler + :type input_dict: dict + """ + super().__init__(input_dict) + + # for automatically setting initial status to "pending", unless explicitly defined not to + if not hasattr(self, STATUS): + setattr(self, STATUS, PENDING) + + # Validate presence of basic attributes of this MetaWorkflow step + # TODO: make items_for_creation a required attr? + # !!!AND!!! meta_workflow_run --> not necessarily, not defined until creation of mwfr + self._validate_basic_attributes(NAME, DEPENDENCIES) + +################################################ +# MetaWorkflowRunHandler +################################################ +class MetaWorkflowRunHandler(ValidatedDictionary): + """ + Class representing a MetaWorkflowRun Handler object, + a list of MetaWorkflowsRuns with specified dependencies, + and their status. + """ + + def __init__(self, input_dict): + """ + Constructor method, initialize object and attributes. + + :param input_dict: MetaWorkflow Handler Run object + :type input_dict: dict + """ + + ### Basic attributes ### + + super().__init__(input_dict) + + self._validate_basic_attributes(UUID, ASSOCIATED_META_WORKFLOW_HANDLER, META_WORKFLOW_RUNS) + + ### Calculated attributes ### + + # by nature of how a MetaWorkflowRun Handler is created from the MetaWorkflow Handler, + # the array "meta_workflow_runs" will already be in some valid topologically sorted order + # here, though, we create a dictionary of the form {mwfr_name: MetaWorkflowRunStep_object,...} + # for faster lookup and updating of steps + self.meta_workflow_run_steps_dict = self._set_meta_workflow_runs_dict() + + # initial final_status attribute upon creation should be "pending" + # setattr(self, FINAL_STATUS, PENDING) + self.update_final_status() + + + def _set_meta_workflow_runs_dict(self): + """ + Using meta_workflow_runs attribute (an array of MetaWorkflow Runs and their metadata), + create a dictionary of the form {meta_workflow_run_name_a: meta_workflow_run_step_obj_a, ...}, + allowing for quicker lookup and updating of MetaWorkflowRunStep objects. + + :return: dictionary containing {MetaWorkflowRun name: MetaWorkflowRunStep object} key-value pairs + """ + meta_workflow_run_step_dict = {} + for meta_workflow_run in self.meta_workflow_runs: + meta_workflow_run_step_object = MetaWorkflowRunStep(meta_workflow_run) + step_name = meta_workflow_run[NAME] + meta_workflow_run_step_dict[step_name] = meta_workflow_run_step_object + return meta_workflow_run_step_dict + + + def update_final_status(self): + """ + Update final_status of handler based on combined statuses of + all MetaWorkflowRunStep objects. + + If all steps are pending, final_status = pending. + If a step is running and none others have failed or stopped, final_status = running. + If all steps are completed, final_status = completed. + If a step has failed, final_status = failed. + If a step has been stopped, final_status = stopped. 
+ + :return: final_status of the MetaWorkflow Run Handler + :rtype: str + """ + setattr(self, FINAL_STATUS, PENDING) + + all_steps_completed = True + all_steps_pending = True + + for meta_workflow_run_step in self.meta_workflow_run_steps_dict.values(): + current_step_status = getattr(meta_workflow_run_step, STATUS) + + # checking if all steps are "completed" or "pending" and toggling corresponding flags + if current_step_status != COMPLETED: + all_steps_completed = False + if current_step_status != PENDING: + all_steps_pending = False + + # if step neither "completed" or "pending", update final_status accordingly + if current_step_status == RUNNING: + setattr(self, FINAL_STATUS, RUNNING) + elif current_step_status == FAILED: + setattr(self, FINAL_STATUS, FAILED) + break + elif current_step_status == STOPPED: + setattr(self, FINAL_STATUS, STOPPED) + break + + # if all the steps were successfully completed + if all_steps_completed: + setattr(self, FINAL_STATUS, COMPLETED) + + # if all the steps were pending + if all_steps_pending: + setattr(self, FINAL_STATUS, PENDING) + + return getattr(self, FINAL_STATUS) + + + def _retrieve_meta_workflow_run_step_obj_by_name(self, meta_workflow_run_name): + """ + Given a MetaWorkflow Run name, + retrieve its corresponding MetaWorkflowRunStep object. + + :param meta_workflow_run_name: name of MetaWorkflow Run to be retrieved + :type meta_workflow_run_name: str + :return: MetaWorkflowRunStep object corresponding to the given name + :raises: KeyError if the MetaWorkflow Run name is invalid + """ + try: + step_obj = self.meta_workflow_run_steps_dict[meta_workflow_run_name] + return step_obj + except KeyError as key_err: + raise KeyError("{0} is not a valid MetaWorkflowRun Step name.\n" + .format(key_err.args[0])) + #TODO: sharding of mwfrs.... + + + def get_meta_workflow_run_step_attr(self, meta_workflow_run_name, attribute_to_fetch): + """ + Given a MetaWorkflow Run name and an attribute to fetch, + retrieve this attribute from the corresponding MetaWorkflowRunStep object, + or None if the attribute to fetch doesn't exist on the MetaWorkflowRunStep object. + + :param meta_workflow_run_name: name of MetaWorkflow Run to be accessed + :type meta_workflow_run_name: str + :return: attribute_to_fetch's value from the MetaWorkflowRunStep object specified + :rtype: varied, or None if not an existing attribute on the given Run Step + :raises: KeyError if the MetaWorkflow Run name is invalid + """ + step_obj = self._retrieve_meta_workflow_run_step_obj_by_name(meta_workflow_run_name) + # Return the attribute_to_fetch + return getattr(step_obj, attribute_to_fetch, None) + + + def update_meta_workflow_run_step_obj(self, meta_workflow_run_name, attribute, value): + """ + Given a MetaWorkflow Run name, an attribute to update, and value to update it to, + retrieve its corresponding MetaWorkflowRunStep object by name + and redefine the given attribute with the provided new value. 
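# A minimal sketch of the status roll-up described above, with made-up identifiers;
# step statuses default to "pending" and the handler's final_status follows the rules listed.
from magma.metawflrun_handler import MetaWorkflowRunHandler

run_handler = MetaWorkflowRunHandler({
    "uuid": "run_handler_uuid",
    "meta_workflow_handler": "handler_uuid",
    "meta_workflow_runs": [
        {"name": "A", "dependencies": []},
        {"name": "B", "dependencies": ["A"], "status": "completed"},
    ],
})
print(run_handler.final_status)     # "pending" -- step A has not run yet
print(run_handler.pending_steps())  # ["A"]

run_handler.update_meta_workflow_run_step_obj("A", "status", "completed")
print(run_handler.update_final_status())  # "completed" -- every step has completed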
+ + :param meta_workflow_run_name: name of MetaWorkflow Run to be retrieved and updated + :type meta_workflow_run_name: str + :param attribute: attribute to update + :type attribute: str + :param value: new value of the updated attribute + :type value: varies + :raises: KeyError if the MetaWorkflow Run name is invalid + """ + # Retrieve the specified step object + step_obj = self._retrieve_meta_workflow_run_step_obj_by_name(meta_workflow_run_name) + # Reset the given attribute + setattr(step_obj, attribute, value) + + + def pending_steps(self): + """ + Returns a list of names of MetaWorkflowRunStep objects whose status is "pending". + Returns empty list if none are pending. + + :returns: list of pending steps, by name + :rtype: list[str] + """ + pending_steps_list = [] + + for meta_workflow_run_step in self.meta_workflow_runs: + step_name = meta_workflow_run_step[NAME] + if self.get_meta_workflow_run_step_attr(step_name, STATUS) == PENDING: + pending_steps_list.append(step_name) + + return pending_steps_list + + def running_steps(self): + """ + Returns a list of names of MetaWorkflowRunStep objects whose status is "running". + Returns empty list if none are running. + + :returns: list of running steps, by name + :rtype: list[str] + """ + running_steps_list = [] + for meta_workflow_run in self.meta_workflow_runs: + associated_meta_workflow_name = meta_workflow_run[NAME] + if self.get_meta_workflow_run_step_attr(associated_meta_workflow_name, STATUS) == RUNNING: + running_steps_list.append(associated_meta_workflow_name) + + return running_steps_list + + # TODO: move to ff because portal specific + def update_meta_workflow_runs_array(self): + """ + Following any updates to MetaWorkflowRunStep objects in meta_workflow_run_steps_dict, + this method is called in order to update the original meta_workflow_runs array of dicts. + Possible attributes that are updated are meta_workflow_run (a linkTo), + status, and error. + + This allows for future PATCHing of a meta_workflow_runs array on the CGAP portal, + by providing the updated meta_workflow_runs. + + :returns: updated meta_workflow_runs array + """ + #TODO: make sure this works with sharding + for meta_workflow_run_dict in self.meta_workflow_runs: + meta_workflow_run_name = meta_workflow_run_dict[NAME] + meta_workflow_run_linkto = self.get_meta_workflow_run_step_attr(meta_workflow_run_name, META_WORKFLOW_RUN) + status = self.get_meta_workflow_run_step_attr(meta_workflow_run_name, STATUS) + error = self.get_meta_workflow_run_step_attr(meta_workflow_run_name, ERROR) + + if meta_workflow_run_linkto: + meta_workflow_run_dict[META_WORKFLOW_RUN] = meta_workflow_run_linkto + if status: + meta_workflow_run_dict[STATUS] = status + if error: + meta_workflow_run_dict[ERROR] = error + + return self.meta_workflow_runs \ No newline at end of file diff --git a/magma/topological_sort.py b/magma/topological_sort.py new file mode 100644 index 0000000..f9af993 --- /dev/null +++ b/magma/topological_sort.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python3 + +################################################ +# Libraries +################################################ +from dcicutils.misc_utils import TopologicalSorter +from magma.magma_constants import DEPENDENCIES + +################################################ +# Functions +################################################ +class TopologicalSortHandler(object): + + def __init__(self, meta_workflows_dict): + """ + Constructor method, initialize object and attributes. 
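# A minimal sketch of how per-step updates flow back into the meta_workflow_runs
# array used for PATCHing, with made-up identifiers.
from magma.metawflrun_handler import MetaWorkflowRunHandler

run_handler = MetaWorkflowRunHandler({
    "uuid": "run_handler_uuid",
    "meta_workflow_handler": "handler_uuid",
    "meta_workflow_runs": [{"name": "A", "dependencies": []}],
})
run_handler.update_meta_workflow_run_step_obj("A", "meta_workflow_run", "mwfr_uuid_1")
run_handler.update_meta_workflow_run_step_obj("A", "status", "running")
print(run_handler.running_steps())  # ["A"]
print(run_handler.update_meta_workflow_runs_array())
# [{"name": "A", "dependencies": [], "meta_workflow_run": "mwfr_uuid_1", "status": "running"}]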
+ Calls method to create graph input (dict) for TopologicalSorter class, + then sorts this graph, or raises CycleError if sort not possible. + + :param meta_workflows_dict: input dictionary of meta_workflows from MetaWorkflowHandler + :type meta_workflows_dict: dict + """ + # Create graph for TopologicalSorter + self.graph = self._create_topo_sort_graph_input(meta_workflows_dict) + + # Create the sorter itself + self.sorter = TopologicalSorter(self.graph) + + def _create_topo_sort_graph_input(self, meta_workflows_dict): + """ + Using the meta_workflows_dict defined in the MetaWorkflow Handler, + convert to appropriate form to input into a TopologicalSorter. + + :param meta_workflows_dict: input dictionary of meta_workflows from MetaWorkflowHandler + :type meta_workflows_dict: dict + :return: graph input dict for TopologicalSorter + :rtype: dict + """ + # the graph dict should be of the form {mwf_name: set(dependencies),...} + graph = {} + # the meta_workflows_dict is of the form {mwf_name: MetaWorkflowStep object,...} + for mwf_step_name, mwf_step_obj in meta_workflows_dict.items(): + dependencies = getattr(mwf_step_obj, DEPENDENCIES) + # if there are dependencies for this step, add to the input graph + if dependencies: + graph[mwf_step_name] = set(dependencies) + else: + graph[mwf_step_name] = {} + return graph + + def sorted_graph_list(self): + """ + Using the TopologicalSorter object, sorts input graph + and returns list of meta_workflow names in a valid + topological ordering. + + :return: list of meta_workflow names, ordered + :rtype: list[str] + """ + sorted_meta_workflows_list = list(self.sorter.static_order()) + return sorted_meta_workflows_list \ No newline at end of file diff --git a/magma/validated_dictionary.py b/magma/validated_dictionary.py new file mode 100644 index 0000000..7021be8 --- /dev/null +++ b/magma/validated_dictionary.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python3 + +################################################ +# ValidatedDictionary TODO: eventually make part of dcicutils? +################################################ +class ValidatedDictionary(object): + """ + Parent class for MetaWorkflow(Run)Step and MetaWorkflow(Run) Handler classes. + Takes in an input dictionary, and validates basic attributes (makes sure given attributes are present). + """ + + def __init__(self, input_dict): + """ + Constructor method, initialize object and attributes. + + :param input_dict: input dictionary, defined by json file, which defines basic attributes of this object + :type input_dict: dict + """ + # Set basic (non-calculated) attributes # + for key in input_dict: + setattr(self, key, input_dict[key]) + + def _validate_basic_attributes(self, *attributes_to_check): + """ + Validation of the JSON input for this object. + Checks that given attributes are present in the created object. + + :param attributes_to_check: attributes that are checked (variable number of non-keyword arguments) + :type attributes_to_check: str(s) + :return: None, if all specified attributes are present + :raises ValueError: if this Validated Dictionary object doesn't have a specified attribute + """ + for attribute in attributes_to_check: + try: + # retrieved_attr = getattr(self, attribute) + getattr(self, attribute) + # if retrieved_attr is None: # if not retrieved_attr --> for falsy values raise AttributeError("attribute %s cannot have value 'None'." 
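# A minimal sketch of the graph handed to the sorter above, using stand-in step
# objects (only the "dependencies" attribute is read when building the graph).
from types import SimpleNamespace
from magma.topological_sort import TopologicalSortHandler

steps = {
    "A": SimpleNamespace(dependencies=[]),
    "B": SimpleNamespace(dependencies=["A"]),
    "C": SimpleNamespace(dependencies=["A", "B"]),
}
sorter = TopologicalSortHandler(steps)
print(sorter.graph)                # {"A": {}, "B": {"A"}, "C": {"A", "B"}} -- set ordering may vary
print(sorter.sorted_graph_list())  # ["A", "B", "C"]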
% attribute) + # TODO: add this to the pytests + except AttributeError as e: + raise AttributeError("Object validation error, {0}\n" + .format(e.args[0])) \ No newline at end of file diff --git a/magma_ff/checkstatus.py b/magma_ff/checkstatus.py index 0ec6b70..bcf7257 100644 --- a/magma_ff/checkstatus.py +++ b/magma_ff/checkstatus.py @@ -1,23 +1,14 @@ -#!/usr/bin/env python3 +from magma_ff.wfrutils import FFWfrUtils, FFMetaWfrUtils +from magma_ff.utils import JsonObject +from magma_ff.metawflrun_handler import MetaWorkflowRunHandler +from magma.checkstatus import AbstractCheckStatus -################################################ -# -# dcicutils wrapper -# -################################################ +from typing import List, Dict, Union, Any, Optional +from functools import cached_property -################################################ -# Libraries -################################################ -import sys, os +from magma.magma_constants import * -# magma -from magma.checkstatus import AbstractCheckStatus -from magma_ff.wfrutils import FFWfrUtils -################################################ -# CheckStatusFF -################################################ class CheckStatusFF(AbstractCheckStatus): """Customized CheckStatus class for the portal. """ @@ -34,7 +25,7 @@ def __init__(self, wflrun_obj, env=None): # Portal-related attributes self._env = env - # Cache for FFWfrUtils object + # For FFWfrUtils object self._ff = None #end def @@ -86,3 +77,117 @@ def ff(self): return self._ff #end class + + + +class CheckStatusRunHandlerFF(object): + """ + Customized CheckStatus class for MetaWorkflow Run Handler from the CGAP portal. + """ + + def __init__(self, meta_workflow_run_handler: JsonObject, auth_key: JsonObject) -> None: + """ + Initialize CheckStatusRunHandlerFF object. + + :param meta_workflow_run_handler: MetaWorkflowRunHandler input dict + :param auth_key: Authorization keys for C4 account + """ + self.meta_workflow_run_handler = meta_workflow_run_handler + self.auth_key = auth_key + + + def update_running_steps(self) -> Optional[Dict[str, Union[str, List[Any]]]]: + """ + For each running MetaWorkflow Run Step: + - updates status of that MetaWorkflow Run Step to its current portal output + - generates updated meta_workflow_runs array and final_status (of handler) + for MetaWorkflowRunHandler instance, yielded as + {final_status, meta_workflow_runs} for PATCHing + """ + # Iterate through list of running MetaWorkflow Run steps (array of objects) + for running_step_name in self.handler.running_steps(): + + # Get run uuid + run_step_uuid = self.handler.get_meta_workflow_run_step_attr(running_step_name, UUID) + + # Check current status of this MetaWorkflow Run step + curr_status = self.get_meta_workflow_run_step_status(run_step_uuid) + + # TODO: is there any case where a uuid of a "running" step doesn't exist? + # I don't think so but check with Doug + + # TODO: is there any way to catch traceback from Tibanna of a failed job? + # if so, can add attr to run handler schema to save these, otherwise it is + # manually searched/inspected (I imagine it is the latter) + + # TODO: worry about other attrs at all (like uuid?) 
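# A minimal sketch of the ValidatedDictionary base class defined earlier in this diff:
# input keys become attributes, and a missing required attribute raises.
from magma.validated_dictionary import ValidatedDictionary

validated = ValidatedDictionary({"uuid": "some_uuid", "name": "demo"})
validated._validate_basic_attributes("uuid", "name")  # passes, both attributes exist
try:
    validated._validate_basic_attributes("missing_field")
except AttributeError as err:
    print(err)  # Object validation error for the missing attribute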
+ + if curr_status == RUNNING: + yield None # yield None so iteration isn't terminated + continue + + # Update run status + self.handler.update_meta_workflow_run_step_obj(running_step_name, STATUS, curr_status) + + # Return the json to PATCH meta_workflow_runs and final_status in handler + yield {FINAL_STATUS: self.handler.update_final_status(), + META_WORKFLOW_RUNS: self.handler.update_meta_workflow_runs_array()} + + def updated_run_handler_cost(self) -> Dict[str, float]: + """ + For each running MetaWorkflow Run Step: + - retrieve its Tibanna cost from CGAP portal. Returns 0 if it doesn't have this attribute + - add this step's cost to the overall run handler cost + - once loop is completed, generates updated cost for MetaWorkflowRunHandler instance, + yielded as a dict for PATCHing on CGAP portal + """ + curr_cost = float(0) + for run_step_name in self.handler.meta_workflow_run_steps_dict: + # Get run uuid + run_step_uuid = self.handler.get_meta_workflow_run_step_attr(run_step_name, UUID) + # Get its cost and add to overall handler cost + run_step_cost = self.portal_run_attr_getter.get_meta_workflow_run_cost(run_step_uuid) + curr_cost += run_step_cost + # Return the json to PATCH cost attribute in handler + return {COST: curr_cost} + # TODO: is there actually any case where we don't need to check non-running + # steps for cost? other than when initializing cost of a newly created handler to 0... + + def get_meta_workflow_run_step_status(self, meta_workflow_run_identifier: str) -> str: + """ + Using the CGAP portal, gets the current status of given MetaWorkflow Run step. + + :param meta_workflow_run_identifier: Identifier (e.g. UUID, @id) for + MetaWorkflow Run to be searched + :return: the status of the specified MetaWorkflow Run + """ + current_status = self.portal_run_attr_getter.get_meta_workflow_run_status(meta_workflow_run_identifier) + return self.run_status_mapping[current_status] + + @property + def run_status_mapping(self) -> dict: + """ + Mapping from possible CGAP portal final_status value for a MetaWorkflow Run, + to possible status values for a MetaWorkflow Run step within a Run Handler, + according to CGAP schema for a Run Handler. 
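# A minimal sketch of driving the update_running_steps generator above; the auth key
# and run handler identifier are placeholders, and each yielded body is PATCHed.
from dcicutils import ff_utils
from magma_ff.checkstatus import CheckStatusRunHandlerFF

auth_key = {"key": "<key>", "secret": "<secret>", "server": "<cgap-portal-url>"}  # placeholder
run_handler_json = ff_utils.get_metadata("<run-handler-uuid>", key=auth_key)      # placeholder ID

checker = CheckStatusRunHandlerFF(run_handler_json, auth_key)
for patch_body in checker.update_running_steps():
    if patch_body is None:
        continue  # this MetaWorkflow Run is still running on the portal
    ff_utils.patch_metadata(patch_body, run_handler_json["uuid"], key=auth_key)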
+ """ + #TODO: add this to constants + return { + PENDING: PENDING, + RUNNING: RUNNING, + COMPLETED: COMPLETED, + FAILED: FAILED, + INACTIVE: PENDING, + STOPPED: STOPPED, + QC_FAIL: FAILED + } + + @cached_property + def portal_run_attr_getter(self): + """Used for accessing status and cost attributes of MetaWorkflow Runs from CGAP portal.""" + return FFMetaWfrUtils(self.auth_key) + + @cached_property + def handler(self): + """Using JSON object of Run Handler from CGAP portal, create magma_ff MetaWorkflowRunHandler instance.""" + return MetaWorkflowRunHandler(self.meta_workflow_run_handler) \ No newline at end of file diff --git a/magma_ff/create_metawflrun_handler.py b/magma_ff/create_metawflrun_handler.py new file mode 100644 index 0000000..c5457a0 --- /dev/null +++ b/magma_ff/create_metawflrun_handler.py @@ -0,0 +1,359 @@ +#!/usr/bin/env python3 + +################################################ +# Libraries +################################################ +from datetime import date +import json +import uuid + +from dcicutils import ff_utils +from functools import cached_property + +# magma +from magma_ff.metawfl_handler import MetaWorkflowHandler + +# from magma_ff.metawflrun_handler import MetaWorkflowRunHandler +from magma_ff.utils import make_embed_request, JsonObject +from magma.magma_constants import * + + +################################################ +# Custom Exception class +################################################ +class MetaWorkflowRunHandlerCreationError(Exception): + """Custom Exception when MetaWorkflow Run Handler encounters error during creation.""" + + pass + + +################################################ +# MetaWorkflow Run Handler from Item +################################################ +class MetaWorkflowRunHandlerFromItem: + """ + Base class to hold common methods required to create and POST a + MetaWorkflowRun Handler, and PATCH the Item used to create it (the "associated item"). + """ + + # for embed requests + ASSOCIATED_ITEM_FIELDS = [ + "project", + "institution", + "uuid", + # "meta_workflow_runs.uuid", + # "meta_workflow_runs.meta_workflow", + # "meta_workflow_runs.final_status" + # TODO: these last three are for the case of reintegrating duplication flag + ] + + META_WORKFLOW_HANDLER_FIELDS = [ + "uuid", + "title", # TODO: test when no title present + "meta_workflows", + "meta_workflows.*", + ] + + # TODO: is this correct?? also, will we end up patching on assoc item?? + # TODO: if so, create a schema mixin (seems unnecessary, for now) + META_WORKFLOW_RUN_HANDLER_ENDPOINT = "meta-workflow-run-handlers" + + def __init__( + self, associated_item_identifier, meta_workflow_handler_identifier, auth_key + ): + """ + Initialize the MWF Run Handler object, set basic attributes. 
+ + :param associated_item_identifier: Item identifier (UUID, @id, or accession) + on which this MetaWorkflow Run Handler is being created + :type associated_item_identifier: str + :param meta_workflow_handler_identifier: Associated MetaWorkflow Handler identifier + (UUID, @id, or accession) -- TODO: does embed request work with an accession (yes) + :type meta_workflow_handler_identifier: str + :param auth_key: Portal authorization key + :type auth_key: dict + """ + if associated_item_identifier is None: + raise MetaWorkflowRunHandlerCreationError( + f"Invalid argument: 'associated_item_identifier' is {str(associated_item_identifier)}" + ) + if meta_workflow_handler_identifier is None: + raise MetaWorkflowRunHandlerCreationError( + f"Invalid argument: 'meta_workflow_handler_identifier' is {str(meta_workflow_handler_identifier)}" + ) + if auth_key is None: + raise MetaWorkflowRunHandlerCreationError( + f"Invalid argument: 'auth_key' is {str(auth_key)}" + ) + + self.auth_key = auth_key + self.associated_item_identifier = associated_item_identifier + self.meta_workflow_handler_identifier = meta_workflow_handler_identifier + + def create_meta_workflow_run_handler(self): + """ + Create MetaWorkflowRun Handler dictionary, which can later be POSTed to the CGAP portal. + + :return: MetaWorkflowRun Handler dictionary (for the portal JSON object) + :rtype: dict + """ + + # Create basic MetaWorkflow Run Handler dictionary, using instance variables + meta_workflow_run_handler = { + PROJECT: self.get_project, + INSTITUTION: self.get_institution, + UUID: str(uuid.uuid4()), + ASSOCIATED_META_WORKFLOW_HANDLER: self.meta_workflow_handler_identifier, + ASSOCIATED_ITEM: self.associated_item_identifier, + FINAL_STATUS: PENDING, + } + # Create the title of the Run Handler, based on associated MetaWorkflow Handler's title + # and the timestamp at the time of creation of this class instance + meta_workflow_handler_title = self.retrieved_meta_workflow_handler.get(TITLE) + if meta_workflow_handler_title: + creation_date = date.today() + # creation_date = datetime.date.today().isoformat() + title = f"MetaWorkflowRun Handler {meta_workflow_handler_title} created {creation_date.isoformat()}" + meta_workflow_run_handler[TITLE] = title + + # now call helper method to create and populate the meta_workflow_runs array + meta_workflow_run_handler[ + META_WORKFLOW_RUNS + ] = self._create_meta_workflow_runs_array() + # TODO: check for whether this is empty or nah? I'm not for now + # putting the burden of this error on the user + # see my note in magma/metawfl_handler.py regarding this + + # return the completed MetaWorkflow Run Handler dictionary, which follows the CGAP schema + self.meta_workflow_run_handler = meta_workflow_run_handler + return meta_workflow_run_handler + + def _create_meta_workflow_runs_array(self): + """ + Creates meta_workflow_runs array for a MetaWorkflowRun Handler dictionary. + These objects are in correct order due to topological sorting in + the MetaWorkflowHandler class, and uses the associated MetaWorkflow Handler's + ordered_meta_workflows array as a template. 
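# A rough sketch of the dictionary built above, with made-up values; the
# meta_workflow_runs array is filled in by _create_meta_workflow_runs_array below.
example_run_handler_shape = {
    "project": "/projects/example-project/",
    "institution": "/institutions/example-institution/",
    "uuid": "<generated-uuid>",
    "meta_workflow_handler": "<meta-workflow-handler-identifier>",
    "associated_item": "<associated-item-identifier>",
    "final_status": "pending",
    "title": "MetaWorkflowRun Handler <handler title> created 2024-01-02",
    "meta_workflow_runs": [],
}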
+ + :return: array of meta_workflow_runs metadata, following Run Handler CGAP schema + :rtype: list[dict] + """ + + # Will eventually be the meta_workflow_runs array, with the runs in order + ordered_meta_workflow_runs = [] + + # Go through the ordered MetaWorkflow steps to populate basic MetaWorkflow Runs + for meta_workflow_step_name in self.ordered_meta_workflow_names: + + # self.meta_workflow_steps is a dict of step dicts, keys are step names + meta_workflow_step = self.meta_workflow_steps[meta_workflow_step_name] + + # will become the populated MetaWorkflowRun step object + meta_workflow_run_step = {} + + # Attrs that stay the same: name, dependencies + meta_workflow_run_step[NAME] = getattr(meta_workflow_step, NAME) + meta_workflow_run_step[DEPENDENCIES] = getattr(meta_workflow_step, DEPENDENCIES) + + # Handle conversion of MetaWorkflow items_for_creation_(uuid/prop_trace) + # to MetaWorkflow Run items_for_creation with embed requests + meta_workflow_run_step[ITEMS_FOR_CREATION] = self._embed_items_for_creation( + meta_workflow_step + ) + + # Basic dict for current MetaWorkflow Run step complete. Now append. + ordered_meta_workflow_runs.append(meta_workflow_run_step) + + return ordered_meta_workflow_runs + + def _embed_items_for_creation(self, meta_workflow_step): + """ + From a MetaWorkflow Step, extract the items_for_creation attribute, which + may be uuids or property traces (in relation to the associated item). + + If uuids, return this list of uuids. + If property traces, use embed requests to convert to identifiers. + + :param meta_workflow_step: object containing a MetaWorkflow Step's metadata + :type meta_workflow_step: dict + :return: list of items_for_creation identifiers + :rtype: list[str] + :raises MetaWorkflowRunHandlerCreationError: if a property trace cannot be embedded + """ + # if items_for_creation_uuid, just copy over + # if ITEMS_FOR_CREATION_UUID in meta_workflow_step.keys(): + if getattr(meta_workflow_step, ITEMS_FOR_CREATION_UUID, None): + return getattr(meta_workflow_step, ITEMS_FOR_CREATION_UUID) + #TODO: have to do embed calls to check these actually exist? + # otherwise, dealing with property traces. Make necessary embed requests + # and convert property trace(s) to uuid(s) + else: + property_traces = getattr(meta_workflow_step, ITEMS_FOR_CREATION_PROP_TRACE, None) + if not isinstance(property_traces, list): + item_uuid = make_embed_request( + self.associated_item_identifier, + property_traces + + ".uuid", # TODO: are we assuming the user will include ".uuid" or @id as part of prop trace? + self.auth_key, + single_item=True, + ) + if not item_uuid: + raise MetaWorkflowRunHandlerCreationError( + f"Invalid property trace '{property_traces}' on item with the following ID: {self.associated_item_identifier}" + ) + return item_uuid + + + items_for_creation_uuids = [] + for item_prop_trace in property_traces: + item_uuid = make_embed_request( + self.associated_item_identifier, + item_prop_trace + + ".uuid", # TODO: are we assuming the user will include ".uuid" or @id as part of prop trace? + self.auth_key, + single_item=True, + ) + if not item_uuid: + raise MetaWorkflowRunHandlerCreationError( + f"Invalid property trace '{item_prop_trace}' on item with the following ID: {self.associated_item_identifier}" + ) + items_for_creation_uuids.append(item_uuid) + return items_for_creation_uuids + + def post_meta_workflow_run_handler(self): + """ + Posts meta_workflow_run_handler dict to CGAP portal. + + :raises: Exception when the dict cannot be POSTed. 
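# A rough sketch of a single meta_workflow_runs entry produced above, with made-up values.
# When the step carries items_for_creation_property_trace (e.g. a hypothetical
# "sample_processing.samples" trace), the embed request resolves it to uuids; when it
# carries items_for_creation_uuid, the uuids are copied over directly.
example_meta_workflow_run_entry = {
    "name": "align",
    "dependencies": [],
    "items_for_creation": ["<resolved-item-uuid>"],
}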
Could be due to schema inconsistencies, for example.
+        """
+        try:
+            ff_utils.post_metadata(
+                self.meta_workflow_run_handler, #TODO: add check to see if this exists?
+                self.META_WORKFLOW_RUN_HANDLER_ENDPOINT,
+                key=self.auth_key,
+            )
+        except Exception as error_msg:
+            raise MetaWorkflowRunHandlerCreationError(
+                f"MetaWorkflowRunHandler not POSTed: \n{str(error_msg)}"
+            ) from error_msg
+
+        # TODO: PATCH associated item's meta_workflow_runs array?
+        # I've chosen to do this in the running function instead
+
+    @cached_property
+    def retrieved_associated_item(self):
+        """
+        Acquire associated item fields needed to create the Run Handler.
+        """
+        associated_item = make_embed_request(
+            self.associated_item_identifier,
+            self.ASSOCIATED_ITEM_FIELDS,
+            self.auth_key,
+            single_item=True,
+        )
+        if not associated_item:
+            raise MetaWorkflowRunHandlerCreationError(
+                f"No Item found for given 'associated_item' identifier: {self.associated_item_identifier}"
+            )
+        return associated_item
+
+    @cached_property
+    def retrieved_meta_workflow_handler(self):
+        """
+        Acquire fields from associated MetaWorkflow Handler needed to create the Run Handler.
+        """
+        # TODO: a check to make sure it is indeed of mwf handler type? does this function exist on ff_utils?
+        # same for above associated item request
+        meta_workflow_handler = make_embed_request(
+            self.meta_workflow_handler_identifier,
+            self.META_WORKFLOW_HANDLER_FIELDS,
+            self.auth_key,
+            single_item=True,
+        )
+        if not meta_workflow_handler:
+            raise MetaWorkflowRunHandlerCreationError(
+                f"No MetaWorkflow Handler found for given 'meta_workflow_handler' identifier: {self.meta_workflow_handler_identifier}"
+            )
+        return meta_workflow_handler
+
+    @cached_property # made cached because topological sort can return different valid results
+    def meta_workflow_handler_instance(self):
+        """
+        Creates a MetaWorkflowHandler object.
+        This induces topological sort of steps and validation of attributes.
+        """
+        return MetaWorkflowHandler(self.retrieved_meta_workflow_handler)
+
+    @property
+    def ordered_meta_workflow_names(self):
+        """
+        Returns the topologically sorted list of MetaWorkflow names from the
+        MetaWorkflowHandler instance, i.e. its ordered_meta_workflows attribute.
+
+        :returns: ordered_meta_workflows attribute
+        :rtype: list[str]
+        """
+        # Extract the ordered list of MetaWorkflow names
+        return getattr(
+            self.meta_workflow_handler_instance, ORDERED_META_WORKFLOWS
+        )
+
+    @property
+    def meta_workflow_steps(self):
+        """
+        Initializes a MetaWorkflowHandler object, which topologically sorts its
+        MetaWorkflow steps and contains attribute of these steps in order,
+        the meta_workflows array.
+ + :returns: meta_workflows attribute + :rtype: list[dict] + """ + # Create MetaWorkflowHandler object + # This ensures all necessary attrs are present in the following Run Handler creation + # and that MetaWorkflow Steps are topologically sorted + associated_meta_workflow_handler_object = MetaWorkflowHandler( + self.retrieved_meta_workflow_handler + ) + + # Extract the ordered list of MetaWorkflow names + return getattr( + associated_meta_workflow_handler_object, META_WORKFLOWS + ) + + @property + def get_project(self): + """Retrieves project attribute from the associated item.""" + return self.retrieved_associated_item.get(PROJECT) + + @property + def get_institution(self): + """Retrieves institution attribute from the associated item.""" + return self.retrieved_associated_item.get(INSTITUTION) + +#################################################### +# Wrapper Fxn: MetaWorkflow Run Handler from Item +#################################################### +def create_meta_workflow_run_handler( + associated_item_identifier: str, + meta_workflow_handler_identifier: str, + auth_key: JsonObject, + post: bool = True, +) -> JsonObject: + """Create a MetaWorkflowRunHandler for the given associated item and MetaWorkflow Handler. + + POST MetaWorkflowRun as instructed. + + :param associated_item_identifier: Identifier (e.g. UUID, @id) for item from + which to create the MetaWorkflowRun Handler + :param meta_workflow_handler_identifier: Identifier for the MetaWorkflow Handler + from which to create the MetaWorkflowRun Handler + :param auth_key: Authorization keys for C4 account + :param post: Whether to POST the MetaWorkflowRun Handler created + :returns: MetaWorkflowRun Handler created + """ + meta_workflow_run_handler_creator = MetaWorkflowRunHandlerFromItem(associated_item_identifier, meta_workflow_handler_identifier, auth_key) + run_handler = meta_workflow_run_handler_creator.create_meta_workflow_run_handler() + if post: + meta_workflow_run_handler_creator.post_meta_workflow_run_handler() + return run_handler diff --git a/magma_ff/metawfl_handler.py b/magma_ff/metawfl_handler.py new file mode 100644 index 0000000..aea4584 --- /dev/null +++ b/magma_ff/metawfl_handler.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python3 + +################################################ +# Libraries +################################################ + +# magma +from magma.metawfl_handler import MetaWorkflowHandler as MetaWorkflowHandlerFromMagma + +################################################ +# MetaWorkflow Handler, Fourfront +################################################ +class MetaWorkflowHandler(MetaWorkflowHandlerFromMagma): + + def __init__(self, input_dict): + """ + Constructor method, initialize object and attributes. 
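# A minimal sketch of calling the creation wrapper above; identifiers and the auth key
# are placeholders.
from magma_ff.create_metawflrun_handler import create_meta_workflow_run_handler

auth_key = {"key": "<key>", "secret": "<secret>", "server": "<cgap-portal-url>"}  # placeholder
run_handler_json = create_meta_workflow_run_handler(
    "<associated-item-identifier>",
    "<meta-workflow-handler-identifier>",
    auth_key,
    post=False,  # build the dictionary without POSTing it to the portal
)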
+ + :param input_dict: MetaWorkflow Handler object defined by json file, from portal + :type input_dict: dict + """ + super().__init__(input_dict) + + #TODO: name filling with property traces + #change design so mwf handler from magma only has uuids + #prop trace handled here (change may be within mwf steps) \ No newline at end of file diff --git a/magma_ff/metawflrun_handler.py b/magma_ff/metawflrun_handler.py new file mode 100644 index 0000000..f4e739f --- /dev/null +++ b/magma_ff/metawflrun_handler.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python3 + +################################################ +# Libraries +################################################ + +# magma +from magma.metawflrun_handler import MetaWorkflowRunHandler as MetaWorkflowRunHandlerFromMagma + +# from magma import metawfl #TODO: do this in FF +# from magma_ff.utils import make_embed_request #check_status, chunk_ids + +################################################ +# MetaWorkflow Handler, Fourfront +################################################ +class MetaWorkflowRunHandler(MetaWorkflowRunHandlerFromMagma): + + def __init__(self, input_dict): + """ + Constructor method, initialize object and attributes. + + :param input_dict: MetaWorkflow Handler object defined by json file, from portal + :type input_dict: dict + """ + super().__init__(input_dict) + + #TODO: update cost \ No newline at end of file diff --git a/magma_ff/run_metawflrun_handler.py b/magma_ff/run_metawflrun_handler.py new file mode 100644 index 0000000..f972e6a --- /dev/null +++ b/magma_ff/run_metawflrun_handler.py @@ -0,0 +1,184 @@ +from typing import Optional, List, Dict, Union, Any +from functools import cached_property +from dcicutils import ff_utils + +from magma_ff.metawflrun_handler import MetaWorkflowRunHandler +from magma_ff.utils import JsonObject, make_embed_request, check_status +from magma_ff.create_metawfr import ( + create_meta_workflow_run, + MetaWorkflowRunCreationError, +) + +from magma.magma_constants import * + + +def execute_metawflrun_handler( + meta_workflow_run_handler_id: str, + auth_key: JsonObject, + valid_final_status: Optional[List[str]] = None, + verbose: bool = False, +) -> None: + """ + Wrapper function to ExecuteMetaWorkflowRunHandler class method calls. + Executes the Run Handler for the given MetaWorkflow Run Handler ID. + Checks pending MetaWorkflow Run steps and, if dependencies are completed, + creates a corresponding MetaWorkflow Run, updates the step's status + and handler status, and PATCHes to CGAP portal. + + :param meta_workflow_run_handler_id: Identifier for the MetaWorkflow Run Handler + (e.g. 
UUID, @id) to be executed + :param auth_key: Authorization keys for C4 account + :param verbose: Whether to print the PATCH response(s) + :param valid_final_status: Run Handler final status(es) considered valid in CGAP portal + """ + # Retrieve Run Handler portal JSON from CGAP portal + fields_to_embed = [ + "*", + "meta_workflow_runs.*", + ] # TODO: double check this with integrated testing + meta_workflow_run_handler = make_embed_request( + meta_workflow_run_handler_id, fields_to_embed, auth_key, single_item=True + ) + if not meta_workflow_run_handler: + raise ValueError( + f"No MetaWorkflow Run Handler found for given identifier: {meta_workflow_run_handler_id}" + ) + + # Check that status of Run Handler retrieved is valid + perform_action = check_status(meta_workflow_run_handler, valid_final_status) + + # Start executing this Run Handler is its status is considered valid, PATCHing MWFRs as they're created + if perform_action: + newly_running_meta_workflow_runs = ExecuteMetaWorkflowRunHandler( + meta_workflow_run_handler, auth_key + ).generator_of_created_meta_workflow_run_steps() + for patch_dict in newly_running_meta_workflow_runs: + response_from_patch = ff_utils.patch_metadata( + patch_dict, meta_workflow_run_handler_id, key=auth_key + ) + if verbose: + print(response_from_patch) + # TODO: add patch to the associated item list of metaworkflow runs? + + +class ExecuteMetaWorkflowRunHandler: + """ + Class that generates updated dictionaries for PATCHing a MetaWorkflow Run Handler, + as each MetaWorkflow Run Step is executed in order, based on user-defined dependencies. + """ + + def __init__( + self, meta_workflow_run_handler: JsonObject, auth_key: JsonObject + ) -> None: + """ + Initialize the ExecuteMetaWorkflowRunHandler object, set basic attributes. 
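# A minimal sketch of invoking the execution wrapper above; identifiers and the auth key
# are placeholders.
from magma_ff.run_metawflrun_handler import execute_metawflrun_handler

auth_key = {"key": "<key>", "secret": "<secret>", "server": "<cgap-portal-url>"}  # placeholder
execute_metawflrun_handler(
    "<run-handler-uuid>",
    auth_key,
    valid_final_status=["pending", "running"],
    verbose=True,
)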
+ + :param meta_workflow_run_handler: JSON object of MetaWorkflowRun Handler, + retrieved from CGAP portal + :param auth_key: Portal authorization key + """ + self.auth_key = auth_key + self.meta_workflow_run_handler = meta_workflow_run_handler + + def generator_of_created_meta_workflow_run_steps( + self, + ) -> Dict[str, Union[str, List[Any]]]: + """ + For each pending (ready to run) MetaWorkflow Run Step, if all dependencies are complete: + - updates status of that MetaWorkflow Run Step to "running" + - creates a corresponding MetaWorkflow Run + - generates updated meta_workflow_runs array and final_status + for MetaWorkflowRunHandler instance, yielded as + {final_status, meta_workflow_runs} for PATCHing + """ + # going through all steps that are ready to run (pending) + for pending_meta_workflow_run_name in self.handler.pending_steps(): + # current_pending_meta_workflow_run_step = self.meta_workflow_run_handler_instance.retrieve_meta_workflow_run_step_obj_by_name(pending_meta_workflow_run_name) + + dependencies_completed = self._check_pending_step_dependencies( + pending_meta_workflow_run_name + ) + + # if all dependencies for this pending step have run to completion + if dependencies_completed: + # Create this MetaWorkflow Run and POST to portal + # set this step's status to running too + self._create_and_update_meta_workflow_run_step( + pending_meta_workflow_run_name + ) + + # update final status & meta_workflow_runs array of the handler, yield for PATCHING + yield { + FINAL_STATUS: self.handler.update_final_status(), + META_WORKFLOW_RUNS: self.handler.update_meta_workflow_runs_array(), + } + + def _check_pending_step_dependencies(self, pending_step_name: str) -> bool: + """ + Given the name of a pending MetaWorkflowRun Step, check if all the Run Steps it is + dependent on are completed. + + :param pending_step_name: name of the pending MetaWorkflowRun Step + :returns: True if all dependencies are completed, otherwise False + """ + + current_dependencies = self.handler.get_meta_workflow_run_step_attr( + pending_step_name, DEPENDENCIES + ) + + for dependency_name in current_dependencies: + dependency_step_status = self.handler.get_meta_workflow_run_step_attr( + dependency_name, STATUS + ) + if dependency_step_status != COMPLETED: + return False + + return True + + def _create_and_update_meta_workflow_run_step(self, pending_step_name: str) -> None: + """ + For a given pending MetaWorkflow Run step name within a Run Handler, + create its corresponding MetaWorkflow Run and update appropriate attributes (status & MetaWorkflow Run LinkTo). + If there is any error in creation of the Run, update the error attribute. + + :param pending_step_name: name of MetaWorkflow Run to be created and updated + :raises MetaWorkflowRunCreationError: if the MetaWorkflow Run for the given name can't be created + """ + try: + # TODO: iterate through all items for creation, + meta_workflow_run_portal_object = create_meta_workflow_run( + self.handler.get_meta_workflow_run_step_attr( + pending_step_name, ITEMS_FOR_CREATION + ), + self.handler.get_meta_workflow_run_step_attr( + pending_step_name, META_WORKFLOW + ), + self.auth_key, + ) # TODO: !!! have to add run_uuid attr to schema!! arrray? to match items_for_creation + # TODO: will this be the actual output of this function or do i have to parse more? 
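# A minimal sketch of the dependency gate used above, in isolation: a pending step is
# only created once every step it depends on reports "completed". Names are made up.
step_statuses = {"A": "completed", "B": "running"}
dependencies_of_c = ["A", "B"]
ready_to_run = all(step_statuses[name] == "completed" for name in dependencies_of_c)
print(ready_to_run)  # False -- "B" has not completed yet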
+
+            # update the meta_workflow_run linkTo
+            self.handler.update_meta_workflow_run_step_obj(
+                pending_step_name,
+                META_WORKFLOW_RUN,
+                meta_workflow_run_portal_object[UUID],
+            )
+            # update the run step's status to running
+            self.handler.update_meta_workflow_run_step_obj(
+                pending_step_name, STATUS, RUNNING
+            )
+        # if there is any error in creation of the MetaWorkflowRun
+        except MetaWorkflowRunCreationError as err:
+            # update error attr
+            self.handler.update_meta_workflow_run_step_obj(
+                pending_step_name, ERROR, str(err)
+            )
+            # update run step's status to failed
+            self.handler.update_meta_workflow_run_step_obj(
+                pending_step_name, STATUS, FAILED
+            )
+
+    @cached_property
+    def handler(self):
+        """Using JSON object of Run Handler from CGAP portal, create MetaWorkflowRunHandler instance."""
+        return MetaWorkflowRunHandler(self.meta_workflow_run_handler)
diff --git a/magma_ff/status_metawflrun_handler.py b/magma_ff/status_metawflrun_handler.py
new file mode 100644
index 0000000..39a075a
--- /dev/null
+++ b/magma_ff/status_metawflrun_handler.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python3
+
+################################################
+# Libraries
+################################################
+from dcicutils import ff_utils
+
+from magma_ff.checkstatus import CheckStatusRunHandlerFF
+from magma_ff.utils import check_status
+
+################################################
+# Status Function:
+# Checks & patches status of MWFR in run handler
+################################################
+def status_metawfr_handler(
+    metawfr_handler_uuid,
+    auth_key,
+    env="fourfront-cgap",
+    verbose=False,
+    valid_status=None
+):
+    perform_action = True
+    #TODO: what's good with the add_on here
+    run_handler_json = ff_utils.get_metadata(
+        metawfr_handler_uuid, add_on="frame=raw&datastore=database", key=auth_key
+    )
+    perform_action = check_status(run_handler_json, valid_status)
+    if perform_action:
+        patch_dict = None
+        handler_status_check_obj = CheckStatusRunHandlerFF(run_handler_json, auth_key)
+
+        # get list of all updates and isolate most recent update
+        status_updates = list(handler_status_check_obj.update_running_steps())
+        if status_updates:
+            patch_dict = status_updates[-1]
+
+        if patch_dict:
+            response_from_patch = ff_utils.patch_metadata(patch_dict, metawfr_handler_uuid, key=auth_key)
+            if verbose:
+                print(response_from_patch)
\ No newline at end of file
diff --git a/magma_ff/utils.py b/magma_ff/utils.py
index 3e6470a..cdd8593 100644
--- a/magma_ff/utils.py
+++ b/magma_ff/utils.py
@@ -81,25 +81,26 @@ def chunk_ids(ids):
         result.append(ids[idx : idx + chunk_size])
     return result
 
+#TODO: add to tests for the handler too, and add constants
+def check_status(portal_json, valid_final_status=None):
+    """
+    Check if MetaWorkflowRun.status or MetaWorkflowRunHandler.status is valid.
 
-def check_status(meta_workflow_run, valid_final_status=None):
-    """Check if MetaWorkflowRun.status is valid.
-
-    If given valid final status, check MetaWorkflowRun.final_status
+    If given valid final status, check MetaWorkflowRun (Handler).final_status
+    as well.
- :param meta_workflow_run: MetaWorkflowRun[json] - :type meta_workflow_run: dict + :param portal_json: MetaWorkflowRun(Handler)[json] + :type portal_json: dict :param valid_status: Final status considered valid :type valid_status: list - :return: Whether MetaWorkflowRun final_status is valid + :return: Whether MetaWorkflowRun (Handler) status & final_status are valid :rtype: bool """ - item_status = meta_workflow_run.get("status", "deleted") + item_status = portal_json.get("status", "deleted") if item_status not in ["obsolete", "deleted"]: result = True if valid_final_status: - final_status = meta_workflow_run.get("final_status") + final_status = portal_json.get("final_status") if final_status not in valid_final_status: result = False else: diff --git a/magma_ff/wfrutils.py b/magma_ff/wfrutils.py index c0f932b..a321674 100644 --- a/magma_ff/wfrutils.py +++ b/magma_ff/wfrutils.py @@ -1,25 +1,15 @@ -#!/usr/bin/env python3 - -################################################ -# -# -# -################################################ - -################################################ -# Libraries -################################################ import sys, os -# dcicutils from dcicutils import ff_utils from dcicutils.s3_utils import s3Utils from tibanna.job import Job +from functools import cached_property +from magma.magma_constants import * +from magma_ff.utils import JsonObject +from typing import Optional +from requests.exceptions import HTTPError -################################################ -# FFWfrUtils -################################################ class FFWfrUtils(object): def __init__(self, env): """ @@ -122,8 +112,85 @@ def filter_wfr_output_minimal_processed(wfr_output): #end class - class FdnConnectionException(Exception): pass #end class + + +class FFMetaWfrUtils(object): + """Class for accessing status and cost metadata of a MetaWorkflow Run from CGAP portal.""" + + def __init__(self, auth_key: JsonObject) -> None: + """ + Initialize FFMetaWfrUtils object, setting basic attributes. + + :param auth_key: Authorization keys for C4 account + """ + self._auth_key = auth_key + + def get_meta_workflow_run_status(self, meta_workflow_run_identifier: str) -> str: + """ + Return the status of the MetaWorkflow Run associated with specified ID. + + :param meta_workflow_run_identifier: Identifier (e.g. UUID, @id) for + MetaWorkflow Run to be searched + :return: the status of the specified MetaWorkflow Run + """ + meta_workflow_run_portal_output = self._retrieve_meta_workflow_run(meta_workflow_run_identifier) + + # TODO: for now, just assuming it will have this attribute + # check this in integrated testing + return meta_workflow_run_portal_output[FINAL_STATUS] + + def get_meta_workflow_run_cost(self, meta_workflow_run_identifier: str) -> float: + """ + Return the cost of the MetaWorkflow Run associated with specified ID. + If no cost attribute found, return cost as 0. + + :param meta_workflow_run_identifier: Identifier (e.g. UUID, @id) for + MetaWorkflow Run to be searched + :return: the cost of the specified MetaWorkflow Run + """ + meta_workflow_run_portal_output = self._retrieve_meta_workflow_run(meta_workflow_run_identifier) + + if COST in meta_workflow_run_portal_output: + return meta_workflow_run_portal_output[COST] + + return float(0) + + def _retrieve_meta_workflow_run(self, meta_workflow_run_identifier: str) -> JsonObject: + """ + Get portal MetaWorkflow Run metadata JSON using its identifier. + Raises Exception if GET request is unsuccessful. 
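# A minimal sketch of reading status and cost through the portal accessor above; the
# auth key and run identifier are placeholders. Results are cached per identifier.
from magma_ff.wfrutils import FFMetaWfrUtils

auth_key = {"key": "<key>", "secret": "<secret>", "server": "<cgap-portal-url>"}  # placeholder
portal_runs = FFMetaWfrUtils(auth_key)
status = portal_runs.get_meta_workflow_run_status("<meta-workflow-run-uuid>")
cost = portal_runs.get_meta_workflow_run_cost("<meta-workflow-run-uuid>")  # 0.0 if no cost attribute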
+ + :param meta_workflow_run_identifier: Identifier (e.g. UUID, @id) for + MetaWorkflow Run to be searched + :return: Portal JSON object representing this MetaWorkflow Run and its metadata + """ + # Use cache if ID is an existent key + if meta_workflow_run_identifier in self._meta_workflow_runs_cache: + return self._meta_workflow_runs_cache[meta_workflow_run_identifier] + + # Otherwise retrieve this metadata from the portal + try: + result = ff_utils.get_metadata( + meta_workflow_run_identifier, key=self._auth_key + ) + except Exception as err: + raise HTTPError(err, f"GET request unsuccessful for MetaWorkflow Run using the following ID:\ + {meta_workflow_run_identifier}") from err + + # Add GET request result to cache + self._meta_workflow_runs_cache[meta_workflow_run_identifier] = result + return result + + @cached_property + def _meta_workflow_runs_cache(self) -> dict: + """ + Cache for MetaWorkflowRun metadata retrieved from CGAP portal. + Can save several MetaWorkflow Run metadata dicts at a time. + Initially empty, modified as MetaWorkflow Runs are retrieved. + Key-value = uuid-metadata_dict + """ + return {} diff --git a/test/meta_workflow_handler_constants.py b/test/meta_workflow_handler_constants.py new file mode 100644 index 0000000..c9d0d6c --- /dev/null +++ b/test/meta_workflow_handler_constants.py @@ -0,0 +1,152 @@ +from copy import deepcopy +from magma.magma_constants import * + +MWF_HANDLER_NAME = "test_mwf_handler" +MWF_HANDLER_PROJECT = "test_project" +MWF_HANDLER_INSTITUTION = "test_institution" +MWF_HANDLER_UUID = "test_mwf_handler_uuid" + +TESTER_UUID = "uuid" + +# Basic meta_workflow steps (dicts) used in meta_workflows array +MWF_A = {"meta_workflow": "test_mwf_uuid_0", "name": "A"} +MWF_B = {"meta_workflow": "test_mwf_uuid_1", "name": "B"} +MWF_C = {"meta_workflow": "test_mwf_uuid_2", "name": "C"} +MWF_D = {"meta_workflow": "test_mwf_uuid_3", "name": "D"} + +# Dependencies +DEP_ON_A = ["A"] +DEP_ON_B = ["B"] +DEP_ON_C = ["C"] +DEP_ON_D = ["D"] + +#TODO: I never use the prop trace for tests... +def meta_workflow_with_added_attrs(meta_workflow_dict, items_for_creation_property_trace=None, items_for_creation_uuid=None, dependencies=None): + """ + Helper function used to add non-required attributes to a MetaWorkflow step input dictionary. + Returns new MetaWorkflow step dictionary with added attributes. 
+ + :param meta_workflow_dict: dictionary containing required attributes for MetaWorkflow step ("name" and "meta_workflow"): + :type meta_workflow_dict: dic + :param items_for_creation_property_trace: property trace(s) of item(s) required to create MetaWorkflow Run from MetaWorkflow + :type items_for_creation_property_trace: str or list[str] or None + :param items_for_creation_uuid: uuid(s) of item(s) required to create MetaWorkflow Run from MetaWorkflow + :type items_for_creation_uuid: str or list[str] or None + :param dependencies: list of MetaWorkflows (names) that the current MetaWorkflow is dependent on + :type dependencies: list[str] + :return: reformatted MetaWorkflow dictionary with added attributes + """ + dict_copy = deepcopy(meta_workflow_dict) + if items_for_creation_property_trace: + dict_copy[ITEMS_FOR_CREATION_PROP_TRACE] = items_for_creation_property_trace + if items_for_creation_uuid: + dict_copy[ITEMS_FOR_CREATION_UUID] = items_for_creation_uuid + if dependencies is not None: + dict_copy[DEPENDENCIES] = dependencies + return dict_copy + + +# meta_workflows arrays for MetaWorkflow Handler +# handler without uuid -- fails validation of basic attributes +HANDLER_WITHOUT_UUID_DICT = { + NAME: MWF_HANDLER_NAME, + PROJECT: MWF_HANDLER_PROJECT, + INSTITUTION: MWF_HANDLER_INSTITUTION +} + +# handler without meta_workflows array -- passes validation, should set empty metaworkflows array +HANDLER_WITHOUT_MWF_ARRAY_DICT = { + NAME: MWF_HANDLER_NAME, + PROJECT: MWF_HANDLER_PROJECT, + INSTITUTION: MWF_HANDLER_INSTITUTION, + UUID: MWF_HANDLER_UUID +} + +# DAG_0 +# A B -----> C +MWF_A_DAG_0 = meta_workflow_with_added_attrs(MWF_A, None, TESTER_UUID, []) +MWF_B_DAG_0 = meta_workflow_with_added_attrs(MWF_B, None, TESTER_UUID, []) +MWF_B_DAG_0_W_DEP = meta_workflow_with_added_attrs(MWF_B, None, TESTER_UUID, DEP_ON_A) +MWF_C_DAG_0 = meta_workflow_with_added_attrs(MWF_C, None, TESTER_UUID, DEP_ON_B) +DAG_0_MWF_ARRAY = [MWF_B_DAG_0, MWF_A_DAG_0, MWF_C_DAG_0] # purposely in this order to test toposort +HANDLER_DAG_0 = { + NAME: MWF_HANDLER_NAME, + PROJECT: MWF_HANDLER_PROJECT, + INSTITUTION: MWF_HANDLER_INSTITUTION, + UUID: MWF_HANDLER_UUID, + META_WORKFLOWS: DAG_0_MWF_ARRAY +} +DAG_0_MWF_ARRAY_W_DUPLICATES = [MWF_B_DAG_0, MWF_A_DAG_0, MWF_C_DAG_0, MWF_B_DAG_0] +HANDLER_DAG_0_W_DUPLICATES = { + NAME: MWF_HANDLER_NAME, + PROJECT: MWF_HANDLER_PROJECT, + INSTITUTION: MWF_HANDLER_INSTITUTION, + UUID: MWF_HANDLER_UUID, + META_WORKFLOWS: DAG_0_MWF_ARRAY_W_DUPLICATES +} +DAG_0_MWF_ARRAY_W_DUPLICATES_BY_MWF_NAME = [MWF_B_DAG_0, MWF_A_DAG_0, MWF_C_DAG_0, MWF_B_DAG_0_W_DEP] +HANDLER_DAG_0_W_DUPLICATES_BY_MWF_NAME = { + NAME: MWF_HANDLER_NAME, + PROJECT: MWF_HANDLER_PROJECT, + INSTITUTION: MWF_HANDLER_INSTITUTION, + UUID: MWF_HANDLER_UUID, + META_WORKFLOWS: DAG_0_MWF_ARRAY_W_DUPLICATES_BY_MWF_NAME +} +REORDERED_MWFS_DAG_0 = [["A", "B", "C"], ["B", "A", "C"], ["B", "C", "A"]] + +# DAG_1 +# B -----> D +# | ⋀ ⋀ +# | / | +# ⋁ / | +# A <----- C +MWF_A_DAG_1 = meta_workflow_with_added_attrs(MWF_A, None, TESTER_UUID, DEP_ON_B + DEP_ON_C) +MWF_B_DAG_1 = meta_workflow_with_added_attrs(MWF_B, None, TESTER_UUID, []) +MWF_C_DAG_1 = meta_workflow_with_added_attrs(MWF_C, None, TESTER_UUID, []) +MWF_D_DAG_1 = meta_workflow_with_added_attrs(MWF_D, None, TESTER_UUID, DEP_ON_A + DEP_ON_B + DEP_ON_C) +DAG_1_MWF_ARRAY = [MWF_A_DAG_1, MWF_B_DAG_1, MWF_C_DAG_1, MWF_D_DAG_1] +HANDLER_DAG_1 = { + NAME: MWF_HANDLER_NAME, + PROJECT: MWF_HANDLER_PROJECT, + INSTITUTION: MWF_HANDLER_INSTITUTION, + UUID: MWF_HANDLER_UUID, + 
META_WORKFLOWS: DAG_1_MWF_ARRAY +} +REORDERED_MWFS_DAG_1 = [["B", "C", "A", "D"], ["C", "B", "A", "D"]] + +# CYCLIC_0 +# A B__ +# ⋀ \_____ +# | | +# | | +# C <----- | +MWF_A_CYCLIC_0 = meta_workflow_with_added_attrs(MWF_A, None, TESTER_UUID, []) +MWF_B_CYCLIC_0 = meta_workflow_with_added_attrs(MWF_B, None, TESTER_UUID, DEP_ON_C) +MWF_C_CYCLIC_0 = meta_workflow_with_added_attrs(MWF_C, None, TESTER_UUID, DEP_ON_B) +CYCLIC_0_MWF_ARRAY = [MWF_A_CYCLIC_0, MWF_B_CYCLIC_0, MWF_C_CYCLIC_0] +HANDLER_CYCLIC_0 = { + NAME: MWF_HANDLER_NAME, + PROJECT: MWF_HANDLER_PROJECT, + INSTITUTION: MWF_HANDLER_INSTITUTION, + UUID: MWF_HANDLER_UUID, + META_WORKFLOWS: CYCLIC_0_MWF_ARRAY +} + +# CYCLIC_1 +# A -----> B +# ⋀ | +# | | +# | ⋁ +# D <----- C +MWF_A_CYCLIC_1 = meta_workflow_with_added_attrs(MWF_A, None, TESTER_UUID, DEP_ON_D) +MWF_B_CYCLIC_1 = meta_workflow_with_added_attrs(MWF_B, None, TESTER_UUID, DEP_ON_A) +MWF_C_CYCLIC_1 = meta_workflow_with_added_attrs(MWF_C, None, TESTER_UUID, DEP_ON_B) +MWF_D_CYCLIC_1 = meta_workflow_with_added_attrs(MWF_D, None, TESTER_UUID, DEP_ON_C) +CYCLIC_1_MWF_ARRAY = [MWF_A_CYCLIC_1, MWF_B_CYCLIC_1, MWF_C_CYCLIC_1, MWF_D_CYCLIC_1] +HANDLER_CYCLIC_1 = { + NAME: MWF_HANDLER_NAME, + PROJECT: MWF_HANDLER_PROJECT, + INSTITUTION: MWF_HANDLER_INSTITUTION, + UUID: MWF_HANDLER_UUID, + META_WORKFLOWS: CYCLIC_1_MWF_ARRAY +} \ No newline at end of file diff --git a/test/meta_workflow_run_handler_constants.py b/test/meta_workflow_run_handler_constants.py new file mode 100644 index 0000000..94ef6ba --- /dev/null +++ b/test/meta_workflow_run_handler_constants.py @@ -0,0 +1,245 @@ +#!/usr/bin/env python3 + +################################################################# +# Libraries +################################################################# +from copy import deepcopy + +from magma.magma_constants import * + +################################################################# +# Vars +################################################################# + +MWF_RUN_HANDLER_NAME = "test_mwf_run_handler" +MWF_RUN_PROJECT = "test_project" +MWF_RUN_INSTITUTION = "test_institution" +MWF_RUN_HANDLER_UUID = "test_mwf_run_handler_uuid" + +TESTER_UUID = "test_item_uuid" + +TEST_MWFR_SIMPLE_GET_OUTPUT = { + "project": MWF_RUN_PROJECT, + "institution": MWF_RUN_INSTITUTION, + # "final_status": "completed", + "meta_workflow": "/meta-workflows/GAPMWIC28HMB/", + "@id": "/meta-workflow-runs/1734e9ac-af8c-4312-ac35-8b0018ef7411/", + "@type": ["MetaWorkflowRun", "Item"], + "uuid": TESTER_UUID +} + + +# basic meta_workflow_run dicts used in meta_workflow_runs array +# will have attributes added to them using mwf_run_with_added_attrs() +MWFR_A = {"name": "A", "meta_workflow": "link_to_mwf_A"} +MWFR_B = {"name": "B", "meta_workflow": "link_to_mwf_B"} +MWFR_C = {"name": "C", "meta_workflow": "link_to_mwf_C"} +MWFR_D = {"name": "D", "meta_workflow": "link_to_mwf_D"} + +MWF_NAMES_LIST = ["B", "C", "A", "D"] + +DEP_ON_A = ["A"] +DEP_ON_B = ["B"] +DEP_ON_C = ["C"] +DEP_ON_D = ["D"] + + +def mwf_run_with_added_attrs( + meta_workflow_run_dict, + dependencies=None, + items_for_creation=None, + status=None, + meta_workflow_run_linkto=None, + error=None, +): + """ + Generates an updated meta_workflow_run_dict given a basic meta_workflow_run_dict and attributes to add. + These attributes are limited to dependencies, items_for_creation, and status for these tests. 
+ + :param meta_workflow_run_dict: Dictionary with basic attribute(s) of a MetaWorkflow Run + :type meta_workflow_run_dict: dict + :param dependencies: MetaWorkflow Runs, by name, that the given MetaWorkflow Run depends on + :type dependencies: list + :param items_for_creation: Item linkTo(s) needed to created the given MetaWorkflow Run + :type items_for_creation: str or list[str] + :param status: the status of the given MetaWorkflow Run + :type status: str + :param meta_workflow_run_linkto: the linkTo to a "created" MetaWorkflow Run on CGAP portal + :type meta_workflow_run_linkto: str + :param error: error traceback at "creation" of a MetaWorkflow Run + :type error: str + :return: updated meta_workflow_run_dict + """ + dict_copy = deepcopy(meta_workflow_run_dict) + if dependencies is not None: + dict_copy[DEPENDENCIES] = dependencies + if items_for_creation is not None: + dict_copy[ITEMS_FOR_CREATION] = items_for_creation + if status is not None: + dict_copy[STATUS] = status + if meta_workflow_run_linkto is not None: + dict_copy[META_WORKFLOW_RUN] = meta_workflow_run_linkto + if error is not None: + dict_copy[ERROR] = error + return dict_copy + + +def mwfr_handler_dict_generator(meta_workflow_runs_array): + """ + Given a meta_workflow_runs array, returns an input dict for + creation of a MetaWorkflow Run Handler object. + + :param meta_workflow_runs_array: list of meta_workflow_run dicts + :type meta_workflow_runs_array: list[dict] + :return: dictionary to be used as input to instantiate a MetaWorkflow Run Handler object + """ + return { + NAME: MWF_RUN_HANDLER_NAME, + PROJECT: MWF_RUN_PROJECT, + INSTITUTION: MWF_RUN_INSTITUTION, + UUID: MWF_RUN_HANDLER_UUID, + ASSOCIATED_META_WORKFLOW_HANDLER: TESTER_UUID, + META_WORKFLOW_RUNS: meta_workflow_runs_array, + } + + +# handler without uuid -- fails validation of basic attributes +full_handler_dict_0 = mwfr_handler_dict_generator([]) +full_handler_dict_0.pop(UUID) +HANDLER_WITHOUT_UUID_DICT = full_handler_dict_0 + + +# handler without associated MetaWorkflow Handler uuid -- fails validation of basic attributes +full_handler_dict_1 = mwfr_handler_dict_generator([]) +full_handler_dict_1.pop(ASSOCIATED_META_WORKFLOW_HANDLER) +HANDLER_WITHOUT_ASSOC_MWFH_DICT = full_handler_dict_1 + +# handler without meta_workflow_runs array -- fails validation of basic attributes +HANDLER_WITHOUT_META_WORKFLOW_RUNS_ARRAY = mwfr_handler_dict_generator(None) + +# Constructing a Run Handler with the below step dependencies +# B -----> D +# | ⋀ ⋀ +# | / | +# ⋁ / | +# A <----- C + +# Pending MetaWorkflow Run dicts +MWFR_A_PENDING = mwf_run_with_added_attrs( + MWFR_A, DEP_ON_B + DEP_ON_C, TESTER_UUID, PENDING +) +MWFR_B_PENDING = mwf_run_with_added_attrs(MWFR_B, [], TESTER_UUID, PENDING) +MWFR_C_PENDING = mwf_run_with_added_attrs(MWFR_C, [], TESTER_UUID, PENDING) +MWFR_D_PENDING = mwf_run_with_added_attrs( + MWFR_D, DEP_ON_A + DEP_ON_B + DEP_ON_C, TESTER_UUID, PENDING +) + +# Running MetaWorkflow Run dicts +MWFR_A_RUNNING = mwf_run_with_added_attrs( + MWFR_A, DEP_ON_B + DEP_ON_C, TESTER_UUID, RUNNING, TESTER_UUID +) +MWFR_B_RUNNING = mwf_run_with_added_attrs(MWFR_B, [], TESTER_UUID, RUNNING, TESTER_UUID) +MWFR_C_RUNNING = mwf_run_with_added_attrs(MWFR_C, [], TESTER_UUID, RUNNING, TESTER_UUID) +MWFR_D_RUNNING = mwf_run_with_added_attrs( + MWFR_D, DEP_ON_A + DEP_ON_B + DEP_ON_C, TESTER_UUID, RUNNING, TESTER_UUID +) + +# Failed/stopped MetaWorkflowRun dicts +MWFR_A_FAILED = mwf_run_with_added_attrs( + MWFR_A, DEP_ON_B + DEP_ON_C, TESTER_UUID, FAILED, TESTER_UUID +) 
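For orientation, a minimal sketch (an editorial aside, not lines of the patch) of what the helper above produces for the step just defined; it relies only on constants already in this module and the key strings from magma_constants:

    # MWFR_A_FAILED, built by mwf_run_with_added_attrs() above, is expected to equal:
    assert MWFR_A_FAILED == {
        "name": "A",
        "meta_workflow": "link_to_mwf_A",
        "dependencies": ["B", "C"],              # DEP_ON_B + DEP_ON_C
        "items_for_creation": "test_item_uuid",  # TESTER_UUID
        "status": "failed",                      # FAILED
        "meta_workflow_run": "test_item_uuid",   # placeholder linkTo (TESTER_UUID)
    }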
+MWFR_A_STOPPED = mwf_run_with_added_attrs( + MWFR_A, DEP_ON_B + DEP_ON_C, TESTER_UUID, STOPPED, TESTER_UUID +) + +# Completed MetaWorkflow Run dicts +MWFR_A_COMPLETED = mwf_run_with_added_attrs( + MWFR_A, DEP_ON_B + DEP_ON_C, TESTER_UUID, COMPLETED, TESTER_UUID +) +MWFR_B_COMPLETED = mwf_run_with_added_attrs(MWFR_B, [], TESTER_UUID, COMPLETED, TESTER_UUID) +MWFR_C_COMPLETED = mwf_run_with_added_attrs(MWFR_C, [], TESTER_UUID, COMPLETED, TESTER_UUID) +MWFR_D_COMPLETED = mwf_run_with_added_attrs( + MWFR_D, DEP_ON_A + DEP_ON_B + DEP_ON_C, TESTER_UUID, COMPLETED, TESTER_UUID +) + + +# Magma FF - specific attributes handled here (for updating meta_workflow_runs array method) +MWFR_B_COMPLETED_W_LINKTO = mwf_run_with_added_attrs( + MWFR_B, [], TESTER_UUID, COMPLETED, "a_link_to" +) +MWFR_A_FAILED_W_ERROR = mwf_run_with_added_attrs( + MWFR_A, DEP_ON_B + DEP_ON_C, TESTER_UUID, FAILED, None, "error_message" +) +MWFR_A_STOPPED_W_LINKTO_AND_ERROR = mwf_run_with_added_attrs( + MWFR_A, + DEP_ON_B + DEP_ON_C, + TESTER_UUID, + STOPPED, + "another_link_to", + "and_another_error_message", +) + +# Note: these MetaWorkflowRuns above will be mixed and matched for testing purposes +# See meta_workflow_runs arrays and Run Handler input dicts below + +# All steps pending +PENDING_ARRAY = [MWFR_B_PENDING, MWFR_C_PENDING, MWFR_A_PENDING, MWFR_D_PENDING] +HANDLER_PENDING = mwfr_handler_dict_generator(PENDING_ARRAY) +HANDLER_PENDING_COPY = deepcopy(HANDLER_PENDING) #TODO: fix this hoe + +# Handlers currently running +FIRST_STEP_RUNNING_ARRAY = [MWFR_B_RUNNING, MWFR_C_PENDING, MWFR_A_PENDING, MWFR_D_PENDING] +FIRST_STEP_COMPLETED_ARRAY = [MWFR_B_COMPLETED, MWFR_C_PENDING, MWFR_A_PENDING, MWFR_D_PENDING] +RUNNING_MWFR_ARRAY = [MWFR_B_RUNNING, MWFR_C_RUNNING, MWFR_A_PENDING, MWFR_D_PENDING] +RUNNING_MWFR_ARRAY_2 = [ + MWFR_B_COMPLETED_W_LINKTO, + MWFR_C_RUNNING, + MWFR_A_PENDING, + MWFR_D_PENDING, +] +# this wouldn't happen with THIS dag in particular, +# but could in other cases (made for the sake of the final_status test for the handler TODO:) +# RUNNING_MWFR_ARRAY_3 = [MWFR_B_COMPLETED, MWFR_C_PENDING, MWFR_A_RUNNING, MWFR_D_PENDING] +HANDLER_STEPS_RUNNING = mwfr_handler_dict_generator(RUNNING_MWFR_ARRAY) +HANDLER_STEPS_RUNNING_2 = mwfr_handler_dict_generator(RUNNING_MWFR_ARRAY_2) +# HANDLER_STEPS_RUNNING_3 = mwfr_handler_dict_generator(RUNNING_MWFR_ARRAY_3) + +# Handlers that have failed +HALFWAY_DONE_N_FAIL_ARRAY = [ + MWFR_B_COMPLETED, + MWFR_C_COMPLETED, + MWFR_A_FAILED, + MWFR_D_PENDING, +] +HALFWAY_DONE_N_FAIL_ARRAY_2 = [ + MWFR_B_COMPLETED, + MWFR_C_COMPLETED, + MWFR_A_FAILED_W_ERROR, + MWFR_D_RUNNING, +] +HANDLER_FAILED = mwfr_handler_dict_generator(HALFWAY_DONE_N_FAIL_ARRAY) +HANDLER_FAILED_2 = mwfr_handler_dict_generator(HALFWAY_DONE_N_FAIL_ARRAY_2) + +# Handler that has been stopped +HALFWAY_DONE_N_STOPPED_ARRAY = [ + MWFR_B_COMPLETED, + MWFR_C_COMPLETED, + MWFR_A_STOPPED, + MWFR_D_PENDING, +] +HALFWAY_DONE_N_STOPPED_ARRAY_2 = [ + MWFR_B_COMPLETED, + MWFR_C_COMPLETED, + MWFR_A_STOPPED_W_LINKTO_AND_ERROR, + MWFR_D_PENDING, +] +HANDLER_STOPPED = mwfr_handler_dict_generator(HALFWAY_DONE_N_STOPPED_ARRAY) + +# Handler that is completed +COMPLETED_ARRAY = [ + MWFR_B_COMPLETED, + MWFR_C_COMPLETED, + MWFR_A_COMPLETED, + MWFR_D_COMPLETED, +] +HANDLER_COMPLETED = mwfr_handler_dict_generator(COMPLETED_ARRAY) diff --git a/test/test_checkstatus_ff.py b/test/test_checkstatus_ff.py index 4a65e07..6c58947 100644 --- a/test/test_checkstatus_ff.py +++ b/test/test_checkstatus_ff.py @@ -1,103 +1,191 @@ +from contextlib 
import contextmanager +from test.utils import patch_context +from typing import Iterator, List, Any + import json import mock +import pytest -from magma_ff import checkstatus +import magma_ff.checkstatus as checkstatus_module +from magma_ff.checkstatus import CheckStatusFF, CheckStatusRunHandlerFF from magma_ff import metawflrun as run_ff - -def test_CheckStatusFF(): - """This check does not actually connect to the portal. - It uses mocks for get_status and get_output - """ - with open('test/files/CGAP_WGS_trio_scatter_ff.run.json') as json_file: - data_wflrun = json.load(json_file) - - # fake that the first one is running - data_wflrun['workflow_runs'][0]['status'] = 'running' - data_wflrun['workflow_runs'][0]['jobid'] = 'somejobid' - - # Create MetaWorkflowRun object and check_running generator - wflrun_obj = run_ff.MetaWorkflowRun(data_wflrun) - cs = checkstatus.CheckStatusFF(wflrun_obj) - cr = cs.check_running() - - # mock get_status and get_output - with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_status', return_value='complete'): - with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_output', - return_value=[{'argument_name': 'raw_bam', 'files': 'abc'}]): - with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_uuid', return_value='run_uuid'): - res = next(cr) - - # check yielded result - assert len(res['workflow_runs']) == len(data_wflrun['workflow_runs']) # same as original - assert res['workflow_runs'][0] == {'name': 'workflow_bwa-mem_no_unzip-check', - 'workflow_run': 'run_uuid', - 'shard': '0:0', - 'jobid': 'somejobid', - 'status': 'completed', # changed from running to completed - 'output': [{'argument_name': 'raw_bam', 'files': 'abc'}]} # output is filled in - assert 'failed_jobs' not in res # if nothing failed, '' failed_jobs should not be in the patch dict - - -def test_CheckStatusFF_failed(): - """This check does not actually connect to the portal. - It uses mocks for get_status and get_output - """ - with open('test/files/CGAP_WGS_trio_scatter_ff.run.json') as json_file: - data_wflrun = json.load(json_file) - - # fake that the first one is running - data_wflrun['workflow_runs'][0]['status'] = 'running' - data_wflrun['workflow_runs'][0]['jobid'] = 'somejobid' - - # Create MetaWorkflowRun object and check_running generator - wflrun_obj = run_ff.MetaWorkflowRun(data_wflrun) - cs = checkstatus.CheckStatusFF(wflrun_obj) - cr = cs.check_running() - - # mock get_status and get_output - with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_status', return_value='error'): - with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_output', - return_value=[{'argument_name': 'raw_bam', 'files': 'abc'}]): - with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_uuid', return_value='run_uuid'): - res = next(cr) - - # check yielded result - assert len(res['workflow_runs']) == len(data_wflrun['workflow_runs']) # same as original - assert res['workflow_runs'][0] == {'name': 'workflow_bwa-mem_no_unzip-check', - 'workflow_run': 'run_uuid', - 'shard': '0:0', - 'jobid': 'somejobid', - 'status': 'failed'} # changed from running to failed, no output - assert res['failed_jobs'] == ['somejobid'] - - -def test_CheckStatusFF_running(): - """This check does not actually connect to the portal. 
- It uses mocks for get_status and get_output - """ - with open('test/files/CGAP_WGS_trio_scatter_ff.run.json') as json_file: - data_wflrun = json.load(json_file) - # fake that the first one is running - data_wflrun['workflow_runs'][0]['status'] = 'running' - data_wflrun['workflow_runs'][0]['jobid'] = 'somejobid' - # Create MetaWorkflowRun object and check_running generator - wflrun_obj = run_ff.MetaWorkflowRun(data_wflrun) - cs = checkstatus.CheckStatusFF(wflrun_obj) - cr = cs.check_running() - # Mock WorkflowRun with "started" status - with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_status', return_value='started'): - with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_output', - return_value=[{'argument_name': 'raw_bam', 'files': 'abc'}]): - with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_uuid', return_value='run_uuid'): - result = list(cr) - assert result == [] - - cr = cs.check_running() - # Mock WorkflowRun with "complete" status - with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_status', return_value='complete'): - with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_output', - return_value=[{'argument_name': 'raw_bam', 'files': 'abc'}]): - with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_uuid', return_value='run_uuid'): - result = list(cr) - assert len(result) == 1 +from magma_ff.utils import JsonObject + +from test.meta_workflow_run_handler_constants import * + +class TestCheckStatusFF: + def test_CheckStatusFF(self): + """This check does not actually connect to the portal. + It uses mocks for get_status and get_output + """ + with open('test/files/CGAP_WGS_trio_scatter_ff.run.json') as json_file: + data_wflrun = json.load(json_file) + + # fake that the first one is running + data_wflrun['workflow_runs'][0]['status'] = 'running' + data_wflrun['workflow_runs'][0]['jobid'] = 'somejobid' + + # Create MetaWorkflowRun object and check_running generator + wflrun_obj = run_ff.MetaWorkflowRun(data_wflrun) + cs = CheckStatusFF(wflrun_obj) + cr = cs.check_running() + + # mock get_status and get_output + with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_status', return_value='complete'): + with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_output', + return_value=[{'argument_name': 'raw_bam', 'files': 'abc'}]): + with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_uuid', return_value='run_uuid'): + res = next(cr) + + # check yielded result + assert len(res['workflow_runs']) == len(data_wflrun['workflow_runs']) # same as original + assert res['workflow_runs'][0] == {'name': 'workflow_bwa-mem_no_unzip-check', + 'workflow_run': 'run_uuid', + 'shard': '0:0', + 'jobid': 'somejobid', + 'status': 'completed', # changed from running to completed + 'output': [{'argument_name': 'raw_bam', 'files': 'abc'}]} # output is filled in + assert 'failed_jobs' not in res # if nothing failed, '' failed_jobs should not be in the patch dict + + + def test_CheckStatusFF_failed(self): + """This check does not actually connect to the portal. 
+ It uses mocks for get_status and get_output + """ + with open('test/files/CGAP_WGS_trio_scatter_ff.run.json') as json_file: + data_wflrun = json.load(json_file) + + # fake that the first one is running + data_wflrun['workflow_runs'][0]['status'] = 'running' + data_wflrun['workflow_runs'][0]['jobid'] = 'somejobid' + + # Create MetaWorkflowRun object and check_running generator + wflrun_obj = run_ff.MetaWorkflowRun(data_wflrun) + cs = CheckStatusFF(wflrun_obj) + cr = cs.check_running() + + # mock get_status and get_output + with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_status', return_value='error'): + with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_output', + return_value=[{'argument_name': 'raw_bam', 'files': 'abc'}]): + with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_uuid', return_value='run_uuid'): + res = next(cr) + + # check yielded result + assert len(res['workflow_runs']) == len(data_wflrun['workflow_runs']) # same as original + assert res['workflow_runs'][0] == {'name': 'workflow_bwa-mem_no_unzip-check', + 'workflow_run': 'run_uuid', + 'shard': '0:0', + 'jobid': 'somejobid', + 'status': 'failed'} # changed from running to failed, no output + assert res['failed_jobs'] == ['somejobid'] + + + def test_CheckStatusFF_running(self): + """This check does not actually connect to the portal. + It uses mocks for get_status and get_output + """ + with open('test/files/CGAP_WGS_trio_scatter_ff.run.json') as json_file: + data_wflrun = json.load(json_file) + # fake that the first one is running + data_wflrun['workflow_runs'][0]['status'] = 'running' + data_wflrun['workflow_runs'][0]['jobid'] = 'somejobid' + # Create MetaWorkflowRun object and check_running generator + wflrun_obj = run_ff.MetaWorkflowRun(data_wflrun) + cs = CheckStatusFF(wflrun_obj) + cr = cs.check_running() + # Mock WorkflowRun with "started" status + with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_status', return_value='started'): + with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_output', + return_value=[{'argument_name': 'raw_bam', 'files': 'abc'}]): + with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_uuid', return_value='run_uuid'): + result = list(cr) + assert result == [] + + cr = cs.check_running() + # Mock WorkflowRun with "complete" status + with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_status', return_value='complete'): + with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_output', + return_value=[{'argument_name': 'raw_bam', 'files': 'abc'}]): + with mock.patch('magma_ff.checkstatus.CheckStatusFF.get_uuid', return_value='run_uuid'): + result = list(cr) + assert len(result) == 1 + +################################################################## +AUTH_KEY = {"server": "some_server"} + +@contextmanager +def patch_get_meta_workflow_run_status(**kwargs) -> Iterator[mock.MagicMock]: + """Patch _meta_workflow_runs_cache property within FFMetaWfrUtils class.""" + with patch_context( + checkstatus_module.FFMetaWfrUtils, + "get_meta_workflow_run_status", + # new_callable=mock.PropertyMock, + **kwargs + ) as mock_item: + yield mock_item + +class TestCheckStatusRunHandlerFF: + """Testing for customized CheckStatus class for MetaWorkflow Run Handler (CGAP portal).""" + + @pytest.mark.parametrize( + "portal_run_status, expected_value", + [ + (PENDING, PENDING), + (RUNNING, RUNNING), + (COMPLETED, COMPLETED), + (FAILED, FAILED), + (INACTIVE, PENDING), + (STOPPED, STOPPED), + (QC_FAIL, FAILED), + ], + ) + def test_get_meta_workflow_run_step_status( + self, 
portal_run_status: str, expected_value: str
+    ) -> None:
+        """
+        Tests retrieval of MetaWorkflow Run status from portal, and status mapping to magma.
+        """
+        with patch_get_meta_workflow_run_status() as mock_get_status:
+            mock_get_status.return_value = portal_run_status
+            returned_step_status = CheckStatusRunHandlerFF(HANDLER_PENDING, AUTH_KEY).get_meta_workflow_run_step_status("tester")
+            assert returned_step_status == expected_value
+
+    @pytest.mark.parametrize(
+        "run_handler, orig_final_status, yielded_statuses, yielded_mwf_run_arrays",
+        [
+            (
+                HANDLER_STEPS_RUNNING,
+                RUNNING,
+                [COMPLETED],
+                [FIRST_STEP_COMPLETED_ARRAY],
+            )
+        ],
+    )
+    def test_update_running_steps(
+        self,
+        run_handler: JsonObject,
+        orig_final_status: str,
+        yielded_statuses: List[str],
+        yielded_mwf_run_arrays: List[List[Any]],
+    ) -> None:
+        """
+        Tests generator of dictionaries used to PATCH running MetaWorkflow Runs
+        and the final status of the overall MetaWorkflow Run Handler.
+        """
+        status_checker = CheckStatusRunHandlerFF(run_handler, AUTH_KEY)
+        assert (
+            getattr(status_checker.handler, FINAL_STATUS) == orig_final_status
+        )
+
+        with patch_get_meta_workflow_run_status() as mock_get_status:
+            mock_get_status.side_effect = yielded_statuses
+            # materialize the update generator once so it can be counted and then iterated
+            patch_dicts = list(status_checker.update_running_steps())
+            assert len(patch_dicts) == len(yielded_statuses)
+            for idx, step in enumerate(patch_dicts):
+                assert step[FINAL_STATUS] == yielded_statuses[idx]
+                assert step[META_WORKFLOW_RUNS] == yielded_mwf_run_arrays[idx]
diff --git a/test/test_create_metawflrun_handler_ff.py b/test/test_create_metawflrun_handler_ff.py
new file mode 100644
index 0000000..80c04c7
--- /dev/null
+++ b/test/test_create_metawflrun_handler_ff.py
@@ -0,0 +1,446 @@
+import mock
+import pytest
+from contextlib import contextmanager
+from test.utils import patch_context
+from typing import Iterator
+
+from magma_ff.utils import JsonObject
+from magma.magma_constants import *
+import magma_ff.create_metawflrun_handler as create_metaworkflow_run_handler_module
+from magma_ff.create_metawflrun_handler import (
+    MetaWorkflowRunHandlerFromItem,
+    MetaWorkflowRunHandlerCreationError,
+    create_meta_workflow_run_handler
+)
+
+from test.meta_workflow_handler_constants import *
+
+from magma_ff.metawfl_handler import MetaWorkflowHandler
+from magma.metawfl_handler import MetaWorkflowStep
+
+
+@contextmanager
+def patch_post_metadata(**kwargs) -> Iterator[mock.MagicMock]:
+    """Patch ff_utils.post_metadata call within MetaWorkflowRunHandlerFromItem class."""
+    with patch_context(
+        create_metaworkflow_run_handler_module.ff_utils, "post_metadata", **kwargs
+    ) as mock_item:
+        yield mock_item
+
+
+@contextmanager
+def patch_make_embed_request(**kwargs) -> Iterator[mock.MagicMock]:
+    """Patch make_embed_request function defined in magma_ff/utils.py,
+    which is called within MetaWorkflowRunHandlerFromItem class."""
+    with patch_context(
+        create_metaworkflow_run_handler_module,
+        "make_embed_request",
+        **kwargs
+    ) as mock_item:
+        yield mock_item
+
+
+@contextmanager
+def patch_create_meta_workflow_runs_array(**kwargs) -> Iterator[mock.MagicMock]:
+    """Patch _create_meta_workflow_runs_array method within MetaWorkflowRunHandlerFromItem class."""
+    with patch_context(
+        create_metaworkflow_run_handler_module.MetaWorkflowRunHandlerFromItem,
+        "_create_meta_workflow_runs_array",
+        **kwargs
+    ) as mock_item:
+        yield mock_item
+
+
+@contextmanager
+def patch_embed_items_for_creation(**kwargs) -> Iterator[mock.MagicMock]:
+    """Patch function that uses embed requests to convert
property traces to IDs.""" + with patch_context( + create_metaworkflow_run_handler_module.MetaWorkflowRunHandlerFromItem, + "_embed_items_for_creation", + **kwargs + ) as mock_item: + yield mock_item + +@contextmanager +def patch_retrieved_meta_workflow_handler(**kwargs) -> Iterator[mock.MagicMock]: + """Patch cached property of embedded meta_workflow_handler""" + with patch_context( + create_metaworkflow_run_handler_module.MetaWorkflowRunHandlerFromItem, + "retrieved_meta_workflow_handler", + new_callable=mock.PropertyMock, + **kwargs + ) as mock_item: + yield mock_item + +@contextmanager +def patch_retrieved_associated_item(**kwargs) -> Iterator[mock.MagicMock]: + """Patch cached property of embedded meta_workflow_handler""" + with patch_context( + create_metaworkflow_run_handler_module.MetaWorkflowRunHandlerFromItem, + "retrieved_associated_item", + new_callable=mock.PropertyMock, + **kwargs + ) as mock_item: + yield mock_item + + +@contextmanager +def patch_generate_uuid4(**kwargs) -> Iterator[mock.MagicMock]: + """Patch generator of uuids, + which is called within MetaWorkflowRunHandlerFromItem class.""" + with patch_context( + create_metaworkflow_run_handler_module.uuid, "uuid4", **kwargs + ) as mock_item: + yield mock_item + + +TODAY = "2023-05-12" +TESTER_PROJECT = "project_tester" +TESTER_INSTITUTION = "institution_tester" +TESTER_TITLE = "title_tester" +TESTER_UUID = "uuid" +TESTER_PROP_TRACE = "property.trace" + +ASSOCIATED_ITEM_UUID = "associated_item_tester_uuid" +ASSOCIATED_ITEM_SIMPLE_DICT = { + UUID: ASSOCIATED_ITEM_UUID, + PROJECT: TESTER_PROJECT, + INSTITUTION: TESTER_INSTITUTION, + # META_WORKFLOW_RUNS: [] # in the case that we wanna add dup flag back in future development + # TODO: and patching this array? should be handled in run mwfr handler +} + +META_WORKFLOW_HANDLER_UUID = "meta_workflow_handler_tester_uuid" + +MWF_STEP_NO_EMBEDS = { + META_WORKFLOW: "foo", + NAME: "bar", + ITEMS_FOR_CREATION_UUID: TESTER_UUID +} +MWF_STEP_NO_EMBEDS_2 = { + META_WORKFLOW: "foo", + NAME: "bar", + ITEMS_FOR_CREATION_UUID: [TESTER_UUID] +} +MWF_STEP_EMBED_SIMPLE = { + META_WORKFLOW: "foo", + NAME: "bar", + ITEMS_FOR_CREATION_PROP_TRACE: TESTER_PROP_TRACE +} +MWF_STEP_EMBED_SEVERAL = { + META_WORKFLOW: "foo", + NAME: "bar", + ITEMS_FOR_CREATION_PROP_TRACE: [TESTER_PROP_TRACE, TESTER_PROP_TRACE] +} + +# just redefining the uuids from the mwf handler dicts for consistency +# DAG_0 +# A B -----> C +HANDLER_DAG_0[UUID] = META_WORKFLOW_HANDLER_UUID +# DAG_1 +# B -----> D +# | ⋀ ⋀ +# | / | +# ⋁ / | +# A <----- C +HANDLER_DAG_1[UUID] = META_WORKFLOW_HANDLER_UUID + +# with title +HANDLER_DAG_0_W_TITLE = deepcopy(HANDLER_DAG_0) +HANDLER_DAG_0_W_TITLE[TITLE] = "DAG 0" + + +META_WORKFLOW_RUN_HANDLER_UUID = "meta_workflow_run_handler_tester_uuid" +AUTH_KEY = {"key": "foo"} + + +@pytest.fixture +def meta_workflow_run_handler_from_item_fixture(): + """Fixture of MetaWorkflowRunHandlerFromItem instance""" + return MetaWorkflowRunHandlerFromItem(ASSOCIATED_ITEM_UUID, META_WORKFLOW_HANDLER_UUID, AUTH_KEY) + + +class TestMetaWorkflowRunHandlerFromItem: + """Tests for methods/properties for MetaWorkflowRunHandlerFromItem class.""" + + @pytest.mark.parametrize( + "attribute, expected_value, assoc_item_id, mwf_handler_id, auth_key", + [ + ( + "auth_key", + AUTH_KEY, + ASSOCIATED_ITEM_UUID, + META_WORKFLOW_HANDLER_UUID, + AUTH_KEY, + ), + ( + "associated_item_identifier", + ASSOCIATED_ITEM_UUID, + ASSOCIATED_ITEM_UUID, + META_WORKFLOW_HANDLER_UUID, + AUTH_KEY, + ), + ( + "meta_workflow_handler_identifier", 
+ META_WORKFLOW_HANDLER_UUID, + ASSOCIATED_ITEM_UUID, + META_WORKFLOW_HANDLER_UUID, + AUTH_KEY, + ), + ( + "auth_key", + None, + ASSOCIATED_ITEM_UUID, + META_WORKFLOW_HANDLER_UUID, + None, + ), + ( + "associated_item_identifier", + None, + None, + META_WORKFLOW_HANDLER_UUID, + AUTH_KEY, + ), + ( + "meta_workflow_handler_identifier", + None, + ASSOCIATED_ITEM_UUID, + None, + AUTH_KEY, + ), + ], + ) + def test_init( + self, attribute, expected_value, assoc_item_id, mwf_handler_id, auth_key + ): + """Test that instance attributes are set correctly.""" + try: + meta_workflow_run_handler_from_item = MetaWorkflowRunHandlerFromItem( + assoc_item_id, mwf_handler_id, auth_key + ) + result = getattr(meta_workflow_run_handler_from_item, attribute) + assert result == expected_value + except MetaWorkflowRunHandlerCreationError as creation_err: + assert attribute in str(creation_err) + + + @pytest.mark.parametrize( + "meta_workflow_step, exception_expected, return_value, num_embed_calls", + [ + ( + MetaWorkflowStep(MWF_STEP_EMBED_SIMPLE), + True, + None, + 1 + ), + ( + MetaWorkflowStep(MWF_STEP_EMBED_SEVERAL), + True, + None, + 1 + ), + ( + MetaWorkflowStep(MWF_STEP_NO_EMBEDS), + False, + TESTER_UUID, + 0 + ), + ( + MetaWorkflowStep(MWF_STEP_NO_EMBEDS_2), + False, + [TESTER_UUID], + 0 + ), + ( + MetaWorkflowStep(MWF_STEP_EMBED_SIMPLE), + False, + TESTER_UUID, + 1 + ), + ( + MetaWorkflowStep(MWF_STEP_EMBED_SEVERAL), + False, + [TESTER_UUID, TESTER_UUID], + 2 + ) + ], + ) + def test_embed_items_for_creation( + self, meta_workflow_step, exception_expected, return_value, num_embed_calls, meta_workflow_run_handler_from_item_fixture + ): + """ + Tests the conversion of the items_for_creation_(uuid/prop_trace) in MetaWorkflow Steps + to items_for_creation in MetaWorkflow Run Steps in the Run Handler. + """ + with patch_make_embed_request() as mock_embed_request: + if exception_expected: + mock_embed_request.return_value = None + with pytest.raises(MetaWorkflowRunHandlerCreationError): + result = meta_workflow_run_handler_from_item_fixture._embed_items_for_creation(meta_workflow_step) + assert mock_embed_request.call_count == num_embed_calls + else: + mock_embed_request.return_value = TESTER_UUID + result = meta_workflow_run_handler_from_item_fixture._embed_items_for_creation(meta_workflow_step) + assert result == return_value + assert mock_embed_request.call_count == num_embed_calls + + @pytest.mark.parametrize( + "meta_workflow_handler, num_step_calls", + [ + (HANDLER_DAG_0, 3), + (HANDLER_DAG_1, 4) + ], + ) + def test_create_meta_workflow_runs_array( + self, meta_workflow_handler, num_step_calls, meta_workflow_run_handler_from_item_fixture + ): + """ + Tests the conversion of the ordered MetaWorkflow Steps + to MetaWorkflow Run Steps in the Run Handler. + Implicitly testing the property ordered_meta_workflows, + and cached property retrieved_meta_workflow_handler. 
+ """ + with patch_retrieved_meta_workflow_handler(return_value=meta_workflow_handler): + with patch_embed_items_for_creation(return_value=TESTER_UUID) as mock_embed_request: + handler = meta_workflow_run_handler_from_item_fixture + result = handler._create_meta_workflow_runs_array() + + orig_ordered_mwf_names = getattr(handler.meta_workflow_handler_instance, ORDERED_META_WORKFLOWS) + orig_mwf_steps = getattr(handler.meta_workflow_handler_instance, META_WORKFLOWS) + + for idx, name in enumerate(orig_ordered_mwf_names): + assert result[idx][NAME] == name + assert result[idx][DEPENDENCIES] == getattr(orig_mwf_steps[name], DEPENDENCIES) + assert result[idx][ITEMS_FOR_CREATION] == TESTER_UUID + + assert mock_embed_request.call_count == num_step_calls + + + @pytest.mark.parametrize( + "meta_workflow_handler", + [ + (HANDLER_DAG_0), + (HANDLER_DAG_1) + ], + ) + def test_create_meta_workflow_run_handler_no_title( + self, meta_workflow_handler, meta_workflow_run_handler_from_item_fixture + ): + """ + Tests creation of run handler function, + using regular handler as template. + """ + with patch_retrieved_meta_workflow_handler(return_value=meta_workflow_handler): + with patch_retrieved_associated_item(return_value=ASSOCIATED_ITEM_SIMPLE_DICT) as mocked_assoc_item: + with patch_generate_uuid4(return_value=META_WORKFLOW_RUN_HANDLER_UUID) as mocked_uuid: + with mock.patch('datetime.date') as mocked_current_date: + with patch_create_meta_workflow_runs_array() as mocked_mwfr_arr_creation: + completed_handler = meta_workflow_run_handler_from_item_fixture.create_meta_workflow_run_handler() + mocked_uuid.assert_called_once() + mocked_current_date.assert_not_called() + assert mocked_assoc_item.call_count == 2 + mocked_mwfr_arr_creation.assert_called_once() + + assert completed_handler[PROJECT] == TESTER_PROJECT + assert completed_handler[INSTITUTION] == TESTER_INSTITUTION + assert completed_handler[UUID] == META_WORKFLOW_RUN_HANDLER_UUID + assert completed_handler[ASSOCIATED_META_WORKFLOW_HANDLER] == META_WORKFLOW_HANDLER_UUID + assert completed_handler[ASSOCIATED_ITEM] == ASSOCIATED_ITEM_UUID + assert completed_handler[FINAL_STATUS] == PENDING + assert completed_handler.get(TITLE) is None + assert getattr(meta_workflow_run_handler_from_item_fixture, "meta_workflow_run_handler", None) is not None + + @mock.patch('magma_ff.create_metawflrun_handler.date') + def test_create_meta_workflow_run_handler_with_title( + self, mocked_date, meta_workflow_run_handler_from_item_fixture + ): + """ + Tests creation of run handler function, + using regular handler as template, including title formatting. 
+ """ + with patch_retrieved_meta_workflow_handler(return_value=HANDLER_DAG_0_W_TITLE): + with patch_retrieved_associated_item(return_value=ASSOCIATED_ITEM_SIMPLE_DICT): + with patch_generate_uuid4(return_value=META_WORKFLOW_RUN_HANDLER_UUID): + with patch_create_meta_workflow_runs_array(): + mocked_date.today.return_value.isoformat.return_value = TODAY + completed_handler = meta_workflow_run_handler_from_item_fixture.create_meta_workflow_run_handler() + mocked_date.today.assert_called_once() + assert completed_handler[TITLE] == f"MetaWorkflowRun Handler {HANDLER_DAG_0_W_TITLE[TITLE]} created {TODAY}" + + + @pytest.mark.parametrize("exception", [True, False]) + def test_post_meta_workflow_run_handler(self, exception, meta_workflow_run_handler_from_item_fixture): + """Test MetaWorkflow Run Handler POST to CGAP portal.""" + + with patch_retrieved_meta_workflow_handler(return_value=HANDLER_DAG_0): + with patch_retrieved_associated_item(return_value=ASSOCIATED_ITEM_SIMPLE_DICT): + with patch_generate_uuid4(return_value=META_WORKFLOW_RUN_HANDLER_UUID): + meta_workflow_run_handler_from_item_fixture.create_meta_workflow_run_handler() + if exception: + with patch_post_metadata(side_effect=Exception) as mock_post_with_error: + with pytest.raises(MetaWorkflowRunHandlerCreationError) as creation_err: + meta_workflow_run_handler_from_item_fixture.post_meta_workflow_run_handler() + assert "MetaWorkflowRunHandler not POSTed" in creation_err + mock_post_with_error.assert_called_once() + else: + with patch_post_metadata() as mock_post: + meta_workflow_run_handler_from_item_fixture.post_meta_workflow_run_handler() + mock_post.assert_called_once_with( + getattr(meta_workflow_run_handler_from_item_fixture, "meta_workflow_run_handler"), + MetaWorkflowRunHandlerFromItem.META_WORKFLOW_RUN_HANDLER_ENDPOINT, + key=AUTH_KEY + ) + +##################################################### +META_WORKFLOW_RUN_HANDLER_SIMPLE_DICT = { + UUID: META_WORKFLOW_RUN_HANDLER_UUID, + PROJECT: TESTER_PROJECT, + INSTITUTION: TESTER_INSTITUTION, + "auth_key": AUTH_KEY, + ASSOCIATED_META_WORKFLOW_HANDLER: META_WORKFLOW_HANDLER_UUID, + ASSOCIATED_ITEM: ASSOCIATED_ITEM_UUID, + FINAL_STATUS: PENDING, + META_WORKFLOW_RUNS: [], #TODO: is this correct +} + +@contextmanager +def patch_create_meta_workflow_run_handler(**kwargs) -> Iterator[mock.MagicMock]: + """Patch function that uses embed requests to convert property traces to IDs.""" + with patch_context( + create_metaworkflow_run_handler_module.MetaWorkflowRunHandlerFromItem, + "create_meta_workflow_run_handler", + **kwargs + ) as mock_item: + yield mock_item + +@contextmanager +def patch_post_meta_workflow_run_handler(**kwargs) -> Iterator[mock.MagicMock]: + """Patch function that uses embed requests to convert property traces to IDs.""" + with patch_context( + create_metaworkflow_run_handler_module.MetaWorkflowRunHandlerFromItem, + "post_meta_workflow_run_handler", + **kwargs + ) as mock_item: + yield mock_item + + +@pytest.mark.parametrize("post", [True, False]) +def test_create_meta_workflow_run_handler( + post: bool +) -> None: + """Test of wrapper function to Run Handler creation class.""" + with patch_create_meta_workflow_run_handler(return_value=META_WORKFLOW_RUN_HANDLER_SIMPLE_DICT) as mock_handler_creation: + with patch_post_meta_workflow_run_handler() as mock_post_handler: + result = create_meta_workflow_run_handler( + ASSOCIATED_ITEM_UUID, + META_WORKFLOW_HANDLER_UUID, + AUTH_KEY, + post + ) + mock_handler_creation.assert_called_once() + if post: + 
mock_post_handler.assert_called_once() + else: + mock_post_handler.assert_not_called() + + assert result == META_WORKFLOW_RUN_HANDLER_SIMPLE_DICT \ No newline at end of file diff --git a/test/test_metawfl_handler.py b/test/test_metawfl_handler.py new file mode 100644 index 0000000..0e35c9c --- /dev/null +++ b/test/test_metawfl_handler.py @@ -0,0 +1,307 @@ +#!/usr/bin/env python3 + +################################################################# +# Libraries +################################################################# +import pytest +from copy import deepcopy + +from magma.metawfl_handler import * +from magma.magma_constants import * + +################################################################# +# Vars +################################################################# + +MWF_HANDLER_NAME = "test_mwf_handler" +MWF_HANDLER_PROJECT = "test_project" +MWF_HANDLER_INSTITUTION = "test_institution" +MWF_HANDLER_UUID = "test_mwf_handler_uuid" + +TESTER_UUID = "test_item_uuid" + +# Basic meta_workflow steps (dicts) used in meta_workflows array +MWF_A = {"meta_workflow": "test_mwf_uuid_0", "name": "A"} +MWF_B = {"meta_workflow": "test_mwf_uuid_1", "name": "B"} +MWF_C = {"meta_workflow": "test_mwf_uuid_2", "name": "C"} +MWF_D = {"meta_workflow": "test_mwf_uuid_3", "name": "D"} + +# Dependencies +DEP_ON_A = ["A"] +DEP_ON_B = ["B"] +DEP_ON_C = ["C"] +DEP_ON_D = ["D"] + +def meta_workflow_with_added_attrs(meta_workflow_dict, items_for_creation_property_trace=None, items_for_creation_uuid=None, dependencies=None): + """ + Helper function used to add non-required attributes to a MetaWorkflow step input dictionary. + Returns new MetaWorkflow step dictionary with added attributes. + + :param meta_workflow_dict: dictionary containing required attributes for MetaWorkflow step ("name" and "meta_workflow"): + :type meta_workflow_dict: dic + :param items_for_creation_property_trace: property trace(s) of item(s) required to create MetaWorkflow Run from MetaWorkflow + :type items_for_creation_property_trace: str or list[str] or None + :param items_for_creation_uuid: uuid(s) of item(s) required to create MetaWorkflow Run from MetaWorkflow + :type items_for_creation_uuid: str or list[str] or None + :param dependencies: list of MetaWorkflows (names) that the current MetaWorkflow is dependent on + :type dependencies: list[str] + :return: reformatted MetaWorkflow dictionary with added attributes + """ + dict_copy = deepcopy(meta_workflow_dict) + if items_for_creation_property_trace: + dict_copy[ITEMS_FOR_CREATION_PROP_TRACE] = items_for_creation_property_trace + if items_for_creation_uuid: + dict_copy[ITEMS_FOR_CREATION_UUID] = items_for_creation_uuid + if dependencies is not None: + dict_copy[DEPENDENCIES] = dependencies + return dict_copy + + +# meta_workflows arrays for MetaWorkflow Handler +# handler without uuid -- fails validation of basic attributes +HANDLER_WITHOUT_UUID_DICT = { + NAME: MWF_HANDLER_NAME, + PROJECT: MWF_HANDLER_PROJECT, + INSTITUTION: MWF_HANDLER_INSTITUTION +} + +# handler without meta_workflows array -- passes validation, should set empty metaworkflows array +HANDLER_WITHOUT_MWF_ARRAY_DICT = { + NAME: MWF_HANDLER_NAME, + PROJECT: MWF_HANDLER_PROJECT, + INSTITUTION: MWF_HANDLER_INSTITUTION, + UUID: MWF_HANDLER_UUID +} + +# DAG_0 +# A B -----> C +MWF_A_DAG_0 = meta_workflow_with_added_attrs(MWF_A, None, TESTER_UUID, []) +MWF_B_DAG_0 = meta_workflow_with_added_attrs(MWF_B, None, TESTER_UUID, []) +MWF_B_DAG_0_W_DEP = meta_workflow_with_added_attrs(MWF_B, None, TESTER_UUID, DEP_ON_A) 
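As a quick sanity check (an editorial sketch, not part of the patch), the step dict defined just above is expected to expand to four attributes, which is what the num_attributes values in the step-validation tests further down correspond to:

    assert MWF_B_DAG_0_W_DEP == {
        "meta_workflow": "test_mwf_uuid_1",
        "name": "B",
        "items_for_creation_uuid": "test_item_uuid",  # TESTER_UUID
        "dependencies": ["A"],                        # DEP_ON_A
    }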
+MWF_C_DAG_0 = meta_workflow_with_added_attrs(MWF_C, None, TESTER_UUID, DEP_ON_B) +DAG_0_MWF_ARRAY = [MWF_B_DAG_0, MWF_A_DAG_0, MWF_C_DAG_0] # purposely in this order to test toposort +HANDLER_DAG_0 = { + NAME: MWF_HANDLER_NAME, + PROJECT: MWF_HANDLER_PROJECT, + INSTITUTION: MWF_HANDLER_INSTITUTION, + UUID: MWF_HANDLER_UUID, + META_WORKFLOWS: DAG_0_MWF_ARRAY +} +DAG_0_MWF_ARRAY_W_DUPLICATES = [MWF_B_DAG_0, MWF_A_DAG_0, MWF_C_DAG_0, MWF_B_DAG_0] +HANDLER_DAG_0_W_DUPLICATES = { + NAME: MWF_HANDLER_NAME, + PROJECT: MWF_HANDLER_PROJECT, + INSTITUTION: MWF_HANDLER_INSTITUTION, + UUID: MWF_HANDLER_UUID, + META_WORKFLOWS: DAG_0_MWF_ARRAY_W_DUPLICATES +} +DAG_0_MWF_ARRAY_W_DUPLICATES_BY_MWF_NAME = [MWF_B_DAG_0, MWF_A_DAG_0, MWF_C_DAG_0, MWF_B_DAG_0_W_DEP] +HANDLER_DAG_0_W_DUPLICATES_BY_MWF_NAME = { + NAME: MWF_HANDLER_NAME, + PROJECT: MWF_HANDLER_PROJECT, + INSTITUTION: MWF_HANDLER_INSTITUTION, + UUID: MWF_HANDLER_UUID, + META_WORKFLOWS: DAG_0_MWF_ARRAY_W_DUPLICATES_BY_MWF_NAME +} +REORDERED_MWFS_DAG_0 = [["A", "B", "C"], ["B", "A", "C"], ["B", "C", "A"]] + +# DAG_1 +# B -----> D +# | ⋀ ⋀ +# | / | +# ⋁ / | +# A <----- C +MWF_A_DAG_1 = meta_workflow_with_added_attrs(MWF_A, None, TESTER_UUID, DEP_ON_B + DEP_ON_C) +MWF_B_DAG_1 = meta_workflow_with_added_attrs(MWF_B, None, TESTER_UUID, []) +MWF_C_DAG_1 = meta_workflow_with_added_attrs(MWF_C, None, TESTER_UUID, []) +MWF_D_DAG_1 = meta_workflow_with_added_attrs(MWF_D, None, TESTER_UUID, DEP_ON_A + DEP_ON_B + DEP_ON_C) +DAG_1_MWF_ARRAY = [MWF_A_DAG_1, MWF_B_DAG_1, MWF_C_DAG_1, MWF_D_DAG_1] +HANDLER_DAG_1 = { + NAME: MWF_HANDLER_NAME, + PROJECT: MWF_HANDLER_PROJECT, + INSTITUTION: MWF_HANDLER_INSTITUTION, + UUID: MWF_HANDLER_UUID, + META_WORKFLOWS: DAG_1_MWF_ARRAY +} +REORDERED_MWFS_DAG_1 = [["B", "C", "A", "D"], ["C", "B", "A", "D"]] + +# CYCLIC_0 +# A B__ +# ⋀ \_____ +# | | +# | | +# C <----- | +MWF_A_CYCLIC_0 = meta_workflow_with_added_attrs(MWF_A, None, TESTER_UUID, []) +MWF_B_CYCLIC_0 = meta_workflow_with_added_attrs(MWF_B, None, TESTER_UUID, DEP_ON_C) +MWF_C_CYCLIC_0 = meta_workflow_with_added_attrs(MWF_C, None, TESTER_UUID, DEP_ON_B) +CYCLIC_0_MWF_ARRAY = [MWF_A_CYCLIC_0, MWF_B_CYCLIC_0, MWF_C_CYCLIC_0] +HANDLER_CYCLIC_0 = { + NAME: MWF_HANDLER_NAME, + PROJECT: MWF_HANDLER_PROJECT, + INSTITUTION: MWF_HANDLER_INSTITUTION, + UUID: MWF_HANDLER_UUID, + META_WORKFLOWS: CYCLIC_0_MWF_ARRAY +} + +# CYCLIC_1 +# A -----> B +# ⋀ | +# | | +# | ⋁ +# D <----- C +MWF_A_CYCLIC_1 = meta_workflow_with_added_attrs(MWF_A, None, TESTER_UUID, DEP_ON_D) +MWF_B_CYCLIC_1 = meta_workflow_with_added_attrs(MWF_B, None, TESTER_UUID, DEP_ON_A) +MWF_C_CYCLIC_1 = meta_workflow_with_added_attrs(MWF_C, None, TESTER_UUID, DEP_ON_B) +MWF_D_CYCLIC_1 = meta_workflow_with_added_attrs(MWF_D, None, TESTER_UUID, DEP_ON_C) +CYCLIC_1_MWF_ARRAY = [MWF_A_CYCLIC_1, MWF_B_CYCLIC_1, MWF_C_CYCLIC_1, MWF_D_CYCLIC_1] +HANDLER_CYCLIC_1 = { + NAME: MWF_HANDLER_NAME, + PROJECT: MWF_HANDLER_PROJECT, + INSTITUTION: MWF_HANDLER_INSTITUTION, + UUID: MWF_HANDLER_UUID, + META_WORKFLOWS: CYCLIC_1_MWF_ARRAY +} + +################################################################# +# Tests +################################################################# +class TestMetaWorkflowStep: + @pytest.mark.parametrize( + "mwf_step_dict, items_for_creation_property_trace, items_for_creation_uuid, dependencies, num_attributes", + [ + (MWF_A, "sample_processing.samples", None, None, 3), + (MWF_B, None, TESTER_UUID, None, 3), + (MWF_B, None, TESTER_UUID, DEP_ON_A, 4) + ] + ) + def test_attribute_validation_no_errors(self, 
mwf_step_dict, items_for_creation_property_trace, items_for_creation_uuid, dependencies, num_attributes): + """ + Tests creation of appropriate MetaWorkflowStep objects, + no errors raised. + """ + completed_dict = meta_workflow_with_added_attrs(mwf_step_dict, items_for_creation_property_trace, items_for_creation_uuid, dependencies) + meta_workflow_step_object = MetaWorkflowStep(completed_dict) + assert num_attributes == len(meta_workflow_step_object.__dict__) + + required_attributes = [META_WORKFLOW, NAME] + for attr in required_attributes: + assert hasattr(meta_workflow_step_object, attr) == True + + @pytest.mark.parametrize( + "mwf_step_dict, items_for_creation_property_trace, items_for_creation_uuid, dependencies", + [ + (MWF_C, "sample_processing.samples", TESTER_UUID, None), # has both uuid and property trace for items for creation + (MWF_A, None, None, None), # missing items for creation + ] + ) + def test_attribute_validation_attribute_errors(self, mwf_step_dict, items_for_creation_property_trace, items_for_creation_uuid, dependencies): + """ + Tests creation of appropriate MetaWorkflowStep objects, + Attribute Error raised due to missing required attributes. + """ + with pytest.raises(AttributeError) as attr_err_info: + completed_dict = meta_workflow_with_added_attrs(mwf_step_dict, items_for_creation_property_trace, items_for_creation_uuid, dependencies) + MetaWorkflowStep(completed_dict) + assert "Object validation error" in str(attr_err_info.value) + + @pytest.mark.parametrize( + "mwf_step_dict, items_for_creation_property_trace, items_for_creation_uuid, dependencies", + [ + (MWF_A, None, TESTER_UUID, DEP_ON_A) + ] + ) + def test_check_self_dep(self, mwf_step_dict, items_for_creation_property_trace, items_for_creation_uuid, dependencies): + """ + Tests the method that checks that a MetaWorkflow Step doesn't depend on itself. + """ + with pytest.raises(MetaWorkflowStepSelfDependencyError) as self_dep_err_err_info: + completed_dict = meta_workflow_with_added_attrs(mwf_step_dict, items_for_creation_property_trace, items_for_creation_uuid, dependencies) + MetaWorkflowStep(completed_dict) + assert mwf_step_dict[NAME] in str(self_dep_err_err_info.value) + + +class TestMetaWorkflowHandler: + @pytest.mark.parametrize( + "mwf_handler_dict", + [(HANDLER_WITHOUT_UUID_DICT), (HANDLER_WITHOUT_MWF_ARRAY_DICT)] + ) + def test_attribute_validation_mwf_handler(self, mwf_handler_dict): + """ + Tests that makes sure handler has all required attributes ("uuid"). + """ + try: + handler_obj = MetaWorkflowHandler(mwf_handler_dict) + except AttributeError as attr_err_info: + assert "Object validation error" in str(attr_err_info) + else: + assert hasattr(handler_obj, UUID) == True + + + + @pytest.mark.parametrize( + "mwf_handler_dict, length_of_mwf_dict", + [ + (HANDLER_WITHOUT_MWF_ARRAY_DICT, 0), # sets empty dict if attr not present + (HANDLER_DAG_0, 3), + ] + ) + def test_set_meta_workflows_dict(self, mwf_handler_dict, length_of_mwf_dict): + """ + Tests the creation of MetaWorkflow Step(s) dictionary. 
+ """ + meta_workflow_handler = MetaWorkflowHandler(mwf_handler_dict) + assert len(getattr(meta_workflow_handler, META_WORKFLOWS)) == length_of_mwf_dict + + meta_workflow_steps_dict = getattr(meta_workflow_handler, META_WORKFLOWS) + assert isinstance(meta_workflow_steps_dict, dict) + for step in meta_workflow_steps_dict.values(): + assert isinstance(step, MetaWorkflowStep) + + @pytest.mark.parametrize( + "mwf_handler_dict", + [ + (HANDLER_DAG_0_W_DUPLICATES), # complete duplicates + (HANDLER_DAG_0_W_DUPLICATES_BY_MWF_NAME) # duplicates by mwf name + ] + ) + def test_set_meta_workflows_dict_w_error(self, mwf_handler_dict): + """ + Tests for the check of duplicate MetaWorkflow Steps, by name, during + creation of the MetaWorkflow Step(s) dictionary. + """ + with pytest.raises(MetaWorkflowStepDuplicateError) as dup_err_info: + MetaWorkflowHandler(mwf_handler_dict) + assert '"B" is a duplicate MetaWorkflow' in str(dup_err_info) + + + @pytest.mark.parametrize( + "mwf_handler_dict, possible_reordered_mwf_lists", + [ + (HANDLER_WITHOUT_MWF_ARRAY_DICT, [[]]), + (HANDLER_DAG_0, REORDERED_MWFS_DAG_0), + (HANDLER_DAG_1, REORDERED_MWFS_DAG_1) + ] + ) + def test_create_ordered_meta_workflows_list(self, mwf_handler_dict, possible_reordered_mwf_lists): + """ + Tests the topological sorting of MetaWorkflow steps. + """ + meta_workflow_handler = MetaWorkflowHandler(mwf_handler_dict) + assert getattr(meta_workflow_handler, ORDERED_META_WORKFLOWS) in possible_reordered_mwf_lists + + + @pytest.mark.parametrize( + "mwf_handler_dict", + [ + (HANDLER_CYCLIC_0), + (HANDLER_CYCLIC_1) + ] + ) + def test_cycles(self, mwf_handler_dict): + """ + Tests the topological sorting of MetaWorkflow steps, + raising MetaWorkflowStepCycleError because of presence of cycles. + """ + with pytest.raises(MetaWorkflowStepCycleError) as cycle_err_info: + MetaWorkflowHandler(mwf_handler_dict) + assert "nodes are in a cycle" in str(cycle_err_info) \ No newline at end of file diff --git a/test/test_metawflrun_handler.py b/test/test_metawflrun_handler.py new file mode 100644 index 0000000..428143a --- /dev/null +++ b/test/test_metawflrun_handler.py @@ -0,0 +1,395 @@ +#!/usr/bin/env python3 + +################################################################# +# Libraries +################################################################# +import pytest +from copy import deepcopy + +from magma.metawflrun_handler import MetaWorkflowRunStep, MetaWorkflowRunHandler +from magma.magma_constants import * + +################################################################# +# Vars +################################################################# + +MWF_RUN_HANDLER_NAME = "test_mwf_run_handler" +MWF_RUN_PROJECT = "test_project" +MWF_RUN_INSTITUTION = "test_institution" +MWF_RUN_HANDLER_UUID = "test_mwf_run_handler_uuid" + +TESTER_UUID = "test_item_uuid" + + +# basic meta_workflow_run dicts used in meta_workflow_runs array +# will have attributes added to them using mwf_run_with_added_attrs() +MWFR_A = {"name": "A"} +MWFR_B = {"name": "B"} +MWFR_C = {"name": "C"} +MWFR_D = {"name": "D"} + +MWF_NAMES_LIST = ["B", "C", "A", "D"] + +DEP_ON_A = ["A"] +DEP_ON_B = ["B"] +DEP_ON_C = ["C"] +DEP_ON_D = ["D"] + +def mwf_run_with_added_attrs(meta_workflow_run_dict, dependencies=None, items_for_creation=None, \ + status=None, meta_workflow_run_linkto=None, error=None): + """ + Generates an updated meta_workflow_run_dict given a basic meta_workflow_run_dict and attributes to add. 
+ These attributes are limited to dependencies, items_for_creation, and status for these tests. + + :param meta_workflow_run_dict: Dictionary with basic attribute(s) of a MetaWorkflow Run + :type meta_workflow_run_dict: dict + :param dependencies: MetaWorkflow Runs, by name, that the given MetaWorkflow Run depends on + :type dependencies: list + :param items_for_creation: Item linkTo(s) needed to created the given MetaWorkflow Run + :type items_for_creation: str or list[str] + :param status: the status of the given MetaWorkflow Run + :type status: str + :param meta_workflow_run_linkto: the linkTo to a "created" MetaWorkflow Run on CGAP portal + :type meta_workflow_run_linkto: str + :param error: error traceback at "creation" of a MetaWorkflow Run + :type error: str + :return: updated meta_workflow_run_dict + """ + dict_copy = deepcopy(meta_workflow_run_dict) + if dependencies is not None: + dict_copy[DEPENDENCIES] = dependencies + if items_for_creation is not None: + dict_copy[ITEMS_FOR_CREATION] = items_for_creation + if status is not None: + dict_copy[STATUS] = status + if meta_workflow_run_linkto is not None: + dict_copy[META_WORKFLOW_RUN] = meta_workflow_run_linkto + if error is not None: + dict_copy[ERROR] = error + return dict_copy + +def mwfr_handler_dict_generator(meta_workflow_runs_array): + """ + Given a meta_workflow_runs array, returns an input dict for + creation of a MetaWorkflow Run Handler object. + + :param meta_workflow_runs_array: list of meta_workflow_run dicts + :type meta_workflow_runs_array: list[dict] + :return: dictionary to be used as input to instantiate a MetaWorkflow Run Handler object + """ + return { + NAME: MWF_RUN_HANDLER_NAME, + PROJECT: MWF_RUN_PROJECT, + INSTITUTION: MWF_RUN_INSTITUTION, + UUID: MWF_RUN_HANDLER_UUID, + ASSOCIATED_META_WORKFLOW_HANDLER: TESTER_UUID, + META_WORKFLOW_RUNS: meta_workflow_runs_array + } + + +# handler without uuid -- fails validation of basic attributes +full_handler_dict_0 = mwfr_handler_dict_generator([]) +full_handler_dict_0.pop(UUID) +HANDLER_WITHOUT_UUID_DICT = full_handler_dict_0 + + +# handler without associated MetaWorkflow Handler uuid -- fails validation of basic attributes +full_handler_dict_1 = mwfr_handler_dict_generator([]) +full_handler_dict_1.pop(ASSOCIATED_META_WORKFLOW_HANDLER) +HANDLER_WITHOUT_ASSOC_MWFH_DICT = full_handler_dict_1 + +# handler without meta_workflow_runs array -- fails validation of basic attributes +HANDLER_WITHOUT_META_WORKFLOW_RUNS_ARRAY = mwfr_handler_dict_generator(None) + +# Constructing a Run Handler with the below step dependencies +# B -----> D +# | ⋀ ⋀ +# | / | +# ⋁ / | +# A <----- C + +# Pending MetaWorkflow Run dicts +MWFR_A_PENDING = mwf_run_with_added_attrs(MWFR_A, DEP_ON_B + DEP_ON_C, TESTER_UUID, PENDING) +MWFR_B_PENDING = mwf_run_with_added_attrs(MWFR_B, [], TESTER_UUID, PENDING) +MWFR_C_PENDING = mwf_run_with_added_attrs(MWFR_C, [], TESTER_UUID, PENDING) +MWFR_D_PENDING = mwf_run_with_added_attrs(MWFR_D, DEP_ON_A + DEP_ON_B + DEP_ON_C, TESTER_UUID, PENDING) + +# Running MetaWorkflow Run dicts +MWFR_A_RUNNING = mwf_run_with_added_attrs(MWFR_A, DEP_ON_B + DEP_ON_C, TESTER_UUID, RUNNING) +MWFR_B_RUNNING = mwf_run_with_added_attrs(MWFR_B, [], TESTER_UUID, RUNNING) +MWFR_C_RUNNING = mwf_run_with_added_attrs(MWFR_C, [], TESTER_UUID, RUNNING) +MWFR_D_RUNNING = mwf_run_with_added_attrs(MWFR_D, DEP_ON_A + DEP_ON_B + DEP_ON_C, TESTER_UUID, RUNNING) + +# Failed/stopped MetaWorkflowRun dicts +MWFR_A_FAILED = mwf_run_with_added_attrs(MWFR_A, DEP_ON_B + DEP_ON_C, TESTER_UUID, FAILED) 
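A small usage sketch (editorial, not part of the patch), based on the attribute-validation test further down: when one of these run-step dicts is built without an explicit status, MetaWorkflowRunStep is expected to default it to pending:

    step = MetaWorkflowRunStep(mwf_run_with_added_attrs(MWFR_B, [], TESTER_UUID))
    assert step.status == PENDING  # no "status" key supplied, so the default applies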
+MWFR_A_STOPPED = mwf_run_with_added_attrs(MWFR_A, DEP_ON_B + DEP_ON_C, TESTER_UUID, STOPPED) + +# Completed MetaWorkflow Run dicts +MWFR_A_COMPLETED = mwf_run_with_added_attrs(MWFR_A, DEP_ON_B + DEP_ON_C, TESTER_UUID, COMPLETED) +MWFR_B_COMPLETED = mwf_run_with_added_attrs(MWFR_B, [], TESTER_UUID, COMPLETED) +MWFR_C_COMPLETED = mwf_run_with_added_attrs(MWFR_C, [], TESTER_UUID, COMPLETED) +MWFR_D_COMPLETED = mwf_run_with_added_attrs(MWFR_D, DEP_ON_A + DEP_ON_B + DEP_ON_C, TESTER_UUID, COMPLETED) + + +# Magma FF - specific attributes handled here (for updating meta_workflow_runs array method) +MWFR_B_COMPLETED_W_LINKTO = mwf_run_with_added_attrs(MWFR_B, [], TESTER_UUID, COMPLETED, "a_link_to") +MWFR_A_FAILED_W_ERROR = mwf_run_with_added_attrs(MWFR_A, DEP_ON_B + DEP_ON_C, TESTER_UUID, FAILED, None, "error_message") +MWFR_A_STOPPED_W_LINKTO_AND_ERROR = mwf_run_with_added_attrs(MWFR_A, DEP_ON_B + DEP_ON_C, TESTER_UUID, STOPPED,\ + "another_link_to", "and_another_error_message") + +# Note: these MetaWorkflowRuns above will be mixed and matched for testing purposes +# See meta_workflow_runs arrays and Run Handle input dicts below + +# All steps pending +PENDING_ARRAY = [MWFR_B_PENDING, MWFR_C_PENDING, MWFR_A_PENDING, MWFR_D_PENDING] +HANDLER_PENDING = mwfr_handler_dict_generator(PENDING_ARRAY) + +# Handlers currently running +RUNNING_MWFR_ARRAY = [MWFR_B_RUNNING, MWFR_C_RUNNING, MWFR_A_PENDING, MWFR_D_PENDING] +RUNNING_MWFR_ARRAY_2 = [MWFR_B_COMPLETED_W_LINKTO, MWFR_C_RUNNING, MWFR_A_PENDING, MWFR_D_PENDING] +# this wouldn't happen with THIS dag in particular, +# but could in other cases (made for the sake of the final_status test for the handler TODO:) +# RUNNING_MWFR_ARRAY_3 = [MWFR_B_COMPLETED, MWFR_C_PENDING, MWFR_A_RUNNING, MWFR_D_PENDING] +HANDLER_STEPS_RUNNING = mwfr_handler_dict_generator(RUNNING_MWFR_ARRAY) +HANDLER_STEPS_RUNNING_2 = mwfr_handler_dict_generator(RUNNING_MWFR_ARRAY_2) +# HANDLER_STEPS_RUNNING_3 = mwfr_handler_dict_generator(RUNNING_MWFR_ARRAY_3) + +# Handlers that have failed +HALFWAY_DONE_N_FAIL_ARRAY = [MWFR_B_COMPLETED, MWFR_C_COMPLETED, MWFR_A_FAILED, MWFR_D_PENDING] +HALFWAY_DONE_N_FAIL_ARRAY_2 = [MWFR_B_COMPLETED, MWFR_C_COMPLETED, MWFR_A_FAILED_W_ERROR, MWFR_D_RUNNING] +HANDLER_FAILED = mwfr_handler_dict_generator(HALFWAY_DONE_N_FAIL_ARRAY) +HANDLER_FAILED_2 = mwfr_handler_dict_generator(HALFWAY_DONE_N_FAIL_ARRAY_2) + +# Handler that has been stopped +HALFWAY_DONE_N_STOPPED_ARRAY = [MWFR_B_COMPLETED, MWFR_C_COMPLETED, MWFR_A_STOPPED, MWFR_D_PENDING] +HALFWAY_DONE_N_STOPPED_ARRAY_2 = [MWFR_B_COMPLETED, MWFR_C_COMPLETED, MWFR_A_STOPPED_W_LINKTO_AND_ERROR, MWFR_D_PENDING] +HANDLER_STOPPED = mwfr_handler_dict_generator(HALFWAY_DONE_N_STOPPED_ARRAY) + +# Handler that is completed +COMPLETED_ARRAY = [MWFR_B_COMPLETED, MWFR_C_COMPLETED, MWFR_A_COMPLETED, MWFR_D_COMPLETED] +HANDLER_COMPLETED = mwfr_handler_dict_generator(COMPLETED_ARRAY) + +################################################################# +# Tests +################################################################# +class TestMetaWorkflowRunStep: + @pytest.mark.parametrize( + "mwf_run_step_dict, dependencies, items_for_creation, num_attributes", + [ + (MWFR_A, [], [TESTER_UUID], 4), # successfully creates + (MWFR_A, [], None, 3) # TODO: for now, doesn't fail if no items for creation + ] + ) + def test_attribute_validation(self, mwf_run_step_dict, dependencies, items_for_creation, num_attributes): + """ + Tests creation of appropriate MetaWorkflowRunStep objects, + no errors raised. 
+ """ + completed_dict = mwf_run_with_added_attrs(mwf_run_step_dict, dependencies, items_for_creation) + meta_workflow_run_step_object = MetaWorkflowRunStep(completed_dict) + assert num_attributes == len(meta_workflow_run_step_object.__dict__) + assert meta_workflow_run_step_object.status == PENDING + + required_attributes = [NAME, DEPENDENCIES]#, "items_for_creation"] + for attr in required_attributes: + assert hasattr(meta_workflow_run_step_object, attr) == True + + @pytest.mark.parametrize( + "mwf_run_step_dict, dependencies, items_for_creation", + [ + ({}, [], [TESTER_UUID]), # fails because no name + (MWFR_A, None, [TESTER_UUID]), # fails because no dependencies + ] + ) + def test_attribute_validation_attribute_errors(self, mwf_run_step_dict, dependencies, items_for_creation): + """ + Tests creation of appropriate MetaWorkflowRunStep objects, + Attribute Errors raised (missing required attributes). + """ + with pytest.raises(AttributeError) as attr_err_info: + completed_dict = mwf_run_with_added_attrs(mwf_run_step_dict, dependencies, items_for_creation) + MetaWorkflowRunStep(completed_dict) + + +class TestMetaWorkflowRunHandler: + def test_attribute_validation(self): + """ + Tests creation of appropriate MetaWorkflowRun Handler objects, + no errors raised. + # TODO: for now, doesn't fail if no associated_item -- could make this check in ff + """ + meta_workflow_run_handler = MetaWorkflowRunHandler(HANDLER_PENDING) + assert getattr(meta_workflow_run_handler, FINAL_STATUS) == PENDING + required_attributes = [UUID, ASSOCIATED_META_WORKFLOW_HANDLER] + for attr in required_attributes: + assert hasattr(meta_workflow_run_handler, attr) == True + + @pytest.mark.parametrize( + "input_dict", + [ + (HANDLER_WITHOUT_UUID_DICT), # fails because no uuid + (HANDLER_WITHOUT_ASSOC_MWFH_DICT), # fails because no associated MetaWorkflow Handler + (HANDLER_WITHOUT_META_WORKFLOW_RUNS_ARRAY) # fails because no meta_workflow_runs array + ] + ) + def test_attribute_validation_attribute_errors(self, input_dict): + """ + Tests creation of appropriate MetaWorkflowRunHandler objects, + Attribute Errors raised (missing required attributes). + """ + with pytest.raises(AttributeError) as attr_err_info: + MetaWorkflowRunHandler(input_dict) + assert "Object validation error" in str(attr_err_info.value) + + def test_set_meta_workflow_runs_dict(self): + """ + Tests creation of MetaWorkflowRunStep objects for all MetaWorkflow Runs + in the meta_workflow_runs array, and creates dict out of them for quick access and update. + """ + meta_workflow_run_handler = MetaWorkflowRunHandler(HANDLER_PENDING) + meta_workflow_run_steps_dict = getattr(meta_workflow_run_handler, "meta_workflow_run_steps_dict") + assert len(meta_workflow_run_steps_dict) == 4 + for mwf_name, mwf_run_step in meta_workflow_run_steps_dict.items(): + assert mwf_name in MWF_NAMES_LIST + assert isinstance(mwf_run_step, MetaWorkflowRunStep) + + @pytest.mark.parametrize( + "input_dict, updated_final_status", + [ + (HANDLER_PENDING, PENDING), + (HANDLER_STEPS_RUNNING, RUNNING), + (HANDLER_STEPS_RUNNING_2, RUNNING), + # (HANDLER_STEPS_RUNNING_3, RUNNING), + (HANDLER_FAILED, FAILED), + (HANDLER_FAILED_2, FAILED), + (HANDLER_STOPPED, STOPPED), + (HANDLER_COMPLETED, COMPLETED) + ] + ) + def test_update_final_status(self, input_dict, updated_final_status): + """ + Tests the updating of the final_status attribute of a Run Handler + based on the combination of MetaWorkflowRunStep object statuses. 
+ """ + meta_workflow_run_handler = MetaWorkflowRunHandler(input_dict) + assert meta_workflow_run_handler.final_status == PENDING + meta_workflow_run_handler.update_final_status() + assert meta_workflow_run_handler.final_status == updated_final_status + + @pytest.mark.parametrize( + "input_dict, meta_workflow_run_name, step_dict", + [ + (HANDLER_PENDING, "A", MWFR_A_PENDING), + (HANDLER_PENDING, "non_existent_mwf_run_step", None) # fails because invalid name + ] + ) + def test_retrieve_meta_workflow_run_step_obj_by_name(self, input_dict, meta_workflow_run_name, step_dict): + """ + Tests the retrieval of a MetaWorkflowRunStep object by name. + """ + try: + meta_workflow_run_handler = MetaWorkflowRunHandler(input_dict) + result = meta_workflow_run_handler._retrieve_meta_workflow_run_step_obj_by_name(meta_workflow_run_name) + except KeyError as key_err_info: + assert meta_workflow_run_name in str(key_err_info) + else: + step = MetaWorkflowRunStep(step_dict) + assert type(result) == MetaWorkflowRunStep + assert result.__dict__ == step.__dict__ + + @pytest.mark.parametrize( + "input_dict, mwfr_step_name_to_access, attribute_to_fetch, expected_value", + [ + (HANDLER_COMPLETED, "A", "status", COMPLETED), + (HANDLER_COMPLETED, "A", "non_existent_attr", None) # fails because invalid attribute name + ] + ) + def test_get_meta_workflow_run_step_attr(self, input_dict, mwfr_step_name_to_access, attribute_to_fetch, expected_value): + """ + Tests the retrieval of a MetaWorkflowRunStep object's attribute. + """ + handler_obj = MetaWorkflowRunHandler(input_dict) + result = handler_obj.get_meta_workflow_run_step_attr(mwfr_step_name_to_access, attribute_to_fetch) + assert result == expected_value + + + @pytest.mark.parametrize( + "input_dict, mwfr_step_name_to_update, attribute, value", + [ + (HANDLER_COMPLETED, "A", "status", FAILED), + (HANDLER_COMPLETED, "non_existent_mwf_run_step", None, None) # fails because invalid name + ] + ) + def test_update_meta_workflow_run_step_obj(self, input_dict, mwfr_step_name_to_update, attribute, value): + """ + Tests the updating of a MetaWorkflowRunStep object' attribute with the provided value. + """ + try: + handler_obj = MetaWorkflowRunHandler(input_dict) + attr_value_before_change = getattr(handler_obj.meta_workflow_run_steps_dict[mwfr_step_name_to_update], attribute) + handler_obj.update_meta_workflow_run_step_obj(mwfr_step_name_to_update, attribute, value) + attr_value_after_change = getattr(handler_obj.meta_workflow_run_steps_dict[mwfr_step_name_to_update], attribute) + assert attr_value_before_change != attr_value_after_change + assert attr_value_after_change == value + except KeyError as key_err_info: + assert mwfr_step_name_to_update in str(key_err_info) + + @pytest.mark.parametrize( + "input_dict, steps_currently_pending", + [ + (HANDLER_PENDING, MWF_NAMES_LIST), + (HANDLER_STEPS_RUNNING, ["A", "D"]), + (HANDLER_STEPS_RUNNING_2, ["A", "D"]), + (HANDLER_FAILED, ["D"]), + (HANDLER_FAILED_2, []), + (HANDLER_COMPLETED, []) + ] + ) + def test_pending_steps(self, input_dict, steps_currently_pending): + """ + Tests the listing of MetaWorkflow Run names that are pending. 
+        """
+        handler_obj = MetaWorkflowRunHandler(input_dict)
+        result = handler_obj.pending_steps()
+        assert result == steps_currently_pending
+
+    @pytest.mark.parametrize(
+        "input_dict, steps_currently_running",
+        [
+            (HANDLER_PENDING, []),
+            (HANDLER_STEPS_RUNNING, ["B", "C"]),
+            (HANDLER_STEPS_RUNNING_2, ["C"]),
+            (HANDLER_FAILED, []),
+            (HANDLER_FAILED_2, ["D"]),
+            (HANDLER_COMPLETED, [])
+        ]
+    )
+    def test_running_steps(self, input_dict, steps_currently_running):
+        """
+        Tests the listing of MetaWorkflow Run names that are running.
+        """
+        handler_obj = MetaWorkflowRunHandler(input_dict)
+        result = handler_obj.running_steps()
+        assert result == steps_currently_running
+
+
+    @pytest.mark.parametrize(
+        "input_dict, mwfr_steps_to_update, attrs_to_update, updated_values, expected_meta_workflow_runs_array",
+        [
+            (HANDLER_STEPS_RUNNING, ["B", "B"], [STATUS, META_WORKFLOW_RUN], [COMPLETED, "a_link_to"], RUNNING_MWFR_ARRAY_2),
+            (HANDLER_FAILED, ["A", "D"], [ERROR, STATUS], ["error_message", RUNNING], HALFWAY_DONE_N_FAIL_ARRAY_2),
+            (HANDLER_STOPPED, ["A", "A"], [META_WORKFLOW_RUN, ERROR], ["another_link_to", "and_another_error_message"], HALFWAY_DONE_N_STOPPED_ARRAY_2)
+        ]
+    )
+    def test_update_meta_workflow_runs_array(self, input_dict, mwfr_steps_to_update, attrs_to_update, updated_values, expected_meta_workflow_runs_array):
+        """
+        Tests the updating of a meta_workflow_runs array based on
+        changed attributes of MetaWorkflowRunStep objects.
+        """
+        handler_obj = MetaWorkflowRunHandler(input_dict)
+        for idx in range(len(mwfr_steps_to_update)):
+            handler_obj.update_meta_workflow_run_step_obj(mwfr_steps_to_update[idx], attrs_to_update[idx], updated_values[idx])
+
+        result = handler_obj.update_meta_workflow_runs_array()
+        assert result == expected_meta_workflow_runs_array
\ No newline at end of file
diff --git a/test/test_run_metawflrun_handler_ff.py b/test/test_run_metawflrun_handler_ff.py
new file mode 100644
index 0000000..d1ae8e8
--- /dev/null
+++ b/test/test_run_metawflrun_handler_ff.py
@@ -0,0 +1,235 @@
+from contextlib import contextmanager
+from test.utils import patch_context
+from typing import Iterator, List, Any, Optional
+
+import mock
+import pytest
+
+
+from magma_ff.utils import JsonObject
+
+import magma_ff.run_metawflrun_handler as run_metaworkflow_run_handler_module
+from magma_ff.run_metawflrun_handler import (
+    ExecuteMetaWorkflowRunHandler,
+    execute_metawflrun_handler,
+)
+
+from magma_ff.create_metawfr import (
+    MetaWorkflowRunCreationError,
+)
+
+from test.meta_workflow_run_handler_constants import *
+
+
+META_WORKFLOW_RUN_HANDLER_UUID = "meta_workflow_run_handler_tester_uuid"
+AUTH_KEY = {"server": "some_server"}
+
+
+@contextmanager
+def patch_patch_metadata(**kwargs) -> Iterator[mock.MagicMock]:
+    """Patch ff_utils.patch_metadata call within execute_metawflrun_handler function."""
+    with patch_context(
+        run_metaworkflow_run_handler_module.ff_utils, "patch_metadata", **kwargs
+    ) as mock_item:
+        yield mock_item
+
+
+@contextmanager
+def patch_check_status(**kwargs) -> Iterator[mock.MagicMock]:
+    """Patch utils.check_status call within execute_metawflrun_handler function."""
+    with patch_context(
+        run_metaworkflow_run_handler_module, "check_status", **kwargs
+    ) as mock_item:
+        yield mock_item
+
+
+@contextmanager
+def patch_make_embed_request(**kwargs) -> Iterator[mock.MagicMock]:
+    """Patch utils.make_embed_request call within execute_metawflrun_handler function."""
+    with patch_context(
+        run_metaworkflow_run_handler_module,
"make_embed_request", **kwargs + ) as mock_item: + yield mock_item + + +@contextmanager +def patch_create_meta_workflow_run(**kwargs) -> Iterator[mock.MagicMock]: + """ + Patch magma_ff.create_metawfr.create_meta_workflow_run call + within ExecuteMetaWorkflowRunHandler class. + """ + with patch_context( + run_metaworkflow_run_handler_module, "create_meta_workflow_run", **kwargs + ) as mock_item: + yield mock_item + + +class TestExecuteMetaWorkflowRunHandler: + """Tests for methods/properties for ExecuteMetaWorkflowRunHandler class.""" + + @pytest.mark.parametrize( + "run_handler, pending_step_name, expected_result", + [ + (HANDLER_PENDING, "B", True), + (HANDLER_PENDING, "A", False), + (HANDLER_STEPS_RUNNING, "A", False), + (HANDLER_STEPS_RUNNING, "D", False), + (HANDLER_STEPS_RUNNING_2, "A", False), + (HANDLER_FAILED, "D", False), + (HANDLER_STOPPED, "D", False), + ], + ) + def test_check_pending_step_dependencies( + self, run_handler: JsonObject, pending_step_name: str, expected_result: bool + ) -> None: + """ + Tests the check of a mwfr step's dependencies, + and whether they are completed or not (checking status). + """ + execution_generator = ExecuteMetaWorkflowRunHandler(run_handler, AUTH_KEY) + result = execution_generator._check_pending_step_dependencies(pending_step_name) + assert result == expected_result + + @pytest.mark.parametrize( + "run_handler, pending_step_name, exception_expected", + [(HANDLER_PENDING, "B", False), (HANDLER_PENDING, "B", True)], + ) + def test_create_and_update_meta_workflow_run_step( + self, run_handler: JsonObject, pending_step_name: str, exception_expected: bool + ) -> None: + """Tests creation (and updates) of new metaworkflow run steps""" + with patch_create_meta_workflow_run() as mock_create_mwfr: + execution_generator = ExecuteMetaWorkflowRunHandler(run_handler, AUTH_KEY) + if not exception_expected: + mock_create_mwfr.return_value = TEST_MWFR_SIMPLE_GET_OUTPUT + execution_generator._create_and_update_meta_workflow_run_step( + pending_step_name + ) + assert ( + execution_generator.handler.get_meta_workflow_run_step_attr( + pending_step_name, META_WORKFLOW_RUN + ) + == TEST_MWFR_SIMPLE_GET_OUTPUT[UUID] + ) + assert ( + execution_generator.handler.get_meta_workflow_run_step_attr( + pending_step_name, STATUS + ) + == RUNNING + ) + else: + mock_create_mwfr.side_effect = MetaWorkflowRunCreationError("oops") + execution_generator._create_and_update_meta_workflow_run_step( + pending_step_name + ) + assert ( + execution_generator.handler.get_meta_workflow_run_step_attr( + pending_step_name, ERROR + ) + == "oops" + ) + assert ( + execution_generator.handler.get_meta_workflow_run_step_attr( + pending_step_name, STATUS + ) + == FAILED + ) + + @pytest.mark.parametrize( + "run_handler, orig_final_status, yielded_statuses, yielded_mwf_run_arrays", + [ + ( + HANDLER_PENDING, + PENDING, + [RUNNING, RUNNING, RUNNING, RUNNING], + [ + FIRST_STEP_RUNNING_ARRAY, + RUNNING_MWFR_ARRAY, + RUNNING_MWFR_ARRAY, + RUNNING_MWFR_ARRAY, + ], + ), + ( + HANDLER_STEPS_RUNNING, + RUNNING, + [RUNNING, RUNNING], + [RUNNING_MWFR_ARRAY, RUNNING_MWFR_ARRAY], + ), + ( + HANDLER_STEPS_RUNNING_2, + RUNNING, + [RUNNING, RUNNING], + [RUNNING_MWFR_ARRAY, RUNNING_MWFR_ARRAY], + ), + (HANDLER_FAILED, FAILED, [FAILED], [HALFWAY_DONE_N_FAIL_ARRAY]), + (HANDLER_FAILED_2, FAILED, [], []), + (HANDLER_STOPPED, STOPPED, [STOPPED], [HALFWAY_DONE_N_STOPPED_ARRAY]), + (HANDLER_COMPLETED, COMPLETED, [], []), + ], + ) + def test_generator_of_created_meta_workflow_run_steps( + self, + run_handler: 
JsonObject,
+        orig_final_status: str,
+        yielded_statuses: List[str],
+        yielded_mwf_run_arrays: List[List[Any]],
+    ) -> None:
+        """
+        Tests the generator of dictionaries used to PATCH created MetaWorkflow Runs
+        and the final status of the overall MetaWorkflow Run Handler.
+        """
+        with patch_create_meta_workflow_run(return_value=TEST_MWFR_SIMPLE_GET_OUTPUT):
+            execution_generator = ExecuteMetaWorkflowRunHandler(run_handler, AUTH_KEY)
+            assert (
+                getattr(execution_generator.handler, FINAL_STATUS) == orig_final_status
+            )
+            # Materialize the generator once so its yields can be both counted and inspected
+            patch_dicts = list(
+                execution_generator.generator_of_created_meta_workflow_run_steps()
+            )
+            assert len(yielded_statuses) == len(patch_dicts)
+            for idx, step in enumerate(patch_dicts):
+                assert step[FINAL_STATUS] == yielded_statuses[idx]
+                assert step[META_WORKFLOW_RUNS] == yielded_mwf_run_arrays[idx]
+
+
+@pytest.mark.parametrize(
+    "run_handler_json, value_err_expected, status_valid, patch_metadata_calls",
+    [
+        (None, True, True, 0),
+        (HANDLER_PENDING_COPY, False, False, 0),
+        (HANDLER_PENDING_COPY, False, True, 4),
+    ],
+)
+def test_execute_metawflrun_handler(
+    run_handler_json: Optional[JsonObject],
+    value_err_expected: bool,
+    status_valid: bool,
+    patch_metadata_calls: int,
+) -> None:
+    """
+    Tests the wrapper around the generator of dictionaries used to PATCH
+    the Run Handler final status and created MetaWorkflow Runs.
+    Includes additional CGAP portal status checks.
+    """
+    with patch_make_embed_request() as mock_embed_request:
+        with patch_check_status() as mock_check_status:
+            with patch_patch_metadata() as mock_patch_metadata:
+                with patch_create_meta_workflow_run(
+                    return_value=TEST_MWFR_SIMPLE_GET_OUTPUT
+                ):
+                    if value_err_expected:
+                        mock_embed_request.return_value = None
+                        with pytest.raises(ValueError) as val_err:
+                            execute_metawflrun_handler(TESTER_UUID, AUTH_KEY)
+                        assert TESTER_UUID in str(val_err.value)
+                        assert (
+                            mock_patch_metadata.call_count == patch_metadata_calls
+                        )
+                    else:
+                        mock_embed_request.return_value = run_handler_json
+                        if not status_valid:
+                            mock_check_status.return_value = False
+                        else:
+                            mock_check_status.return_value = True
+                        execute_metawflrun_handler(TESTER_UUID, AUTH_KEY)
+                        assert mock_patch_metadata.call_count == patch_metadata_calls
diff --git a/test/test_topological_sort.py b/test/test_topological_sort.py
new file mode 100644
index 0000000..6079db4
--- /dev/null
+++ b/test/test_topological_sort.py
@@ -0,0 +1,207 @@
+#!/usr/bin/env python3
+
+#################################################################
+# Libraries
+#################################################################
+import pytest
+
+from magma.metawfl_handler import MetaWorkflowStep
+from magma.topological_sort import TopologicalSortHandler
+from magma.magma_constants import *
+from dcicutils.misc_utils import CycleError
+
+#################################################################
+# Vars
+#################################################################
+
+A_name = "A"
+B_name = "B"
+C_name = "C"
+D_name = "D"
+E_name = "E"
+
+# of the form [mwf_uuid, mwf_name]
+# used for functions defined below to generate lists of dicts
+# (steps with dependencies array)
+MWF_A = ["test_mwf_uuid_0", A_name]
+MWF_B = ["test_mwf_uuid_1", B_name]
+MWF_C = ["test_mwf_uuid_2", C_name]
+MWF_D = ["test_mwf_uuid_3", D_name]
+MWF_E = ["test_mwf_uuid_4", E_name]
+
+A = [A_name]
+B = [B_name]
+C = [C_name]
+D = [D_name]
+E = [E_name]
+
+DEP_ON_A = [A]
+DEP_ON_B = [B]
+DEP_ON_C = [C]
+DEP_ON_D = [D]
+DEP_ON_E = [E]
+DEP_EMPTY = [[]]
+
+THREE_MWF = [MWF_A, MWF_B, MWF_C]
+FOUR_MWF = [MWF_A, MWF_B, MWF_C, MWF_D]
+FIVE_MWF = [MWF_A, MWF_B, MWF_C, MWF_D, MWF_E]
+
+
+def construct_array_of_meta_workflows(meta_workflow_metadata_list, dependencies_list):
+    """
+    Constructs a list of lists for MetaWorkflow steps.
+    Used by the meta_workflow_dict function below to generate
+    dictionaries of MetaWorkflow steps.
+
+    :param meta_workflow_metadata_list: list of the form [meta_workflow_linkTo, meta_workflow_name]
+    :type meta_workflow_metadata_list: list
+    :param dependencies_list: list of dependencies. Index-matched to meta_workflow_metadata_list
+    :type dependencies_list: list
+    :return: list of aggregated meta_workflows with their metadata needed for creation,
+        of the form [meta_workflow_linkTo_1, meta_workflow_name_1, [dependencies_1],...]
+    :rtype: list
+    """
+    length = len(meta_workflow_metadata_list)
+    array_of_meta_workflows = []
+    for idx in range(length):
+        array_of_meta_workflows.append(meta_workflow_metadata_list[idx] + dependencies_list[idx])
+    return array_of_meta_workflows
+
+
+# helper to generate a single MetaWorkflow step dictionary from a metadata list
+def meta_workflow_dict(simple_meta_workflow_metadata_list):
+    """
+    Constructs a dictionary of MetaWorkflow Step metadata, given a list
+    of the metadata.
+    Attributes used here are based on MetaWorkflow Handler schema in CGAP portal.
+
+    :param simple_meta_workflow_metadata_list: list of the form
+        [meta_workflow_linkTo, meta_workflow_name, [meta_workflow_dependencies]]
+    :type simple_meta_workflow_metadata_list: list
+    :return: dictionary representing a MetaWorkflow Step
+    :rtype: dict
+    """
+    meta_workflow_dict = {
+        META_WORKFLOW: simple_meta_workflow_metadata_list[0],
+        NAME: simple_meta_workflow_metadata_list[1]
+    }
+    if len(simple_meta_workflow_metadata_list) == 3:
+        meta_workflow_dict[DEPENDENCIES] = simple_meta_workflow_metadata_list[2]
+
+    # just to be able to create MetaWorkflowStep objects without error
+    meta_workflow_dict[ITEMS_FOR_CREATION_UUID] = "foo"
+    return meta_workflow_dict
+
+def create_input_meta_workflows_dict(array_of_meta_workflows):
+    """
+    Returns a simulated meta_workflows dictionary of the form
+    {meta_workflow_name_1: MetaWorkflowStep object 1, ...}
+    (defined in a MetaWorkflow Handler)
+
+    :param array_of_meta_workflows: list of the form
+        [[meta_workflow_linkTo_1, meta_workflow_name_1, [meta_workflow_1_dependencies]], ...]
+    :type array_of_meta_workflows: list
+    :return: dictionary of MetaWorkflow name-MetaWorkflowStep object key-value pairs
+    :rtype: dict
+    """
+    input_meta_workflows_dict = {}
+    for meta_workflow_list in array_of_meta_workflows:
+        meta_workflow_dictionary = meta_workflow_dict(meta_workflow_list)
+        meta_workflow_name = meta_workflow_dictionary[NAME]
+        input_meta_workflows_dict[meta_workflow_name] = MetaWorkflowStep(meta_workflow_dictionary)
+    return input_meta_workflows_dict
+
+
+# DAGs (directed acyclic graphs, can be topologically sorted)
+# Dependency arrays are index-matched to a list of MetaWorkflow metadata
+# See functions above for further detail
+# -----------------------------------------------------------
+# DAG_0
+# A   B -----> C
DEPENDENCIES_DAG_0 = [DEP_EMPTY, DEP_EMPTY, DEP_ON_B]
+DAG_0 = construct_array_of_meta_workflows(THREE_MWF, DEPENDENCIES_DAG_0)
+
+# DAG_1
+# B -----> D
+# |   ⋀    ⋀
+# |  /     |
+# ⋁ /      |
+# A <----- C
+DEPENDENCIES_DAG_1 = [[B+C], DEP_EMPTY, DEP_EMPTY, [A+B+C]]
+DAG_1 = construct_array_of_meta_workflows(FOUR_MWF, DEPENDENCIES_DAG_1)
+
+
+# Cyclic graphs, cannot be topologically sorted
+# ----------------------------------------------
+# CYCLIC_0
+# A        B__
+# ⋀           \_____
+# |                 |
+# |                 ⋁
+# D <----- C
+DEPENDENCIES_CYCLIC_0 = [DEP_EMPTY, DEP_ON_D, DEP_ON_B, DEP_ON_C]
+CYCLIC_0 = construct_array_of_meta_workflows(FOUR_MWF, DEPENDENCIES_CYCLIC_0)
+
+# CYCLIC_1
+# A -----> B ----> E
+# ⋀        | ⋀     |
+# |        |  \____|
+# |        ⋁
+# D <----- C
+DEPENDENCIES_CYCLIC_1 = [DEP_ON_D, [A+E], DEP_ON_B, DEP_ON_C, DEP_ON_B]
+CYCLIC_1 = construct_array_of_meta_workflows(FIVE_MWF, DEPENDENCIES_CYCLIC_1)
+
+
+#################################################################
+# Tests
+#################################################################
+class TestTopologicalSortHandler:
+    @pytest.mark.parametrize(
+        "array_of_meta_workflows, input_graph_to_topological_sort",
+        [
+            (DAG_0, {A_name: {}, B_name: {}, C_name: {B_name}}),
+            (DAG_1, {A_name: {B_name, C_name}, B_name: {}, C_name: {}, D_name: {A_name, B_name, C_name}}),
+            (CYCLIC_0, {A_name: {}, B_name: {D_name}, C_name: {B_name}, D_name: {C_name}})
+        ],
+    )
+    def test_create_topo_sort_graph_input(self, array_of_meta_workflows, input_graph_to_topological_sort):
+        """
+        Tests conversion of MetaWorkflow Steps dict from MetaWorkflow Handler to
+        appropriately formatted input graph for a TopologicalSorter object.
+        """
+        # TODO: could factor these next two lines into a helper, since they are reused across tests
+        input_meta_workflow_dict = create_input_meta_workflows_dict(array_of_meta_workflows)
+        sorter = TopologicalSortHandler(input_meta_workflow_dict)
+        assert sorter.graph == input_graph_to_topological_sort
+
+    @pytest.mark.parametrize(
+        "array_of_meta_workflows, possible_sorted_lists",
+        [
+            (DAG_0, [[A_name, B_name, C_name], [B_name, A_name, C_name], [B_name, C_name, A_name]]),
+            (DAG_1, [[B_name, C_name, A_name, D_name], [C_name, B_name, A_name, D_name]])
+        ],
+    )
+    def test_sorted_graph_list(self, array_of_meta_workflows, possible_sorted_lists):
+        """
+        Tests topological sorting of sortable MetaWorkflow steps.
+ """ + input_meta_workflow_dict = create_input_meta_workflows_dict(array_of_meta_workflows) + sorter = TopologicalSortHandler(input_meta_workflow_dict) + assert sorter.sorted_graph_list() in possible_sorted_lists + + @pytest.mark.parametrize( + "array_of_meta_workflows", + [ + (CYCLIC_0), (CYCLIC_1) + ], + ) + def test_sorted_graph_list_cycle_error(self, array_of_meta_workflows): + """ + Tests attempts to topologically sort MetaWorkflow steps with circular dependencies. + Raises CycleError. + """ + with pytest.raises(CycleError) as cycle_err_info: + input_meta_workflow_dict = create_input_meta_workflows_dict(array_of_meta_workflows) + sorter = TopologicalSortHandler(input_meta_workflow_dict) + sorter.sorted_graph_list() + assert "nodes are in a cycle" in str(cycle_err_info.value) \ No newline at end of file diff --git a/test/test_validated_dictionary.py b/test/test_validated_dictionary.py new file mode 100644 index 0000000..97a10bc --- /dev/null +++ b/test/test_validated_dictionary.py @@ -0,0 +1,83 @@ +#!/usr/bin/env python3 + +################################################################# +# Libraries +################################################################# +import pytest + +from magma.validated_dictionary import ValidatedDictionary + +################################################################# +# Vars +################################################################# +EMPTY_INPUT_DICT = {} +SIMPLE_INPUT_DICT = {"attr_0": 0} +EXTENSIVE_INPUT_DICT = { + "attr_0": 0, + "attr_1": "foo", + "attr_2": False, + "attr_3": [0, 1, 2, 3], + "attr_4": { + "subattr_0": 0, + "subattr_1": "bar" + } +} + +EMPTY_VALIDATED_DICT = ValidatedDictionary(EMPTY_INPUT_DICT) +SIMPLE_VALIDATED_DICT = ValidatedDictionary(SIMPLE_INPUT_DICT) +EXTENSIVE_VALIDATED_DICT = ValidatedDictionary(EXTENSIVE_INPUT_DICT) + +################################################################# +# Tests +################################################################# +class TestValidatedDictionary: + @pytest.mark.parametrize( + "validated_dictionary_object, input_dict", + [ + (EMPTY_VALIDATED_DICT, EMPTY_INPUT_DICT), + (SIMPLE_VALIDATED_DICT, SIMPLE_INPUT_DICT), + (EXTENSIVE_VALIDATED_DICT, EXTENSIVE_INPUT_DICT) + ] + ) + def test_validated_dictionary_init(self, validated_dictionary_object, input_dict): + """ + Test of the __init__ function of the ValidatedDictionary class + """ + present_attributes = list(input_dict.keys()) + for attr in present_attributes: + assert hasattr(validated_dictionary_object, attr) == True + assert getattr(validated_dictionary_object, attr) == input_dict[attr] + + @pytest.mark.parametrize( + "validated_dictionary_object, attributes_to_check", + [ + (EMPTY_VALIDATED_DICT, ()), + (SIMPLE_VALIDATED_DICT, ("attr_0",)), + (EXTENSIVE_VALIDATED_DICT, ("attr_2",)), + (EXTENSIVE_VALIDATED_DICT, ("attr_0", "attr_1", "attr_2", "attr_3", "attr_4")) + ] + ) + def test_validate_basic_attributes_no_errors(self, validated_dictionary_object, attributes_to_check): + """ + Test for function checking that specified attributes are part of a given ValidatedDictionary object, + no errors raised. 
+ """ + result = validated_dictionary_object._validate_basic_attributes(*attributes_to_check) + assert result is None + + @pytest.mark.parametrize( + "validated_dictionary_object, attributes_to_check", + [ + (EMPTY_VALIDATED_DICT, ("not_present", "also_not_present")), + (SIMPLE_VALIDATED_DICT, ("attr_0", "not_present")), + (EXTENSIVE_VALIDATED_DICT, ("attr_0", "attr_1", "not_present", "attr_2")) + ] + ) + def test_validate_basic_attributes_attribute_errors(self, validated_dictionary_object, attributes_to_check): + """ + Test for function checking that specified attributes are part of a given ValidatedDictionary object, + AttributeError raised. + """ + with pytest.raises(AttributeError) as attr_err_info: + validated_dictionary_object._validate_basic_attributes(*attributes_to_check) + assert "Object validation error" in str(attr_err_info.value) \ No newline at end of file diff --git a/test/test_wfrutils_ff.py b/test/test_wfrutils_ff.py new file mode 100644 index 0000000..d690017 --- /dev/null +++ b/test/test_wfrutils_ff.py @@ -0,0 +1,185 @@ +from contextlib import contextmanager +from test.utils import patch_context +from typing import Iterator # , List, Any, Optional +from requests.exceptions import HTTPError + +import mock +import pytest + +import magma_ff.wfrutils as wfrutils_module +from magma_ff.wfrutils import ( + # FFWfrUtils, + FFMetaWfrUtils, +) + +from magma.magma_constants import * +from magma_ff.utils import JsonObject + +# TODO: add to constants file? +TEST_MWFR_ID_A = "test_uuid_a" +TEST_MWFR_ID_B = "test_uuid_b" +AUTH_KEY = {"server": "some_server"} +RANDOM_COST = 34.56 + +MWFR_A_PORTAL_OBJ = {UUID: TEST_MWFR_ID_A, FINAL_STATUS: PENDING, COST: RANDOM_COST} + +MWFR_B_PORTAL_OBJ = {UUID: TEST_MWFR_ID_B, FINAL_STATUS: RUNNING} + +CACHE_WITH_MWFR = {TEST_MWFR_ID_B: MWFR_B_PORTAL_OBJ} + + +@contextmanager +def patch_get_metadata(**kwargs) -> Iterator[mock.MagicMock]: + """Patch ff_utils.get_metadata call within FFMetaWfrUtils class.""" + with patch_context(wfrutils_module.ff_utils, "get_metadata", **kwargs) as mock_item: + yield mock_item + + +@contextmanager +def patch_meta_workflow_runs_cache(**kwargs) -> Iterator[mock.MagicMock]: + """Patch _meta_workflow_runs_cache property within FFMetaWfrUtils class.""" + with patch_context( + wfrutils_module.FFMetaWfrUtils, + "_meta_workflow_runs_cache", + new_callable=mock.PropertyMock, + **kwargs + ) as mock_item: + yield mock_item + + +@contextmanager +def patch_retrieve_meta_workflow_run(**kwargs) -> Iterator[mock.MagicMock]: + """Patch _retrieve_meta_workflow_run method within FFMetaWfrUtils class.""" + with patch_context( + wfrutils_module.FFMetaWfrUtils, "_retrieve_meta_workflow_run", **kwargs + ) as mock_item: + yield mock_item + + +class TestFFMetaWfrUtils: + """Tests for methods/properties for FFMetaWfrUtils class.""" + + def test_meta_workflow_runs_cache(self) -> None: + """ + Tests updates to _meta_workflow_runs_cache property. 
+ """ + meta_workflow_runs_retriever = FFMetaWfrUtils(AUTH_KEY) + assert meta_workflow_runs_retriever._meta_workflow_runs_cache == {} + meta_workflow_runs_retriever._meta_workflow_runs_cache[ + TEST_MWFR_ID_B + ] = MWFR_B_PORTAL_OBJ + assert meta_workflow_runs_retriever._meta_workflow_runs_cache == CACHE_WITH_MWFR + meta_workflow_runs_retriever._meta_workflow_runs_cache[ + TEST_MWFR_ID_A + ] = MWFR_A_PORTAL_OBJ + assert len(meta_workflow_runs_retriever._meta_workflow_runs_cache) == 2 + + @pytest.mark.parametrize( + "meta_workflow_run_identifier, meta_workflow_run, in_cache, get_request_exception, cache_calls", + [ + ( + TEST_MWFR_ID_A, + MWFR_A_PORTAL_OBJ, + False, + False, + 2, + ), # successful GET from portal + ( + TEST_MWFR_ID_A, + MWFR_A_PORTAL_OBJ, + False, + True, + 1, + ), # unsuccessful GET from portal + ( + TEST_MWFR_ID_B, + MWFR_B_PORTAL_OBJ, + True, + False, + 2, + ), # MWFR already in the cache + ], + ) + def test_retrieve_meta_workflow_run( + self, + meta_workflow_run_identifier: str, + meta_workflow_run: JsonObject, + in_cache: bool, + get_request_exception: bool, + cache_calls: int, + ) -> None: + """ + Tests retrieval of MetaWorkflow Runs from portal, and addition to cache. + """ + with patch_get_metadata() as mock_get_metadata: + with patch_meta_workflow_runs_cache() as mock_cache: + meta_workflow_runs_retriever = FFMetaWfrUtils(AUTH_KEY) + if in_cache: + mock_cache.return_value = CACHE_WITH_MWFR + result = meta_workflow_runs_retriever._retrieve_meta_workflow_run( + meta_workflow_run_identifier + ) + assert result == meta_workflow_run + mock_get_metadata.assert_not_called() + assert mock_cache.call_count == cache_calls + else: + if get_request_exception: + mock_get_metadata.side_effect = Exception("oops") + with pytest.raises(HTTPError): + meta_workflow_runs_retriever._retrieve_meta_workflow_run( + meta_workflow_run_identifier + ) + assert mock_cache.call_count == cache_calls + else: + mock_get_metadata.return_value = meta_workflow_run + result = ( + meta_workflow_runs_retriever._retrieve_meta_workflow_run( + meta_workflow_run_identifier + ) + ) + assert mock_cache.call_count == cache_calls + assert result == meta_workflow_run + + @pytest.mark.parametrize( + "meta_workflow_run_identifier, meta_workflow_run, expected_status", + [ + (TEST_MWFR_ID_A, MWFR_A_PORTAL_OBJ, PENDING), + (TEST_MWFR_ID_B, MWFR_B_PORTAL_OBJ, RUNNING) + ], + ) + def test_get_meta_workflow_run_status( + self, + meta_workflow_run_identifier: str, + meta_workflow_run: JsonObject, + expected_status: str + ) -> None: + """ + Tests retrieval of a MetaWorkflow Run's status attribute from portal. + """ + with patch_retrieve_meta_workflow_run(return_value=meta_workflow_run): + meta_workflow_runs_retriever = FFMetaWfrUtils(AUTH_KEY) + result = meta_workflow_runs_retriever.get_meta_workflow_run_status(meta_workflow_run_identifier) + assert result == expected_status + + @pytest.mark.parametrize( + "meta_workflow_run_identifier, meta_workflow_run, expected_cost", + [ + (TEST_MWFR_ID_A, MWFR_A_PORTAL_OBJ, RANDOM_COST), + (TEST_MWFR_ID_B, MWFR_B_PORTAL_OBJ, float(0)) + ], + ) + def test_get_meta_workflow_run_cost( + self, + meta_workflow_run_identifier: str, + meta_workflow_run: JsonObject, + expected_cost: float + ) -> None: + """ + Tests retrieval of a MetaWorkflow Run's cost attribute from portal. 
+        """
+        with patch_retrieve_meta_workflow_run(return_value=meta_workflow_run):
+            meta_workflow_runs_retriever = FFMetaWfrUtils(AUTH_KEY)
+            result = meta_workflow_runs_retriever.get_meta_workflow_run_cost(meta_workflow_run_identifier)
+            assert result == expected_cost
+            assert isinstance(result, float)
+
\ No newline at end of file
diff --git a/test/tester.py b/test/tester.py
new file mode 100644
index 0000000..eae4eb1
--- /dev/null
+++ b/test/tester.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python3
+
+### python3 -m test.tester
+
+#################################################################
+# Libraries
+#################################################################
+
+import mock
+from contextlib import contextmanager
+
+from typing import Iterator
+
+from test.utils import patch_context
+# from magma.magma_constants import *
+import magma_ff.run_metawflrun_handler as run_metaworkflow_run_handler_module
+from magma_ff.run_metawflrun_handler import (
+    ExecuteMetaWorkflowRunHandler,
+    execute_metawflrun_handler,
+)
+
+import magma_ff.create_metawfr as create_metaworkflow_run_module
+from magma_ff.create_metawfr import (
+    create_meta_workflow_run,
+    MetaWorkflowRunCreationError,
+)
+
+from test.meta_workflow_run_handler_constants import *
+
+from magma_ff.metawflrun_handler import MetaWorkflowRunHandler
+from magma.metawflrun_handler import MetaWorkflowRunStep
+
+META_WORKFLOW_RUN_HANDLER_UUID = "meta_workflow_run_handler_tester_uuid"
+AUTH_KEY = {"server": "some_server"}
+
+@contextmanager
+def patch_get_metadata(**kwargs) -> Iterator[mock.MagicMock]:
+    """Patch ff_utils.get_metadata call within the magma_ff.create_metawfr module."""
+    with patch_context(
+        create_metaworkflow_run_module.ff_utils, "get_metadata", **kwargs
+    ) as mock_item:
+        yield mock_item
+
+
+@contextmanager
+def patch_create_meta_workflow_run(**kwargs) -> Iterator[mock.MagicMock]:
+    """Patch create_meta_workflow_run call within the run_metawflrun_handler module."""
+    with patch_context(
+        run_metaworkflow_run_handler_module, "create_meta_workflow_run", **kwargs
+    ) as mock_item:
+        yield mock_item
+
+
+@contextmanager
+def patch_create_and_update_meta_workflow_run_step(
+    **kwargs,
+) -> Iterator[mock.MagicMock]:
+    """Patch _create_and_update_meta_workflow_run_step call within the run_metawflrun_handler module."""
+    with patch_context(
+        run_metaworkflow_run_handler_module,
+        "_create_and_update_meta_workflow_run_step",
+        **kwargs,
+    ) as mock_item:
+        yield mock_item
+
+############################
+if __name__ == "__main__":
+
+    with patch_create_meta_workflow_run(return_value=TEST_MWFR_SIMPLE_GET_OUTPUT) as mock_create_mwfr:
+        execution_generator = ExecuteMetaWorkflowRunHandler(
+            HANDLER_PENDING, AUTH_KEY
+        )
+        generator = execution_generator.generator_of_created_meta_workflow_run_steps()
+        for idx, step in enumerate(generator):
+            print(idx)
+            print(step)
+            print()