diff --git a/GeniERuntimeExample/GeniERuntimeExample.ipynb b/GeniERuntimeExample/GeniERuntimeExample.ipynb index 78a2d90..6b19974 100644 --- a/GeniERuntimeExample/GeniERuntimeExample.ipynb +++ b/GeniERuntimeExample/GeniERuntimeExample.ipynb @@ -17,35 +17,25 @@ "metadata": {}, "outputs": [], "source": [ - "from pathlib import Path\n", - "from dnv.oneworkflow.utils.workunit_extension import *\n", - "from dnv.oneworkflow.utils.starter import *\n", - "from dnv.oneworkflow import OneWorkflowClient\n", + "from dnv.oneworkflow.utils import *\n", "from pathlib import Path\n", "import os\n", - "oneWorkflowTMPFolder = r'D:\\OneWorkflowTmp' #due to possible issues with long file paths we prefer to have this folder at the root\n", + "root_folder = os.getcwd()\n", + "workspacePath = str(Path(root_folder, 'Workspace'))\n", + "workspaceId = \"GeniERuntimeExample\"\n", + "\n", + "cloudRun = False\n", + "oneWorkflowTMPFolder = r'c:\\oneworkflowTmp' #due to possible issues with long file paths we prefer to have this folder at the root\n", "if not os.path.exists(oneWorkflowTMPFolder):\n", " try:\n", " print(\"Trying to create tmp folder for one workflow local execution\")\n", " os.mkdir(oneWorkflowTMPFolder)\n", + " print(oneWorkflowTMPFolder + \" created!\\n\")\n", " except:\n", " print(\"did not manage to create tmp folder for local execution. 
Check that you have privileges to create it or try to manually create it from the coomand line.\")\n", - "\n", - "workspaceId = \"GeniERuntimeExample\"\n", - "# local workspace, all results will be put here after local or cloud runs\n", - "# location of common files for all analysis, has to be below workspacePath and in the folder names CommonFilesr\n", - "root_folder = os.getcwd()\n", - "print(root_folder)\n", - "workspacePath = str(Path(root_folder, 'Workspace'))\n", - "cloudRun = False\n", "#If running locally the code below will also start the local workflow host.\n", - "workflow_client = one_workflow_client(workspace_id = workspaceId, workspace_path = workspacePath, cloud_run = cloudRun,\n", - " local_workflow_runtime_temp_folder_path = oneWorkflowTMPFolder, platform=Platform.WINDOWS, max_cores=5,auto_deploy_option = AutoDeployOption.DEV)\n", - "workflow_client.start_workflow_runtime_service()\n", - "if (cloudRun):\n", - " workflow_client.login()\n", - "if not workflow_client.upload_common_files(FileOptions(max_size=\"524MB\",patterns=[\"**/*.*\"], overwrite=True)):\n", - " print(\"Upload failed\")" + "workflow_client = one_workflow_client(workspace_id = workspaceId, cloud_run = cloudRun, workspace_path = workspacePath, local_workflow_runtime_temp_folder_path = oneWorkflowTMPFolder,\n", + " local_workflow_runtime_temp_folders_cleanup=False,environment=Environment.Testing,pool_id=\"SesamWorkflow_Windows_Standard_A1_v2\")\n" ] }, { @@ -55,9 +45,9 @@ "outputs": [], "source": [ "import pandas as pd\n", - "from SesamHelpers import *\n", "import shutil\n", - "import json\n", + "from dnv.oneworkflow import PythonCommand\n", + "from dnv.sesam.genie_runtime_command import *\n", "# we must delete existing results locally before generating new results\n", "local__result_path = Path(workspacePath, workflow_client.results_directory)\n", "if os.path.isdir(local__result_path):\n", @@ -65,33 +55,33 @@ "\n", "#parametrized values\n", "df = pd.DataFrame({'AP': [\"0m\", 
\"0.5m\", \"1m\"], 'FP': [\"150m\", \"250m\", \"500m\"]})\n", - "workUnit = GeniERuntimeTaskCreator(\"ContainerHull_template.js\", df,workflow_client).get_genieruntime_work_unit(cloudRun, workspacePath)\n", - "downloadOptions = FileOptions(\n", - " max_size=\"1112MB\",\n", - " patterns=[\"**/T1.FEM\", \"**/*.csv\"])\n", - "job = workflow_client.create_job(workUnit)\n", - "\n", "#for debugging only\n", "#job_json = json.dumps(job, default=lambda o: o.encode(), indent=4)\n", "#print(job_json)\n", - "await run_workflow_async(job, workflow_client, downloadOptions)\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### CLose client -must be done before a new job can be started in a different notebook\n", - "Will remove all job and blob folders." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "workflow_client.local_workflow_runtime_service.stop_service()" + "\n", + "\n", + "commands_info = []\n", + "for index, row in df.iterrows():\n", + " loadcase_folder_name = f\"Model_{index + 1}\"\n", + " genieruntime_command = GeniERuntimeCommand()\n", + " genieruntime_command.Parameters = {}\n", + " genieruntime_command.TemplateInputFile = \"ContainerHull_template.js\"\n", + " for key, value in row.items():\n", + " genieruntime_command.Parameters[key] = value\n", + " \n", + " post_processing_command = PythonCommand(\n", + " directory=workflow_client.common_directory,\n", + " filename=\"postprocessing.py\")\n", + " cmd_info = CommandInfo(commands=[genieruntime_command,post_processing_command],load_case_foldername=loadcase_folder_name)\n", + " commands_info.append(cmd_info)\n", + " \n", + "print(\"Running commands in parallel\")\n", + "await run_managed_commands_parallelly_async(\n", + " client=workflow_client,\n", + " commands_info=commands_info,\n", + " files_to_download_from_blob_to_client=FileOptions(max_size=\"11124MB\",patterns=[\"**/*.txt\", \"**/*.lis\", \"**/*.MLG\", 
\"**/*.MLG\",\"**/*.CSV\",\"**/*.FEM\"]),\n", + " enable_common_files_copy_to_load_cases=True,\n", + ")\n" ] }, { diff --git a/GeniERuntimeExample/GeniERuntimeExample.py b/GeniERuntimeExample/GeniERuntimeExample.py new file mode 100644 index 0000000..33853d3 --- /dev/null +++ b/GeniERuntimeExample/GeniERuntimeExample.py @@ -0,0 +1,52 @@ + +from dnv.oneworkflow.utils.workunit_extension import * +from dnv.oneworkflow.utils.starter import * +from pathlib import Path +import os +import pandas as pd +from dnv.sesam.genie_runtime_command import * +root_folder = os.path.dirname(os.path.abspath(__file__)) +workspacePath = str(Path(root_folder, 'Workspace')) +workspaceId = "GeniERuntimeExample" + +cloudRun = False +oneWorkflowTMPFolder = r'd:\oneworkflowTmp' #due to possible issues with long file paths we prefer to have this folder at the root +if not os.path.exists(oneWorkflowTMPFolder): + try: + print("Trying to create tmp folder for one workflow local execution") + os.mkdir(oneWorkflowTMPFolder) + print(oneWorkflowTMPFolder + " created!\n") + except: + print("did not manage to create tmp folder for local execution. Check that you have privileges to create it or try to manually create it from the coomand line.") +#If running locally the code below will also start the local workflow host. 
+workflow_client = one_workflow_client(workspace_id = workspaceId, cloud_run = cloudRun, workspace_path = workspacePath, local_workflow_runtime_temp_folder_path = oneWorkflowTMPFolder, + local_workflow_runtime_temp_folders_cleanup=False,environment=Environment.Testing) +#parametrized values +df = pd.DataFrame({'AP': ["0m", "0.5m", "1m"], 'FP': ["150m", "250m", "500m"]}) +#for debugging only +#job_json = json.dumps(job, default=lambda o: o.encode(), indent=4) +#print(job_json) + + +commands_info = [] +for index, row in df.iterrows(): + loadcase_folder_name = f"Model_{index + 1}" + genieruntime_command = GeniERuntimeCommand() + genieruntime_command.Parameters = {} + genieruntime_command.TemplateInputFile = "ContainerHull_template.js" + for key, value in row.items(): + genieruntime_command.Parameters[key] = value + + post_processing_command = PythonCommand( + directory=workflow_client.common_directory, + filename="postprocessing.py") + cmd_info = CommandInfo(commands=[genieruntime_command, post_processing_command],load_case_foldername=loadcase_folder_name) + commands_info.append(cmd_info) + +print("Running commands in parallel") +asyncio.run(run_managed_commands_parallelly_async( + client=workflow_client, + commands_info=commands_info, + files_to_download_from_blob_to_client=FileOptions(max_size="11124MB",patterns=["**/*.txt", "**/*.lis", "**/*.MLG", "**/*.MLG","**/*.CSV"]), + enable_common_files_copy_to_load_cases=True, +)) \ No newline at end of file diff --git a/GeniERuntimeExample/SesamHelpers.py b/GeniERuntimeExample/SesamHelpers.py deleted file mode 100644 index 8a0f98d..0000000 --- a/GeniERuntimeExample/SesamHelpers.py +++ /dev/null @@ -1,57 +0,0 @@ - -import pandas as pd -from typing import Any -import os -from dnv.sesam.sima_command import SimaCommand -from dnv.onecompute.flowmodel import ParallelWork -from dnv.sesam.genie_runtime_command import * -from dnv.oneworkflow import PythonCommand, CompositeExecutableCommand -from dnv.onecompute import 
FileSpecification -from dnv.oneworkflow.oneworkflowclient import OneWorkflowClient -from dnv.onecompute.flowmodel import WorkUnit - - -class GeniERuntimeTaskCreator: - def __init__(self, template_input_file: str, data_frame : pd.DataFrame, workflow_client: OneWorkflowClient): - """ - A class for setting up a GeniERuntime analysis for local or cloud run including simple postprocessing using SifIO - Parameters: - template_input_file(str) : template file to be used. - data_frame(pd.DataFrame): Pandas dataframe containing values for the template file. - workflow_client(workflow_client) : is needed to provide information about relevant folders like common files, result files and workspace. - - """ - self.template_input_file = workflow_client.workspace_info - self.common_files_folder = workflow_client.common_directory - self.results_folder = workflow_client.results_directory - self.data = data_frame - - - - def get_genieruntime_work_unit(self, cloud_run : bool, full_path_to_workspace: str): - """Returns a parallel processing unit based on parameters given in templatefile. 
- """ - parallel_work = ParallelWork() - for index, row in self.data.iterrows(): - loadcase_folder_name = f"Model_{index + 1}" - result_folder_lc = os.path.join(self.results_folder, loadcase_folder_name) - genieruntime_command = GeniERuntimeCommand() - genieruntime_command.Parameters = {} - genieruntime_command.TemplateInputFile = "ContainerHull_template.js" - for key, value in row.items(): - genieruntime_command.Parameters[key] = value - - python_copy_command = PythonCommand( - directory=self.common_files_folder, - filename="copyfiles.py") - post_processing_command = PythonCommand( - directory=self.common_files_folder, - filename="postprocessing.py") - cmd = CompositeExecutableCommand([python_copy_command, genieruntime_command, post_processing_command], result_folder_lc) - - work_unit = ( - WorkUnit(cmd, work_unit_id=loadcase_folder_name) - .output_directory(result_folder_lc, include_files=["**/*.FEM","**/*.csv"]) - ) - parallel_work.WorkItems.append(work_unit) - return parallel_work \ No newline at end of file diff --git a/GeniERuntimeExample/Workspace/CommonFiles/postprocessing.py b/GeniERuntimeExample/Workspace/CommonFiles/postprocessing.py index 33f282e..f98be87 100644 --- a/GeniERuntimeExample/Workspace/CommonFiles/postprocessing.py +++ b/GeniERuntimeExample/Workspace/CommonFiles/postprocessing.py @@ -21,6 +21,7 @@ def write_node_element_count(): Reads the number of occurrences of a data type and the size of the established pointer table for a datatype. 
""" + print("Reading node and element count from T1.FEM file\n") with SesamDataFactory.CreateReader(".", 'T1.FEM') as reader: reader.CreateModel() all_data =[] diff --git a/SE28ExampleSimaWasimSestra/SimaExample.py b/SE28ExampleSimaWasimSestra/SimaExample.py new file mode 100644 index 0000000..e6611c6 --- /dev/null +++ b/SE28ExampleSimaWasimSestra/SimaExample.py @@ -0,0 +1,48 @@ +from pathlib import Path +import os +from dnv.oneworkflow.utils.workunit_extension import * +from dnv.oneworkflow.utils.starter import * +from SimaHelper import * +import json +root_folder = os.path.dirname(os.path.abspath(__file__)) +oneWorkflowTMPFolder = r'd:\oneworkflowTmp' #due to possible issues with long file paths we prefer to have this folder at the root +if not os.path.exists(oneWorkflowTMPFolder): + try: + print("Trying to create tmp folder for one workflow local execution") + os.mkdir(oneWorkflowTMPFolder) + print(oneWorkflowTMPFolder + " created!\n") + except: + print("did not manage to create tmp folder for local execution. 
Check that you have privileges to create it or try to manually create it from the coomand line.") + +# local workspace, all results will be put here after local or cloud runs +# location of common files for all analysis, has to be below workspacePath +print(root_folder) +workspacePath = str(Path(root_folder, 'Workspace')) +workspaceId = "SE28" +loadcase_file = f"{workspacePath}\\test_cases_light.xlsx" +wasim_input_file = "test_cases_wasim_input.xlsx" +stask_file = "SimaTemplateV2.stask" +cloudRun = False +notebook_root_folder = os.getcwd() + +workflow_client = one_workflow_client(workspace_id = workspaceId, cloud_run = cloudRun, workspace_path = workspacePath, local_workflow_runtime_temp_folder_path = oneWorkflowTMPFolder, + local_workflow_runtime_temp_folders_cleanup=False,environment=Environment.Testing) + +"""Tests SIMA and Python commands""" +# Upload Input Files +workflow_client.upload_input_files() + +#Sima path must be specified +sima_settings = SimaSettings(sima_exe_path=r'C:\Program Files\DNV\Sima V4.4-00') +sima_commands = SimaTaskCreator(sima_settings, workflow_client).get_sima_commands(loadcase_file, stask_file) + + + +print("Running commands in parallel") +asyncio.run(run_managed_commands_parallelly_async( + #log_job = True, + client=workflow_client, + commands_info=sima_commands, + files_to_download_from_blob_to_client=FileOptions(max_size="11124MB",patterns=["**/*.log","**/*.txt", "**/*.lis", "**/*.MLG", "**/*.MLG","**/*.CSV"]), + enable_common_files_copy_to_load_cases=True, +)) \ No newline at end of file diff --git a/SE28ExampleSimaWasimSestra/SimaHelper.py b/SE28ExampleSimaWasimSestra/SimaHelper.py index b6ec99f..8eefed4 100644 --- a/SE28ExampleSimaWasimSestra/SimaHelper.py +++ b/SE28ExampleSimaWasimSestra/SimaHelper.py @@ -3,11 +3,10 @@ from typing import Any import os from dnv.sesam.sima_command import SimaCommand -from dnv.onecompute.flowmodel import ParallelWork -from dnv.oneworkflow import PythonCommand from dnv.onecompute import 
FileSpecification from dnv.oneworkflow.oneworkflowclient import OneWorkflowClient - +from dnv.oneworkflow.utils.workunit_extension import * +from dnv.oneworkflow.utils.starter import * class SimaSettings: def __init__(self, sima_exe_path: str, result_files_to_keep=[ "*-sima.lis", @@ -53,7 +52,7 @@ def get_commands_inputs(self,stask_file: str, case: dict[str, Any]) -> dict[str, - def get_sima_work_unit(self, full_path_to_load_case_file: str, stask_file: str ,single_task: bool = False): + def get_sima_commands(self, full_path_to_load_case_file: str, stask_file: str ,single_task: bool = False): """Returns a parallel processing unit based on input given in an Excel file. Parameters: @@ -63,18 +62,13 @@ def get_sima_work_unit(self, full_path_to_load_case_file: str, stask_file: str , """ os.chdir(self.workspace.workspace_path) - load_cases_parent_folder_name = self.workspace.load_cases_parent_directory - - parallel_work = ParallelWork() - parallel_work.work_items.clear() - + commands_info = [] # Open environmental input file + index = 0 df_cases = pd.read_excel(full_path_to_load_case_file, index_col=0) for loadcase_folder_name, case in df_cases.iterrows(): - load_case_folder = os.path.join( - load_cases_parent_folder_name, loadcase_folder_name) - result_folder_lc = os.path.join( - self.workspace.results_directory, loadcase_folder_name) + index = index + 1 + loadcase_folder_name = f"loadcase_{index}" # Get SIMA commands and inputs commands_inputs = self.get_commands_inputs(stask_file, case.to_dict()) # Create SimaCommand instance @@ -82,11 +76,9 @@ def get_sima_work_unit(self, full_path_to_load_case_file: str, stask_file: str , sima_cmd.commands = commands_inputs["commands"] sima_cmd.input = commands_inputs["inputs"] sima_cmd.sima_result_files = self.sima_settings.result_files_to_keep - sima_cmd.working_directory = load_case_folder + #sima_cmd.working_directory = load_case_folder # Add work item to ParallelWork instance - parallel_work.add(sima_cmd, 
work_unit_id=loadcase_folder_name).output_directory(result_folder_lc, - include_files=self.sima_settings.result_files_to_keep) - if single_task == True: - break - return parallel_work \ No newline at end of file + cmd_info = CommandInfo(commands=[sima_cmd],load_case_foldername=loadcase_folder_name) + commands_info.append(cmd_info) + return commands_info \ No newline at end of file diff --git a/SE28ExampleSimaWasimSestra/SimaHelperOld.py b/SE28ExampleSimaWasimSestra/SimaHelperOld.py new file mode 100644 index 0000000..b6ec99f --- /dev/null +++ b/SE28ExampleSimaWasimSestra/SimaHelperOld.py @@ -0,0 +1,92 @@ + +import pandas as pd +from typing import Any +import os +from dnv.sesam.sima_command import SimaCommand +from dnv.onecompute.flowmodel import ParallelWork +from dnv.oneworkflow import PythonCommand +from dnv.onecompute import FileSpecification +from dnv.oneworkflow.oneworkflowclient import OneWorkflowClient + +class SimaSettings: + def __init__(self, sima_exe_path: str, result_files_to_keep=[ + "*-sima.lis", + "variable*.inp", + "*.log", + "results.tda", + "results.txt", + "sima_*.res", + "sys-sima.dat", + "sima_*.bin", + "key_sima_*.txt", + "sima.*"]): + self.sima_exe_path = sima_exe_path + self.result_files_to_keep = result_files_to_keep + + +class SimaTaskCreator: + def __init__(self, sima_settings: SimaSettings, workflow_client: OneWorkflowClient): + """ + A class for setting up a Sima analysis for local or cloud run. + Parameters: + sima_settings(SimaSettings) : contains basic information about path to Sima executable and which files to keep after a Sima run. + workflow_client(workflow_client) : is needed to provide information about relevant folders like common files, result files and workspace. 
+ + """ + self.workspace = self.workspace = workflow_client.workspace_info + self.common_files_folder = workflow_client.common_directory + self.results_folder = workflow_client.results_directory + self.sima_settings = sima_settings + + def get_commands_inputs(self,stask_file: str, case: dict[str, Any]) -> dict[str, dict[str, Any]]: + commands = dict[str, Any]() + commands["--consoleLog"] = "" + commands["--log-level"] = "ALL" + commands["--data"] = "." + commands["--import"] = dict(file=FileSpecification(sharedfolder=True, + directory=self.common_files_folder, filename=stask_file)) + commands["--run"] = dict(task="WorkflowTask", + workflow="ExampleWorkflow") + + return {"commands": commands, "inputs": case} + + + + + def get_sima_work_unit(self, full_path_to_load_case_file: str, stask_file: str ,single_task: bool = False): + """Returns a parallel processing unit based on input given in an Excel file. + + Parameters: + full_path_to_load_case_file(str) : first row in the Excel file should contain the name of the loadcase, the other rows contains the variables to use with their variations. The name of the columns must match what is used with Sima. + stask_file(str) : the stask file to be used. I should be located in the common files folder. + single_task(bool): if set to True, the unit will only contain the first task. 
+ + """ + os.chdir(self.workspace.workspace_path) + load_cases_parent_folder_name = self.workspace.load_cases_parent_directory + + parallel_work = ParallelWork() + parallel_work.work_items.clear() + + # Open environmental input file + df_cases = pd.read_excel(full_path_to_load_case_file, index_col=0) + for loadcase_folder_name, case in df_cases.iterrows(): + load_case_folder = os.path.join( + load_cases_parent_folder_name, loadcase_folder_name) + result_folder_lc = os.path.join( + self.workspace.results_directory, loadcase_folder_name) + # Get SIMA commands and inputs + commands_inputs = self.get_commands_inputs(stask_file, case.to_dict()) + # Create SimaCommand instance + sima_cmd = SimaCommand(self.sima_settings.sima_exe_path) + sima_cmd.commands = commands_inputs["commands"] + sima_cmd.input = commands_inputs["inputs"] + sima_cmd.sima_result_files = self.sima_settings.result_files_to_keep + sima_cmd.working_directory = load_case_folder + + # Add work item to ParallelWork instance + parallel_work.add(sima_cmd, work_unit_id=loadcase_folder_name).output_directory(result_folder_lc, + include_files=self.sima_settings.result_files_to_keep) + if single_task == True: + break + return parallel_work \ No newline at end of file diff --git a/SE28ExampleSimaWasimSestra/Workspace/CommonFiles/SimaTemplateV2.stask b/SE28ExampleSimaWasimSestra/Workspace/CommonFiles/SimaTemplateV2.stask new file mode 100644 index 0000000..814bc18 Binary files /dev/null and b/SE28ExampleSimaWasimSestra/Workspace/CommonFiles/SimaTemplateV2.stask differ diff --git a/SE28ExampleSimaWasimSestra/Workspace/test_cases_light.xlsx b/SE28ExampleSimaWasimSestra/Workspace/test_cases_light.xlsx new file mode 100644 index 0000000..e625c8d Binary files /dev/null and b/SE28ExampleSimaWasimSestra/Workspace/test_cases_light.xlsx differ diff --git a/SE28ExampleSimaWasimSestra/simaSimple.ipynb b/SE28ExampleSimaWasimSestra/simaSimple.ipynb new file mode 100644 index 0000000..5dd9820 --- /dev/null +++ 
b/SE28ExampleSimaWasimSestra/simaSimple.ipynb @@ -0,0 +1,322 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Introduction \n", + "\n", + "\n", + "## How to use this notebook\n", + "This example runs Sima in locally based on input created in the [Create load case folders with unique Sima input](#loadcase_id) section. The code is only a pilot and only intended for testing. \n", + "\n", + "This note bookshould be run in the following way:\n", + "1. [Installation](#installation) can be run to install all the relevant tools, like python modules and local worker executable\n", + "2. [Initialize Workflow](#initialize), run this Python code once at every notebook start to set up basic settings. [Set up custom user parameters](#custom) section should be changed if you want to change workspace or switch between cloud and local run. 2. If you changed something in this section, remember to rerun [Set up OneWorkflow client](#builder).\n", + "3. [Run analysis](#run) shall be run each time a new Sima analysis needs to be run.\n", + "4. [Run Wasim and Sestra](#runwasim) run Wasim and Sestra using results from above Sima run.\n", + "\n", + "### Postprocessing\n", + "It is now possible to read the SIN/FEM file using SifIO, please consult the [Python examples](https://test.pypi.org/project/dnv-sifio/) C# [documentation](https://sesam.dnv.com/dev/api/sifio/). This notebook provides a small example script for getting node coordinates and displacements for the 200 first nodes for loadcase 11, [postprocessing.py](Workspace/CommonFiles/postprocessing.py). The script will be run just after Wasim and Sestra for each load case, see the [Run Wasim and Sestra](#runwasim) section. The section [Post processing](#postprocessing) shows how the post processed result files may be read and visualized in this notebook. More Python examples will be provided later. 
Relevant additional documents are:\n", + "- The input interface file documentations can be found [here](https://sesam.dnv.com/download/windows64/sesam_input_interface_format.pdf)\n", + "- The results interface format [here](https://sesam.dnv.com/download/windows64/sesam_results_interface_format.pdf)\n", + "\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Initialize workflow \n", + "Run only once when notebook is opened." + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Set up custom user parameters \n", + "You need to set oneWorkflowTMPFolder to a folder that works for you. A short folder path is recommended due to possible issues with Sima and long file names." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from pathlib import Path\n", + "import os\n", + "\n", + "oneWorkflowTMPFolder = r'd:\\oneworkflowTmp' #due to possible issues with long file paths we prefer to have this folder at the root\n", + "if not os.path.exists(oneWorkflowTMPFolder):\n", + " try:\n", + " print(\"Trying to create tmp folder for one workflow local execution\")\n", + " os.mkdir(oneWorkflowTMPFolder)\n", + " print(oneWorkflowTMPFolder + \" created!\\n\")\n", + " except:\n", + " print(\"did not manage to create tmp folder for local execution. 
Check that you have privileges to create it or try to manually create it from the coomand line.\")\n", + "\n", + "# local workspace, all results will be put here after local or cloud runs\n", + "# location of common files for all analysis, has to be below workspacePath\n", + "root_folder = os.getcwd()\n", + "print(root_folder)\n", + "workspacePath = str(Path(root_folder, 'Workspace'))\n", + "workspaceId = \"SE28\"\n", + "loadcase_file = f\"{workspacePath}\\\\test_cases_light.xlsx\"\n", + "wasim_input_file = \"test_cases_wasim_input.xlsx\"\n", + "stask_file = \"SimaTemplateV2.stask\"\n", + "cloudRun = False\n", + "notebook_root_folder = os.getcwd()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Set up OneWorkflow client \n", + "Run only once workbook is started or if some parameters above are changed." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from dnv.oneworkflow.utils.workunit_extension import *\n", + "from dnv.oneworkflow.utils.starter import *\n", + "\n", + "workflow_client = one_workflow_client(workspace_id = workspaceId, workspace_path = workspacePath, cloud_run = cloudRun,\n", + " local_workflow_runtime_temp_folder_path = oneWorkflowTMPFolder, platform=Platform.WINDOWS, max_cores=5,auto_deploy_option = AutoDeployOption.DEV)\n", + "workflow_client.start_workflow_runtime_service()\n", + "workspace = workflow_client.workspace_info\n", + "commonfiles_folder = workspace.common_files_directory\n", + "results_folder = workspace.results_directory\n", + "#If running locally the code below will also start the local workflow host.\n", + "if (cloudRun):\n", + " workflow_client.login()\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Upload common files for the job \n", + "This step uploads all common files in folder *commonFilesDirectory* to the job. 
Only needed to run if new common files are to be uploaded or workspace changed." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "from dnv.onecompute.directory_client import FileOptions\n", + "try:\n", + " workflow_client.upload_common_files(FileOptions(\n", + " # max_size_bytes=124_000,\n", + " #patterns=[\"**/*.py\",\"**/*.inp\"],\n", + " overwrite=True))\n", + "except Exception as e:\n", + " print(e)\n", + " print(\"Ignore this error message if the files are already present.\")\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Run Sima analysis \n", + "This code will fetch data from the blob storage created in the step above, and run all the job tasks. The code will wait for all tasks to complete before downloading the results." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from SimaHelperOld import *\n", + "import json\n", + "\"\"\"Tests SIMA and Python commands\"\"\"\n", + "# Upload Input Files\n", + "workflow_client.upload_input_files()\n", + "\n", + "#Sima path must be specified\n", + "sima_settings = SimaSettings(sima_exe_path=r'C:\\Program Files\\DNV\\Sima V4.4-00')\n", + "sima_work_unit = SimaTaskCreator(sima_settings, workflow_client).get_sima_work_unit(loadcase_file, stask_file)\n", + "\n", + "# Create Parallel Work Unit and Job\n", + "job = workflow_client.create_job(sima_work_unit)\n", + "job_json = json.dumps(job, default=lambda o: o.encode(), indent=4)\n", + "\n", + "#print(job_json)\n", + "# Run workflow\n", + "downloadOptions = FileOptions(\n", + " max_size=\"11124MB\",\n", + " patterns=[\"**/*.txt\", \"**/*.lis\", \"**/*.mlg\"])\n", + "await run_workflow_async(job, workflow_client, downloadOptions)\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Run Wasim and Sestra " + ] + }, + { + "cell_type": "code", 
+ "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from dnv.onecompute.flowmodel import WorkUnit\n", + "from SesamUtilities import WasimAndSestraTaskCreator\n", + "import pandas as pd\n", + "from dnv.oneworkflow import ParallelWork\n", + "import json\n", + "import os\n", + "workspace.results_directory = \"Results\"\n", + "load_cases = [\"test001\", \"test002\"]\n", + "\n", + "os.chdir(workspacePath)\n", + "topSuperElement = 3\n", + "# due to 5 field width on Sestra cards we need to use as short name here\n", + "additionalTemplateParameters = {\n", + " 'FMT': topSuperElement, 'topsel': topSuperElement}\n", + "\n", + "def run_wasim_and_sestra_using_results_from_sima(\n", + " results_directory: str, load_cases: str) -> ParallelWork:\n", + " \"\"\"Creates a parallel work unit\"\"\"\n", + " df_cases = pd.read_excel(os.path.join(\n", + " workspacePath, wasim_input_file), index_col=0)\n", + " parallel_work_units = list[WorkUnit]()\n", + "\n", + " for casename, case in df_cases.iterrows():\n", + " if not casename in load_cases:\n", + " print(\"skipping \" + casename)\n", + " continue\n", + " \n", + " load_case_result_files_dir = os.path.join(results_directory, casename)\n", + " casedict = case.to_dict()\n", + " \n", + " cmd = WasimAndSestraTaskCreator(\n", + " load_case_result_files_dir, commonfiles_folder, casedict, additionalTemplateParameters).CreateTasks()\n", + " work_unit = (\n", + " WorkUnit(cmd, f\"post_rerun_{casename}\")\n", + " .input_directory(load_case_result_files_dir)\n", + " .output_directory(load_case_result_files_dir, include_files=[\"**/sima.*\", \"**/*.txt\", \"**/*.tda\", \"**/*.bin\", \"**/*.log\", \"**/*.inp\", \"**/*.lis\", \"**/*.mlg\", \"**/*.sin\"])\n", + " )\n", + " parallel_work_units.append(work_unit)\n", + "\n", + " return ParallelWork(parallel_work_units)\n", + "print(os.getcwd())\n", + "print(workflow_client.workspace_path)\n", + "\n", + "work_unit = run_wasim_and_sestra_using_results_from_sima(\n", + " 
workspace.results_directory, load_cases)\n", + "if not cloudRun:\n", + " workflow_client.upload_common_files()\n", + " workflow_client.upload_result_files()\n", + "\n", + "\n", + "downloadOptions = FileOptions(\n", + " min_size =0,\n", + " max_size= \"11124MB\",\n", + " patterns=[\"**/*.txt\", \"**/*.lis\", \"**/*.mlg\", \"**/*.sin\"])\n", + "await run_workflow_async(workflow_client.create_job(work_unit), workflow_client, downloadOptions)\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "workflow_client.stop_workflow_runtime_service()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Post processing " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from IPython.display import display\n", + "import numpy as np\n", + "import pandas as pd\n", + "import glob\n", + "from ipywidgets import interactive\n", + "\n", + "lc = 11\n", + "dataFrames = {}\n", + "print(workspacePath)\n", + "for folder in glob.glob(f\"{workspacePath}\\\\Results\\\\*\"):\n", + " test_name = folder.split(\"\\\\\")[-1]\n", + " try:\n", + " data = np.loadtxt(f\"{folder}\\\\postprocessedresultsLC{lc}.txt\")\n", + " dispdata = {\n", + " \"x-coord\": data[:, 0],\n", + " \"total-disp\": np.sqrt(data[:, 3]**2+data[:, 4]**2+data[:, 5]**2)\n", + " }\n", + " dataFrames[test_name] = pd.DataFrame(dispdata)\n", + " except:\n", + " print(\"Failed loading data for test :\" + test_name)\n", + "\n", + "def multiplot(resultcase):\n", + " dataFrames[resultcase].plot(\n", + " x=\"x-coord\", y=\"total-disp\", kind=\"scatter\", figsize=(15, 15))\n", + "\n", + "\n", + "interactive_plot = interactive(multiplot, resultcase=dataFrames.keys())\n", + "interactive_plot\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + 
"name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.1" + }, + "orig_nbformat": 4, + "vscode": { + "interpreter": { + "hash": "e3f4950b6ecc2246e9f46f113f926a9a268224216b0af4259a912530ba1db262" + } + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/SE28ExampleSimaWasimSestra/workflowCoreDemoSE28SestraAndWasim.ipynb b/SE28ExampleSimaWasimSestra/workflowCoreDemoSE28SestraAndWasim.ipynb index 61131f9..fe0fdcc 100644 --- a/SE28ExampleSimaWasimSestra/workflowCoreDemoSE28SestraAndWasim.ipynb +++ b/SE28ExampleSimaWasimSestra/workflowCoreDemoSE28SestraAndWasim.ipynb @@ -44,17 +44,9 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "c:\\Users\\kblu\\source\\repos\\improveflowGIT\\SE28ExampleSimaWasimSestra\n" - ] - } - ], + "outputs": [], "source": [ "from pathlib import Path\n", "import os\n", @@ -92,64 +84,23 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 5, "metadata": {}, "outputs": [ { - "ename": "AttributeError", - "evalue": "'WindowsPath' object has no attribute 'strip'", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mAttributeError\u001b[0m Traceback (most recent call last)", - "\u001b[1;32mc:\\Users\\kblu\\source\\repos\\improveflowGIT\\SE28ExampleSimaWasimSestra\\workflowCoreDemoSE28SestraAndWasim.ipynb Cell 6\u001b[0m line \u001b[0;36m4\n\u001b[0;32m 1\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mdnv\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39moneworkflow\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mutils\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mworkunit_extension\u001b[39;00m \u001b[39mimport\u001b[39;00m 
\u001b[39m*\u001b[39m\n\u001b[0;32m 2\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mdnv\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39moneworkflow\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mutils\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mstarter\u001b[39;00m \u001b[39mimport\u001b[39;00m \u001b[39m*\u001b[39m\n\u001b[1;32m----> 4\u001b[0m workflow_client \u001b[39m=\u001b[39m one_workflow_client(workspace_id \u001b[39m=\u001b[39;49m workspaceId, workspace_path \u001b[39m=\u001b[39;49m workspacePath, cloud_run \u001b[39m=\u001b[39;49m cloudRun,\n\u001b[0;32m 5\u001b[0m local_workflow_runtime_temp_folder_path \u001b[39m=\u001b[39;49m oneWorkflowTMPFolder, platform\u001b[39m=\u001b[39;49mPlatform\u001b[39m.\u001b[39;49mWINDOWS, max_cores\u001b[39m=\u001b[39;49m\u001b[39m5\u001b[39;49m,auto_deploy_option \u001b[39m=\u001b[39;49m AutoDeployOption\u001b[39m.\u001b[39;49mDEV)\n\u001b[0;32m 6\u001b[0m workflow_client\u001b[39m.\u001b[39mstart_workflow_runtime_service()\n\u001b[0;32m 7\u001b[0m workspace \u001b[39m=\u001b[39m workflow_client\u001b[39m.\u001b[39mworkspace_info\n", - "File \u001b[1;32mc:\\Users\\kblu\\AppData\\Local\\pypoetry\\Cache\\virtualenvs\\ow-X4wP4oR8-py3.10\\lib\\site-packages\\dnv\\oneworkflow\\utils\\starter.py:187\u001b[0m, in \u001b[0;36mone_workflow_client\u001b[1;34m(workspace_id, workspace_path, cloud_run, local_workflow_runtime_temp_folder_path, local_workflow_runtime_service_visible, platform, max_cores, debug_local_worker, local_worker_host_apps_path, local_workflow_runtime_path, auto_deploy_option, console_log_level, redirect_console_logs_to_terminal)\u001b[0m\n\u001b[0;32m 135\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mone_workflow_client\u001b[39m(\n\u001b[0;32m 136\u001b[0m workspace_id: \u001b[39mstr\u001b[39m,\n\u001b[0;32m 137\u001b[0m workspace_path: \u001b[39mstr\u001b[39m,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 148\u001b[0m redirect_console_logs_to_terminal: \u001b[39mbool\u001b[39m \u001b[39m=\u001b[39m 
\u001b[39mFalse\u001b[39;00m,\n\u001b[0;32m 149\u001b[0m ) \u001b[39m-\u001b[39m\u001b[39m>\u001b[39m OneWorkflowClient:\n\u001b[0;32m 150\u001b[0m \u001b[39m \u001b[39m\u001b[39m\"\"\"\u001b[39;00m\n\u001b[0;32m 151\u001b[0m \u001b[39m Creates and configures an instance of the OneWorkflowClient for interacting with the\u001b[39;00m\n\u001b[0;32m 152\u001b[0m \u001b[39m OneWorkflow system. The client can be used to manage and execute workflows. It is\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 185\u001b[0m \u001b[39m parameters.\u001b[39;00m\n\u001b[0;32m 186\u001b[0m \u001b[39m \"\"\"\u001b[39;00m\n\u001b[1;32m--> 187\u001b[0m workflow_client \u001b[39m=\u001b[39m OneWorkflowClient(\n\u001b[0;32m 188\u001b[0m local_workflow_runtime_temp_folder_path\u001b[39m=\u001b[39;49mlocal_workflow_runtime_temp_folder_path,\n\u001b[0;32m 189\u001b[0m local_workflow_runtime_service_visible\u001b[39m=\u001b[39;49mlocal_workflow_runtime_service_visible,\n\u001b[0;32m 190\u001b[0m cloud_run\u001b[39m=\u001b[39;49mcloud_run,\n\u001b[0;32m 191\u001b[0m workspace_id\u001b[39m=\u001b[39;49mworkspace_id,\n\u001b[0;32m 192\u001b[0m workspace_path\u001b[39m=\u001b[39;49mworkspace_path,\n\u001b[0;32m 193\u001b[0m environment\u001b[39m=\u001b[39;49mEnvironment\u001b[39m.\u001b[39;49mTesting,\n\u001b[0;32m 194\u001b[0m application_id\u001b[39m=\u001b[39;49moc_application_id(cloud_run, platform),\n\u001b[0;32m 195\u001b[0m executable_name\u001b[39m=\u001b[39;49mexecutable_name(cloud_run),\n\u001b[0;32m 196\u001b[0m local_worker_host_apps_path\u001b[39m=\u001b[39;49mlocal_worker_host_apps_path,\n\u001b[0;32m 197\u001b[0m local_workflow_runtime_path\u001b[39m=\u001b[39;49mlocal_workflow_runtime_path,\n\u001b[0;32m 198\u001b[0m debug_local_worker\u001b[39m=\u001b[39;49mdebug_local_worker,\n\u001b[0;32m 199\u001b[0m console_log_level\u001b[39m=\u001b[39;49mconsole_log_level,\n\u001b[0;32m 200\u001b[0m auto_deploy_option\u001b[39m=\u001b[39;49mauto_deploy_option,\n\u001b[0;32m 
201\u001b[0m max_concurrent_workers\u001b[39m=\u001b[39;49mmax_cores,\n\u001b[0;32m 202\u001b[0m redirect_console_logs_to_terminal\u001b[39m=\u001b[39;49mredirect_console_logs_to_terminal,\n\u001b[0;32m 203\u001b[0m )\n\u001b[0;32m 204\u001b[0m \u001b[39mreturn\u001b[39;00m workflow_client\n", - "File \u001b[1;32mc:\\Users\\kblu\\AppData\\Local\\pypoetry\\Cache\\virtualenvs\\ow-X4wP4oR8-py3.10\\lib\\site-packages\\dnv\\oneworkflow\\oneworkflowclient.py:208\u001b[0m, in \u001b[0;36mOneWorkflowClient.__init__\u001b[1;34m(self, application_id, executable_name, workspace_id, workspace_path, common_directory, load_cases_directory, results_directory, local_workflow_runtime_endpoint, local_workflow_runtime_path, local_workflow_runtime_temp_folder_path, local_workflow_runtime_startup_wait_time, local_workflow_runtime_service_visible, local_workflow_runtime_log_filename, local_worker_host_apps_path, debug_local_worker, console_log_level, auto_deploy_option, max_concurrent_workers, cloud_run, environment, authenticator, pool_id, job_status_polling_interval, redirect_console_logs_to_terminal)\u001b[0m\n\u001b[0;32m 206\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_workspace_config: Optional[WorkspaceConfiguration] \u001b[39m=\u001b[39m \u001b[39mNone\u001b[39;00m\n\u001b[0;32m 207\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_worker_config: Optional[WorkerConfiguration] \u001b[39m=\u001b[39m \u001b[39mNone\u001b[39;00m\n\u001b[1;32m--> 208\u001b[0m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m__post_init__()\n", - "File \u001b[1;32mc:\\Users\\kblu\\AppData\\Local\\pypoetry\\Cache\\virtualenvs\\ow-X4wP4oR8-py3.10\\lib\\site-packages\\dnv\\oneworkflow\\oneworkflowclient.py:218\u001b[0m, in \u001b[0;36mOneWorkflowClient.__post_init__\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 215\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mnot\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mworkspace_id\u001b[39m.\u001b[39mstrip():\n\u001b[0;32m 216\u001b[0m 
\u001b[39mself\u001b[39m\u001b[39m.\u001b[39mworkspace_id \u001b[39m=\u001b[39m \u001b[39mstr\u001b[39m(uuid\u001b[39m.\u001b[39muuid4())\n\u001b[1;32m--> 218\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mnot\u001b[39;00m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mworkspace_path\u001b[39m.\u001b[39;49mstrip():\n\u001b[0;32m 219\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mworkspace_path \u001b[39m=\u001b[39m os\u001b[39m.\u001b[39mgetcwd()\n\u001b[0;32m 221\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mnot\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mcommon_directory\u001b[39m.\u001b[39mstrip():\n", - "\u001b[1;31mAttributeError\u001b[0m: 'WindowsPath' object has no attribute 'strip'" + "name": "stdout", + "output_type": "stream", + "text": [ + "The temporary blob storage directory is: d:\\oneworkflowTmp\\oc_nov1vni5_blob\n", + "The temporary jobs root directory is: d:\\oneworkflowTmp\\oc_o4hq2wnp_jobs\n" ] } ], "source": [ "from dnv.oneworkflow.utils.workunit_extension import *\n", "from dnv.oneworkflow.utils.starter import *\n", - "\n", - "workflow_client = one_workflow_client(workspace_id = workspaceId, workspace_path = workspacePath, cloud_run = cloudRun,\n", - " local_workflow_runtime_temp_folder_path = oneWorkflowTMPFolder, platform=Platform.WINDOWS, max_cores=5,auto_deploy_option = AutoDeployOption.DEV)\n", - "workflow_client.start_workflow_runtime_service()\n", - "workspace = workflow_client.workspace_info\n", - "commonfiles_folder = workspace.common_files_directory\n", - "results_folder = workspace.results_directory\n", - "#If running locally the code below will also start the local workflow host.\n", - "if (cloudRun):\n", - " workflow_client.login()\n" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Upload common files for the job \n", - "This step uploads all common files in folder *commonFilesDirectory* to the job. 
Only needed to run if new common files are to be uploaded or workspace changed." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "\n", - "from dnv.onecompute.directory_client import FileOptions\n", - "try:\n", - " workflow_client.upload_common_files(FileOptions(\n", - " # max_size_bytes=124_000,\n", - " #patterns=[\"**/*.py\",\"**/*.inp\"],\n", - " overwrite=True))\n", - "except Exception as e:\n", - " print(e)\n", - " print(\"Ignore this error message if the files are already present.\")\n" + "workflow_client = one_workflow_client(workspace_id = workspaceId, cloud_run = cloudRun, workspace_path = workspacePath, local_workflow_runtime_temp_folder_path = oneWorkflowTMPFolder,\n", + " local_workflow_runtime_temp_folders_cleanup=False,environment=Environment.Testing)\n" ] }, { @@ -175,18 +126,18 @@ "\n", "#Sima path must be specified\n", "sima_settings = SimaSettings(sima_exe_path=r'C:\\Program Files\\DNV\\Sima V4.4-00')\n", - "sima_work_unit = SimaTaskCreator(sima_settings, workflow_client).get_sima_work_unit(loadcase_file, stask_file)\n", + "sima_commands = SimaTaskCreator(sima_settings, workflow_client).get_sima_commands(loadcase_file, stask_file)\n", "\n", - "# Create Parallel Work Unit and Job\n", - "job = workflow_client.create_job(sima_work_unit)\n", - "job_json = json.dumps(job, default=lambda o: o.encode(), indent=4)\n", "\n", - "#print(job_json)\n", - "# Run workflow\n", - "downloadOptions = FileOptions(\n", - " max_size=\"11124MB\",\n", - " patterns=[\"**/*.txt\", \"**/*.lis\", \"**/*.mlg\"])\n", - "await run_workflow_async(job, workflow_client, downloadOptions)\n" + "\n", + "print(\"Running commands in parallel\")\n", + "await run_managed_commands_parallelly_async(\n", + " log_job = True,\n", + " client=workflow_client,\n", + " commands_info=sima_commands,\n", + " files_to_download_from_blob_to_client=FileOptions(max_size=\"11124MB\",patterns=[\"**/*.log\",\"**/*.txt\", \"**/*.lis\", 
\"**/*.MLG\", \"**/*.MLG\",\"**/*.CSV\",\"**/*.*\"]),\n", + " enable_common_files_copy_to_load_cases=True,\n", + ")" ] }, { diff --git a/SesamCoreExample/SesamCoreExample.ipynb b/SesamCoreExample/SesamCoreExample.ipynb index dba34b7..a417657 100644 --- a/SesamCoreExample/SesamCoreExample.ipynb +++ b/SesamCoreExample/SesamCoreExample.ipynb @@ -17,105 +17,310 @@ "source": [ "from dnv.oneworkflow.utils.workunit_extension import *\n", "from dnv.oneworkflow.utils.starter import *\n", - "from pathlib import Path\n", - "import os\n", - "await install_workflow_runtime(repository = PackageManager.Repository.DEV)" + "await install_workflow_runtime(repository = Repository.DEV)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The temporary blob storage directory is: d:\\oneworkflowTmp\\oc_nbrfab12_blob\n", + "The temporary jobs root directory is: d:\\oneworkflowTmp\\oc_sqw_ohhl_jobs\n" + ] + } + ], "source": [ "from dnv.oneworkflow.utils.workunit_extension import *\n", "from dnv.oneworkflow.utils.starter import *\n", - "from dnv.oneworkflow import OneWorkflowClient\n", "from pathlib import Path\n", "import os\n", - "oneWorkflowTMPFolder = r'c:\\OneWorkflowTMP' #due to possible issues with long file paths we prefer to have this folder at the root\n", + "root_folder = os.getcwd()\n", + "workspacePath = str(Path(root_folder, 'Workspace'))\n", + "workspaceId = \"SesamCoreExample\"\n", + "\n", + "cloudRun = False\n", + "oneWorkflowTMPFolder = r'd:\\oneworkflowTmp' #due to possible issues with long file paths we prefer to have this folder at the root\n", "if not os.path.exists(oneWorkflowTMPFolder):\n", " try:\n", " print(\"Trying to create tmp folder for one workflow local execution\")\n", " os.mkdir(oneWorkflowTMPFolder)\n", + " print(oneWorkflowTMPFolder + \" created!\\n\")\n", " except:\n", " print(\"did not manage to create tmp folder 
for local execution. Check that you have privileges to create it or try to manually create it from the coomand line.\")\n", - "\n", - "# local workspace, all results will be put here after local or cloud runs\n", - "# location of common files for all analysis, has to be below workspacePath and in the folder names CommonFiles\n", - "root_folder = os.getcwd()\n", - "workspacePath = str(Path(root_folder, 'Workspace'))\n", - "workspaceId = \"SesamCoreExample\"\n", - "cloudRun = False\n", "#If running locally the code below will also start the local workflow host.\n", - "workflow_client = one_workflow_client(workspace_id = workspaceId, workspace_path = workspacePath, cloud_run = cloudRun,\n", - " local_workflow_runtime_temp_folder_path = oneWorkflowTMPFolder, platform=Platform.WINDOWS, max_cores=5,auto_deploy_option = AutoDeployOption.DEV)\n", - "workflow_client.start_workflow_runtime_service()\n", - "if (cloudRun):\n", - " workflow_client.login()\n", - "upload_success = workflow_client.upload_common_files(FileOptions(max_size=\"524MB\",patterns=[\"**/*.*\"], overwrite=True))\n", - "\n", - "# max number of threads used when running locally\n" + "workflow_client = one_workflow_client(workspace_id = workspaceId, cloud_run = cloudRun, workspace_path = workspacePath, local_workflow_runtime_temp_folder_path = oneWorkflowTMPFolder,\n", + " local_workflow_runtime_temp_folders_cleanup=False,environment=Environment.Testing)\n" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 9, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Processing LoadCase1\n", + "Processing LoadCase2\n", + "Processing LoadCase3\n", + "Processing LoadCase4\n", + "Processing LoadCase5\n", + "Processing LoadCase6\n", + "Processing LoadCase7\n", + "Processing LoadCase8\n", + "Processing LoadCase9\n", + "Processing LoadCase10\n", + "Processing LoadCase11\n", + "Processing LoadCase12\n", + "Processing LoadCase13\n", + 
"Running commands in parallel\n", + "Info: Attempt 1/10: LocalWorkflowRuntime service is not ready yet. Retrying in 5 seconds.\n", + "Info: The LocalWorkflowRuntime service is ready.\n", + "Uploading d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\CommonFiles\\copyFiles.py to d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\CommonFiles\\copyFiles.py\n", + "Uploading d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\CommonFiles\\Specimen1_HotSpotPlate_SCORE-HOTSPOT.JNL to d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\CommonFiles\\Specimen1_HotSpotPlate_SCORE-HOTSPOT.JNL\n", + "Uploading d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\CommonFiles\\Specimen1_HotSpots.json to d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\CommonFiles\\Specimen1_HotSpots.json\n", + "Uploading d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\CommonFiles\\Specimen1_input.json to d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\CommonFiles\\Specimen1_input.json\n", + "Uploading d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\CommonFiles\\Specimen1_sestra.inp to d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\CommonFiles\\Specimen1_sestra.inp\n", + "Uploading d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\CommonFiles\\Specimen1_T1.FEM to d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\CommonFiles\\Specimen1_T1.FEM\n", + "Info: Directory 'd:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases' not specified or does not exist. 
Skipping files upload.\n", + "Info: The work item 8219911d-a113-4f20-baa7-72d7d255b88e message is 'Created'\n", + "Info: The work item LoadCase1 message is 'Created'\n", + "Info: The work item LoadCase2 message is 'Created'\n", + "Info: The work item LoadCase3 message is 'Created'\n", + "Info: The work item LoadCase4 message is 'Created'\n", + "Info: The work item LoadCase5 message is 'Created'\n", + "Info: The work item LoadCase6 message is 'Created'\n", + "Info: The work item LoadCase7 message is 'Created'\n", + "Info: The work item LoadCase8 message is 'Created'\n", + "Info: The work item LoadCase9 message is 'Created'\n", + "Info: The work item LoadCase10 message is 'Created'\n", + "Info: The work item LoadCase11 message is 'Created'\n", + "Info: The work item LoadCase12 message is 'Created'\n", + "Info: The work item LoadCase13 message is 'Created'\n", + "Info: The progress of the job is '0%'. The message is ''\n", + "Info: The work item LoadCase1 message is ''\n", + "Info: The work item LoadCase2 message is ''\n", + "Info: The work item LoadCase3 message is ''\n", + "Info: The work item LoadCase4 message is ''\n", + "Info: The work item LoadCase5 message is ''\n", + "Info: The work item LoadCase6 message is ''\n", + "Info: The work item LoadCase7 message is ''\n", + "Info: The work item LoadCase8 message is ''\n", + "Info: The work item LoadCase9 message is ''\n", + "Info: The work item LoadCase10 message is ''\n", + "Info: The work item LoadCase11 message is ''\n", + "Info: The work item LoadCase12 message is ''\n", + "Info: The work item LoadCase13 message is ''\n", + "Info: The status of work item '8219911d-a113-4f20-baa7-72d7d255b88e' is 'Completed'\n", + "Info: The status of work item 'LoadCase1' is 'Completed'\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase1\\stderr.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\stderr.txt\n", + "Downloading 
d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase1\\stdout.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\stdout.txt\n", + "Info: Downloading files from file:///d:/oneworkflowTmp/oc_nbrfab12_blob/SesamCoreExample/LoadCases/ to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase1\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase1\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase10\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase10\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase11\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase11\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase12\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase12\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase13\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase13\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase2\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase2\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading 
d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase3\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase3\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase4\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase4\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase5\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase5\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase6\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase6\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase7\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase7\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase8\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase8\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase9\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase9\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase1\\SCORE.MLG to 
d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase1\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase1\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase1\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase10\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase10\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase10\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase10\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase11\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase11\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase11\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase11\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase12\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase12\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase12\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase12\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase13\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase13\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase13\\SESTRA.MLG to 
d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase13\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase2\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase2\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase2\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase2\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase3\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase3\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase3\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase3\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase4\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase4\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase4\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase4\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase5\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase5\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase5\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase5\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase6\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase6\\SCORE.MLG\n", + 
"Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase6\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase6\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase7\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase7\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase7\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase7\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase8\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase8\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase8\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase8\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase9\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase9\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase9\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase9\\SESTRA.MLG\n", + "Info: Download completed.\n", + "Info: The status of work item 'LoadCase2' is 'Completed'\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase2\\stderr.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\stderr.txt\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase2\\stdout.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\stdout.txt\n", + "Info: Downloading files from 
file:///d:/oneworkflowTmp/oc_nbrfab12_blob/SesamCoreExample/LoadCases/ to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase1\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase1\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase10\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase10\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase11\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase11\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase12\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase12\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase13\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase13\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase2\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase2\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase3\\SesamCore_Specimen1_HotSpotPlate.lis to 
d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase3\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase4\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase4\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase5\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase5\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase6\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase6\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase7\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase7\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase8\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase8\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase9\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase9\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase1\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase1\\SCORE.MLG\n", + "Downloading 
d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase1\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase1\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase10\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase10\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase10\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase10\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase11\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase11\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase11\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase11\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase12\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase12\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase12\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase12\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase13\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase13\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase13\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase13\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase2\\SCORE.MLG to 
d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase2\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase2\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase2\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase3\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase3\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase3\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase3\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase4\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase4\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase4\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase4\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase5\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase5\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase5\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase5\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase6\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase6\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase6\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase6\\SESTRA.MLG\n", + 
"Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase7\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase7\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase7\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase7\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase8\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase8\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase8\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase8\\SESTRA.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase9\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase9\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase9\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\\\LoadCase9\\SESTRA.MLG\n", + "Info: Download completed.\n", + "Info: The status of work item 'LoadCase3' is 'Completed'\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase3\\stderr.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase3\\stderr.txt\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase3\\stdout.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase3\\stdout.txt\n", + "Info: Downloading files from file:///d:/oneworkflowTmp/oc_nbrfab12_blob/SesamCoreExample/LoadCases/LoadCase3 to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase3\n", + "Downloading 
d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase3\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase3\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase3\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase3\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase3\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase3\\SESTRA.MLG\n", + "Info: Download completed.\n", + "Info: The status of work item 'LoadCase4' is 'Completed'\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase4\\stderr.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase4\\stderr.txt\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase4\\stdout.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase4\\stdout.txt\n", + "Info: Downloading files from file:///d:/oneworkflowTmp/oc_nbrfab12_blob/SesamCoreExample/LoadCases/LoadCase4 to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase4\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase4\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase4\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase4\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase4\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase4\\SESTRA.MLG to 
d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase4\\SESTRA.MLG\n", + "Info: Download completed.\n", + "Info: The status of work item 'LoadCase5' is 'Completed'\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase5\\stderr.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase5\\stderr.txt\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase5\\stdout.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase5\\stdout.txt\n", + "Info: Downloading files from file:///d:/oneworkflowTmp/oc_nbrfab12_blob/SesamCoreExample/LoadCases/LoadCase5 to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase5\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase5\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase5\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase5\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase5\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase5\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase5\\SESTRA.MLG\n", + "Info: Download completed.\n", + "Info: The status of work item 'LoadCase6' is 'Completed'\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase6\\stderr.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase6\\stderr.txt\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase6\\stdout.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase6\\stdout.txt\n", + 
"Info: Downloading files from file:///d:/oneworkflowTmp/oc_nbrfab12_blob/SesamCoreExample/LoadCases/LoadCase6 to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase6\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase6\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase6\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase6\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase6\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase6\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase6\\SESTRA.MLG\n", + "Info: Download completed.\n", + "Info: The status of work item 'LoadCase7' is 'Completed'\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase7\\stderr.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase7\\stderr.txt\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase7\\stdout.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase7\\stdout.txt\n", + "Info: Downloading files from file:///d:/oneworkflowTmp/oc_nbrfab12_blob/SesamCoreExample/LoadCases/LoadCase7 to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase7\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase7\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase7\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase7\\SCORE.MLG to 
d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase7\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase7\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase7\\SESTRA.MLG\n", + "Info: Download completed.\n", + "Info: The status of work item 'LoadCase8' is 'Completed'\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase8\\stderr.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase8\\stderr.txt\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase8\\stdout.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase8\\stdout.txt\n", + "Info: Downloading files from file:///d:/oneworkflowTmp/oc_nbrfab12_blob/SesamCoreExample/LoadCases/LoadCase8 to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase8\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase8\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase8\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase8\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase8\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase8\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase8\\SESTRA.MLG\n", + "Info: Download completed.\n", + "Info: The status of work item 'LoadCase9' is 'Completed'\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase9\\stderr.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase9\\stderr.txt\n", + 
"Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase9\\stdout.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase9\\stdout.txt\n", + "Info: Downloading files from file:///d:/oneworkflowTmp/oc_nbrfab12_blob/SesamCoreExample/LoadCases/LoadCase9 to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase9\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase9\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase9\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase9\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase9\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase9\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase9\\SESTRA.MLG\n", + "Info: Download completed.\n", + "Info: The status of work item 'LoadCase10' is 'Completed'\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase10\\stderr.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase10\\stderr.txt\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase10\\stdout.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase10\\stdout.txt\n", + "Info: Downloading files from file:///d:/oneworkflowTmp/oc_nbrfab12_blob/SesamCoreExample/LoadCases/LoadCase10 to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase10\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase10\\SesamCore_Specimen1_HotSpotPlate.lis to 
d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase10\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase10\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase10\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase10\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase10\\SESTRA.MLG\n", + "Info: Download completed.\n", + "Info: The status of work item 'LoadCase11' is 'Completed'\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase11\\stderr.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase11\\stderr.txt\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase11\\stdout.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase11\\stdout.txt\n", + "Info: Downloading files from file:///d:/oneworkflowTmp/oc_nbrfab12_blob/SesamCoreExample/LoadCases/LoadCase11 to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase11\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase11\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase11\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase11\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase11\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase11\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase11\\SESTRA.MLG\n", + "Info: Download completed.\n", + "Info: The 
status of work item 'LoadCase12' is 'Completed'\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase12\\stderr.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase12\\stderr.txt\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase12\\stdout.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase12\\stdout.txt\n", + "Info: Downloading files from file:///d:/oneworkflowTmp/oc_nbrfab12_blob/SesamCoreExample/LoadCases/LoadCase12 to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase12\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase12\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase12\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase12\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase12\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase12\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase12\\SESTRA.MLG\n", + "Info: Download completed.\n", + "Info: The status of work item 'LoadCase13' is 'Completed'\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase13\\stderr.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase13\\stderr.txt\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\Logs\\LoadCase13\\stdout.txt to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase13\\stdout.txt\n", + "Info: Downloading files from file:///d:/oneworkflowTmp/oc_nbrfab12_blob/SesamCoreExample/LoadCases/LoadCase13 to 
d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase13\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase13\\SesamCore_Specimen1_HotSpotPlate.lis to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase13\\SesamCore_Specimen1_HotSpotPlate.lis\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase13\\SCORE.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase13\\SCORE.MLG\n", + "Downloading d:\\oneworkflowTmp\\oc_nbrfab12_blob\\SesamCoreExample\\LoadCases\\LoadCase13\\SESTRA.MLG to d:\\source\\improveflowH4-workflow\\SesamCoreExample\\Workspace\\LoadCases\\LoadCase13\\SESTRA.MLG\n", + "Info: Download completed.\n", + "Info: Job ID: a93ba4a4-b4c1-4ce6-88ed-52cc27774f7e. Job Status: Completed. Job Progress: 100%. Job Message: Job a93ba4a4-b4c1-4ce6-88ed-52cc27774f7e completed with status Completed.\n" + ] + } + ], "source": [ - "import pandas as pd\n", "import os\n", - "from dnv.oneworkflow import ParallelWork\n", - "from dnv.onecompute.flowmodel import WorkUnit\n", "from dnv.sesam.sesam_core_command import *\n", - "from dnv.oneworkflow import PythonCommand, CompositeExecutableCommand\n", - "import json\n", "import shutil\n", - "from dnv.oneworkflow.utils.workunit_extension import with_shared_files_copied_to_loadcase\n", - "\n", "# we must delete existing results locally before generating new results\n", "local__result_path = Path(workspacePath, workflow_client.results_directory)\n", "if os.path.isdir(local__result_path):\n", " shutil.rmtree(local__result_path) \n", "\n", - "parallel_work = ParallelWork()\n", + "commands_info = []\n", "for index in range(1,14): # iterating over two simple cases, they now will do the same analysis\n", " loadcase_folder_name = f\"LoadCase{index}\"\n", - " result_folder_lc = os.path.join(workflow_client.results_directory, loadcase_folder_name)\n", - " 
python_copy_command = PythonCommand(\n", - " directory=workflow_client.common_directory)\n", + " print(\"Processing \" + loadcase_folder_name)\n", " core_command = SesamCoreCommand(command = \"fatigue\",input_file_name= \"Specimen1_input.json\", options = \"-hs\")\n", - " cmd = CompositeExecutableCommand([core_command], result_folder_lc)\n", - " work_unit = (WorkUnit(cmd, loadcase_folder_name)\n", - " .output_directory(result_folder_lc, include_files=[\"**/*.txt\", \"**/*.MLG\", \"**/*.lis\", \"**/*.sin\", \"*.log\"])\n", - " .with_shared_files_copied_to_loadcase(workflow_client.common_directory, [\"**/*.py\"])\n", - " )\n", - " parallel_work.WorkItems.append(work_unit)\n", - "downloadOptions = FileOptions(max_size=\"10MB\",patterns=[\"**/*.txt\", \"**/*.lis\", \"**/*.MLG\"])\n", - "job = workflow_client.create_job(parallel_work)\n", - "#for debugging\n", - "#job_json = json.dumps(job, default=lambda o: o.encode(), indent=4)\n", - "#print(job_json)\n", - "await run_workflow_async(job, workflow_client, downloadOptions)\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### CLose client -must be done before a new job can be started in a different notebook\n", - "Will remove all job and blob folders." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "workflow_client.stop_workflow_runtime_service()" + " cmd_info = CommandInfo(commands=[core_command],load_case_foldername=loadcase_folder_name)\n", + " commands_info.append(cmd_info)\n", + "print(\"Running commands in parallel\")\n", + "await run_managed_commands_parallelly_async(\n", + " client=workflow_client,\n", + " commands_info=commands_info,\n", + " files_to_download_from_blob_to_client=FileOptions(max_size=\"11124MB\",patterns=[\"**/*.txt\", \"**/*.lis\", \"**/*.MLG\"]),\n", + " enable_common_files_copy_to_load_cases=True,\n", + " )" ] }, { diff --git a/SesamCoreExample/SesamcoreExample.py b/SesamCoreExample/SesamcoreExample.py new file mode 100644 index 0000000..da3d27d --- /dev/null +++ b/SesamCoreExample/SesamcoreExample.py @@ -0,0 +1,49 @@ +from pathlib import Path +import os +from dnv.oneworkflow.utils.workunit_extension import * +from dnv.oneworkflow.utils.starter import * +from dnv.onecompute.directory_client import FileOptions +from dnv.oneworkflow import ParallelWork +from dnv.onecompute.flowmodel import WorkUnit +from dnv.sesam.sesam_core_command import * +from dnv.oneworkflow import PythonCommand, CompositeExecutableCommand +import shutil +import json +# local workspace, all results will be put here after local or cloud runs +# location of common files for all analysis, has to be below workspacePath and in the folder names CommonFiles +root_folder = os.path.dirname(os.path.abspath(__file__)) +workspacePath = str(Path(root_folder, 'Workspace')) +workspaceId = "SesamCoreExample" + +cloudRun = False +oneWorkflowTMPFolder = r'd:\oneworkflowTmp' #due to possible issues with long file paths we prefer to have this folder at the root +if not os.path.exists(oneWorkflowTMPFolder): + try: + print("Trying to create tmp folder for one workflow local execution") + os.mkdir(oneWorkflowTMPFolder) + print(oneWorkflowTMPFolder + " created!\n") + 
except: + print("did not manage to create tmp folder for local execution. Check that you have privileges to create it or try to manually create it from the command line.") +#If running locally the code below will also start the local workflow host. +workflow_client = one_workflow_client(workspace_id = workspaceId, cloud_run = cloudRun, workspace_path = workspacePath, local_workflow_runtime_temp_folder_path = oneWorkflowTMPFolder, + local_workflow_runtime_temp_folders_cleanup=False,environment=Environment.Testing) +# we must delete existing results locally before generating new results +local__result_path = Path(workspacePath, workflow_client.results_directory) +print(local__result_path) +if os.path.isdir(local__result_path): + shutil.rmtree(local__result_path) +commands_info = [] +for index in range(1,14): # iterating over two simple cases, they now will do the same analysis + loadcase_folder_name = f"LoadCase{index}" + print("Processing " + loadcase_folder_name) + core_command = SesamCoreCommand(command = "fatigue",input_file_name= "Specimen1_input.json", options = "-hs") + cmd_info = CommandInfo(commands=[core_command],load_case_foldername=loadcase_folder_name) + commands_info.append(cmd_info) + +asyncio.run(run_managed_commands_parallelly_async( + client=workflow_client, + commands_info=commands_info, + files_to_download_from_blob_to_client=FileOptions(max_size="11124MB",patterns=["**/*.txt", "**/*.lis", "**/*.MLG", "**/*.*"]), + enable_common_files_copy_to_load_cases=True, + )) + \ No newline at end of file diff --git a/installation.ipynb b/installation.ipynb index 4e715c2..6bacb8e 100644 --- a/installation.ipynb +++ b/installation.ipynb @@ -164,7 +164,7 @@ "outputs": [], "source": [ "from dnv.oneworkflow.utils.starter import *\n", - "await install_workflow_runtime(repository = PackageManager.Repository.DEV)" + "await install_workflow_runtime(repository = Repository.DEV)\n" ] } ], diff --git a/pyproject.toml b/pyproject.toml index 60d5bcf..a7f2c10 100644 ---
a/pyproject.toml +++ b/pyproject.toml @@ -16,9 +16,9 @@ dnv-net-runtime = "^1.0.0.39" dnv-net-intellisense = "^1.0.0.31" dnv-sifio = "^5.3.2.994" quantconnect-stubs = "^15851" -dnv-onecompute = "^8.3.0.22" -dnv-oneworkflow = "^1.0.0.939" -dnv-sesam-commands = "^5.12.0.192" +dnv-onecompute = "^8.4.0.88" +dnv-oneworkflow = "^1.0.0.1204" +dnv-sesam-commands = "^5.14.0.17" [tool.poetry.group.dev.dependencies] pytest = "^7.4.2"