From eb3417b97d8bee76bbdc97e01722dcc06f072e65 Mon Sep 17 00:00:00 2001
From: dkunhamb
Date: Mon, 18 Aug 2025 09:53:10 -0500
Subject: [PATCH 1/3] draft

---
 mapdl-dpf/README.rst            |   4 +
 mapdl-dpf/requirements_24.1.txt |   2 +
 mapdl-dpf/requirements_24.2.txt |   3 +
 mapdl-dpf/requirements_25.1.txt |   3 +
 mapdl-dpf/wf_mapdl-dpf.py       | 350 ++++++++++++++++++++++++++++++++
 5 files changed, 362 insertions(+)
 create mode 100644 mapdl-dpf/README.rst
 create mode 100644 mapdl-dpf/requirements_24.1.txt
 create mode 100644 mapdl-dpf/requirements_24.2.txt
 create mode 100644 mapdl-dpf/requirements_25.1.txt
 create mode 100644 mapdl-dpf/wf_mapdl-dpf.py

diff --git a/mapdl-dpf/README.rst b/mapdl-dpf/README.rst
new file mode 100644
index 00000000..42f1490c
--- /dev/null
+++ b/mapdl-dpf/README.rst
@@ -0,0 +1,4 @@
+TBD
+===
+
+TBD
diff --git a/mapdl-dpf/requirements_24.1.txt b/mapdl-dpf/requirements_24.1.txt
new file mode 100644
index 00000000..b328588a
--- /dev/null
+++ b/mapdl-dpf/requirements_24.1.txt
@@ -0,0 +1,2 @@
+
+ansys-dpf-core[plotting]==0.12.2
diff --git a/mapdl-dpf/requirements_24.2.txt b/mapdl-dpf/requirements_24.2.txt
new file mode 100644
index 00000000..f431217a
--- /dev/null
+++ b/mapdl-dpf/requirements_24.2.txt
@@ -0,0 +1,3 @@
+ansys-mechanical-core==0.11.10
+ansys-fluent-core==0.26.1
+matplotlib==3.10.0
diff --git a/mapdl-dpf/requirements_25.1.txt b/mapdl-dpf/requirements_25.1.txt
new file mode 100644
index 00000000..15189f80
--- /dev/null
+++ b/mapdl-dpf/requirements_25.1.txt
@@ -0,0 +1,3 @@
+
+ansys-dpf-core[plotting]==0.12.2
+ansys.mapdl.core==0.64.0
\ No newline at end of file
diff --git a/mapdl-dpf/wf_mapdl-dpf.py b/mapdl-dpf/wf_mapdl-dpf.py
new file mode 100644
index 00000000..a2bd1615
--- /dev/null
+++ b/mapdl-dpf/wf_mapdl-dpf.py
@@ -0,0 +1,350 @@
+# Copyright (C) 2024 - 2025 ANSYS, Inc. and/or its affiliates.
+# SPDX-License-Identifier: MIT
+#
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""
+.. _global-local_1:
+
+Consecutive submodeling with MAPDL pool
+---------------------------------------
+Problem description:
+    - In this example we demonstrate how to use MAPDL pool to
+      perform a consecutive submodeling simulation.
+
+Analysis type:
+    - Static Analysis
+
+Material properties:
+    - Young's modulus, :math:`E = 200 \, GPa`
+    - Poisson's ratio, :math:`\mu = 0.3`
+
+Boundary conditions (global model):
+    - Fixed support applied at the bottom side
+    - Frictionless support applied at the right side
+
+Loading:
+    - Total displacement of –1 mm in the Y‑direction at the top surface, ramped linearly over 10 timesteps
+
+.. image:: ../_static/bvp.png
+   :width: 500
+   :alt: Problem Sketch
+
+Modeling notes:
+    - At each timestep, the global model is solved with the specified boundary conditions;
+      the resulting nodal displacements are interpolated to the boundary nodes of the local model using the DPF
+      interpolation operator. Those displacements are enforced as constraints on the local model, which is then solved,
+      completing that timestep.
+"""
+
+import os
+import shutil
+import time as tt
+
+from ansys.dpf import core as dpf
+from ansys.mapdl.core import MapdlPool
+import numpy as np
+
+###############################################################################
+# Create directories to save the results
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+folders = ["./Output/Common", "./Output/Global", "./Output/Local"]
+for fdr in folders:
+    try:
+        shutil.rmtree(fdr, ignore_errors=True)
+        os.makedirs(fdr)
+    except:
+        pass
+
+###############################################################################
+# Create Mapdl pool
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# We use the ``MapdlPool`` class to create two separate instances — one dedicated to
+# the global simulation and the other to the local simulation.
+
+exec_file = "C:/Program Files/ANSYS Inc/v251/ansys/bin/winx64/ANSYS251.exe"
+nCores = 2
+pool = MapdlPool(2, run_location="./Output/Common", nproc=nCores, exec_file=exec_file)
+
+###############################################################################
+# Set up Global and Local FE models
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# We assign the instances to the local and global models, then use ``mapdl.cdread`` to load their geometry and mesh.
+# Note that the .cdb files include named selections for the faces to which we want to apply the boundary conditions and the loads.
+# The function ``define_BCs`` defines the global model's boundary conditions and applied loads.
+# The function ``Get_boundary`` is used to record the local model's cut-boundary node coordinates as a DPF field,
+# which will later be used as the DPF interpolator input.
+
+mapdl_global = pool[0]  # Global model
+mapdl_global.cdread("db", "global.cdb")  # Load global model
+mapdl_global.cwd(os.getcwd() + "./Output/Global")  # Set directory of the global model
+
+mapdl_local = pool[1]  # Local model
+mapdl_local.cdread("db", "local.cdb")  # Load local model
+mapdl_local.cwd(os.getcwd() + "./Output/Local")  # Set directory of the local model
+
+
+def define_BCs(mapdl):
+    # Enter PREP7 in MAPDL
+    mapdl.prep7()
+
+    # In the .cdb file for the global model, the bottom, right, and top faces
+    # are saved as named selections
+
+    # Fixed support
+    mapdl.cmsel("S", "BOTTOM_SIDE", "NODE")  # Select bottom face
+    mapdl.d("ALL", "ALL")
+    mapdl.nsel("ALL")
+
+    # Frictionless support
+    mapdl.cmsel("S", "RIGHT_SIDE", "NODE")  # Select right face
+    mapdl.d("ALL", "UZ", "0")
+    mapdl.nsel("ALL")
+
+    # Applied load
+    # Ramped Y‑direction displacement of –1 mm is applied on the top face over 10 time steps
+    mapdl.dim("LOAD", "TABLE", "3", "1", "1", "TIME", "", "", "0")
+    mapdl.taxis("LOAD(1)", "1", "0.", "1.", "10.")
+    mapdl.starset("LOAD(1,1,1)", "0.")
+    mapdl.starset("LOAD(2,1,1)", "-0.1")
+    mapdl.starset("LOAD(3,1,1)", "-1.")
+
+    mapdl.cmsel("S", "TOP_SIDE", "NODE")  # Select top face
+    mapdl.d("ALL", "UY", "%LOAD%")
+    mapdl.nsel("ALL")
+
+    # Exit PREP7
+    mapdl.finish()
+    pass
+
+
+def Get_boundary(mapdl):
+    # Enter PREP7 in MAPDL
+    mapdl.prep7()
+
+    # In the .cdb file for the local model the boundary faces are saved as
+    # named selections
+
+    mapdl.nsel("all")
+    nodes = mapdl.mesh.nodes  # All nodes
+    node_id_all = mapdl.mesh.nnum  # All nodes ID
+    mapdl.cmsel("S", "boundary", "NODE")  # Select all boundary faces
+    node_id_subset = mapdl.get_array("NODE", item1="NLIST").astype(int)  # Boundary nodes ID
+    map_ = dict(zip(node_id_all, list(range(len(node_id_all)))))
+
+    mapdl.nsel("NONE")
+    boundary_coordinates = dpf.fields_factory.create_3d_vector_field(
+        num_entities=len(node_id_subset), location="Nodal"
+    )  # Define DPF field for DPF interpolator input
+
+    nsel = ""
+    for nid in node_id_subset:  # Iterate boundary nodes of the local model
+        nsel += "nsel,A,NODE,,{}\n".format(
+            nid
+        )  # Add selection command for the node to the string (only for plotting)
+        boundary_coordinates.append(nodes[map_[nid]], nid)  # Add node to the DPF field
+
+    # Select all boundary nodes (only for plotting)
+    mapdl.input_strings(nsel)
+
+    # Plot boundary nodes of the local model
+    mapdl.nplot(background="w", color="b", show_bounds=True, title="Constrained nodes")
+
+    # Exit PREP7
+    mapdl.finish()
+    return boundary_coordinates
+
+
+# Define the boundary conditions and the loading for the global model
+define_BCs(mapdl_global)
+
+# Get the DPF field with the boundary nodes of the local model
+boundary_coords = Get_boundary(mapdl_local)
+
+###############################################################################
+# Set up DPF operators
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# We define two DPF operators: the first reads the displacement results from the global model,
+# and the second interpolates those displacements onto the boundary coordinates of the local model.
+# The ``DataSources`` class is used to link the result files with the DPF operator inputs.
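Because the global model is solved with ``nCores`` distributed-memory processes, MAPDL writes one result file per domain (``file0.rst``, ``file1.rst``, ...); ``define_dpf_operators`` below therefore registers each file with its own ``domain_id``. As an optional sanity check (a sketch only, and it assumes the global solve has already produced the files), the combined sources can be opened directly to confirm which results and time steps are available:

# Optional sketch: confirm the distributed global result files are readable.
check_sources = dpf.DataSources()
for i in range(nCores):
    check_sources.set_domain_result_file_path(
        path="./Output/Global/file{}.rst".format(i), key="rst", domain_id=i
    )
check_model = dpf.Model(check_sources)
print(check_model.metadata.result_info)        # available result types (displacement, stress, ...)
print(check_model.metadata.time_freq_support)  # time steps stored in the files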
+
+
+def define_dpf_operators(nCores):
+    # Define the DataSources class and link it to the results of the global model
+    dataSources = dpf.DataSources()
+    rst = r".\Output\Global\file{}.rst"
+    for i in range(nCores):
+        dataSources.set_domain_result_file_path(path=rst.format(i), key="rst", domain_id=i)
+
+    global_model = dpf.Model(dataSources)
+    global_disp_op = (
+        dpf.operators.result.displacement()
+    )  # Define displacement result operator to read nodal displacements
+    global_disp_op.inputs.data_sources.connect(
+        dataSources
+    )  # Connect displacement result operator with the global model's results file
+    disp_interpolator = (
+        dpf.operators.mapping.on_coordinates()
+    )  # Define interpolator to interpolate the results inside the mesh elements with shape functions
+    return global_model, global_disp_op, disp_interpolator
+
+
+def initialize_dpf_interpolator(
+    global_model,
+    local_Bc_coords,
+    disp_interpolator,
+):
+    my_mesh = global_model.metadata.meshed_region  # Global model's mesh
+    disp_interpolator.inputs.coordinates.connect(
+        local_Bc_coords
+    )  # Link interpolator inputs with the local model's boundary coordinates
+    disp_interpolator.inputs.mesh.connect(
+        my_mesh
+    )  # Link interpolator mesh with the global model's mesh
+
+
+def interpolate_data(timestep):
+    global_disp_op.inputs.time_scoping.connect(
+        [timestep]
+    )  # Specify timestep value to read results from
+    global_disp = (
+        global_disp_op.outputs.fields_container.get_data()
+    )  # Read global nodal displacements
+
+    disp_interpolator.inputs.fields_container.connect(
+        global_disp
+    )  # Link the interpolation data with the interpolator
+    local_disp = disp_interpolator.outputs.fields_container.get_data()[
+        0
+    ]  # Get displacements of the boundary nodes of the local model
+    return local_disp
+
+
+# Define the two DPF operators
+global_model, global_disp_op, disp_interpolator = define_dpf_operators(nCores)
+
+###############################################################################
+# Set up simulation loop
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# We solve the two models sequentially for each loading step. First, the global model is run, producing
+# a .rst results file. Then we extract the global displacements and use them to define
+# cut-boundary conditions for the local model (a single input string command is used for faster execution).
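For illustration, the constraint template defined next expands to plain MAPDL ``D`` commands, one per constrained degree of freedom; sending them all in a single ``input_strings`` call avoids one client-server round trip per command, which matters when the cut boundary contains many nodes. With a hypothetical node 42 and an interpolated displacement of (1.0e-4, -2.0e-3, 0.0), the generated block would read:

# Illustration only (hypothetical node ID and displacement values).
single_node = "d,{nid},ux,{ux:1.6e}\nd,{nid},uy,{uy:1.6e}\nd,{nid},uz,{uz:1.6e}\n"
print(single_node.format(nid=42, ux=1.0e-4, uy=-2.0e-3, uz=0.0))
# d,42,ux,1.000000e-04
# d,42,uy,-2.000000e-03
# d,42,uz,0.000000e+00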
+
+
+def define_cut_boundary_constraint_template(local_Bc_coords):
+    # Define template of input string command to apply the displacement constraints
+    local_nids = local_Bc_coords.scoping.ids  # Get Node ID of boundary nodes of the local model
+    template = ""
+    for nid in local_nids:
+        template += (
+            "d,"
+            + str(nid)
+            + ",ux,{:1.6e}\nd,"
+            + str(nid)
+            + ",uy,{:1.6e}\nd,"
+            + str(nid)
+            + ",uz,{:1.6e}\n"
+        )
+    return template
+
+
+def solve_global_local(mapdl_global, mapdl_local, timesteps, local_Bc_coords):
+
+    # Enter solution processor
+    mapdl_global.solution()
+    mapdl_local.solution()
+
+    # Static analysis
+    mapdl_global.antype("STATIC")
+    mapdl_local.antype("STATIC")
+
+    constraint_template = define_cut_boundary_constraint_template(local_Bc_coords)
+
+    for i in range(1, timesteps + 1):  # Iterate timesteps
+        print(f"Timestep: {i}")
+        st = tt.time()
+        # Set loadstep time for the global model
+        mapdl_global.time(i)
+        # No extrapolation
+        mapdl_global.eresx("NO")
+        mapdl_global.allsel("ALL")
+        # Write ALL results to database
+        mapdl_global.outres("ALL", "ALL")
+        # Solve global model
+        mapdl_global.solve()
+        print("Global solve took ", tt.time() - st)
+
+        # Initialize interpolator
+        if i == 1:
+            initialize_dpf_interpolator(global_model, local_Bc_coords, disp_interpolator)
+        # Read & Interpolate displacement data
+        local_disp = interpolate_data(timestep=i)
+        # Run MAPDL input string command to apply the displacement constraints
+        data_array = np.array(local_disp.data).flatten()
+        mapdl_local.input_strings(constraint_template.format(*data_array))
+
+        st = tt.time()
+        mapdl_local.allsel("ALL")
+        # Set loadstep time for the local model
+        mapdl_local.time(i)
+        # No extrapolation
+        mapdl_local.eresx("NO")
+        # Write ALL results to database
+        mapdl_local.outres("ALL", "ALL")
+        # Solve local model
+        mapdl_local.solve()
+        print("Local solve took ", tt.time() - st)
+
+    # Exit solution processor
+    mapdl_global.finish()
+    mapdl_local.finish()
+
+
+###############################################################################
+# Solve system
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+n_steps = 10  # Number of timesteps
+solve_global_local(mapdl_global, mapdl_local, n_steps, boundary_coords)
+
+###############################################################################
+# Visualize results
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+def visualize(mapdl):
+    # Enter post-processing
+    mapdl.post1()
+    # Set the current results set to the last set to be read from result file
+    mapdl.set("LAST")
+    # Plot nodal displacement of the loading direction
+    mapdl.post_processing.plot_nodal_displacement("Y", cmap="jet", background="w", cpos="zy")
+    # Exit post-processing
+    mapdl.finish()
+
+
+# Plot Y displacement of global model
+visualize(mapdl_global)
+
+# Plot Y displacement of local model
+visualize(mapdl_local)
+
+# Exit MAPDL pool instances
+pool.exit()

From 498eb2d1dcfa7811daeba823e6dabf5a87ee26b0 Mon Sep 17 00:00:00 2001
From: dkunhamb
Date: Mon, 18 Aug 2025 10:15:45 -0500
Subject: [PATCH 2/3] draft

---
 .github/workflows/docs.yml      |  12 +-
 .github/workflows/mapdl-dpf.yml | 232 ++++++++++++++++++++++++++++++++
 2 files changed, 241 insertions(+), 3 deletions(-)
 create mode 100644 .github/workflows/mapdl-dpf.yml

diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 55551e0f..f15732ae 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -14,23 +14,29 @@ jobs:
     uses: ./.github/workflows/geometry-mesh.yml
     secrets:
inherit with: - doc-build: true + doc-build: false geometry-mesh-fluent: uses: ./.github/workflows/geometry-mesh-fluent.yml secrets: inherit with: - doc-build: true + doc-build: false geometry-mechanical-dpf: uses: ./.github/workflows/geometry-mechanical-dpf.yml secrets: inherit with: - doc-build: true + doc-build: false fluent-mechanical: uses: ./.github/workflows/fluent-mechanical.yml secrets: inherit + with: + doc-build: false + + mapdl-dpf: + uses: ./.github/workflows/mapdl-dpf.yml + secrets: inherit with: doc-build: true diff --git a/.github/workflows/mapdl-dpf.yml b/.github/workflows/mapdl-dpf.yml new file mode 100644 index 00000000..8a7240b4 --- /dev/null +++ b/.github/workflows/mapdl-dpf.yml @@ -0,0 +1,232 @@ +name: MAPDL-DPF Workflow + +on: + workflow_dispatch: + inputs: + doc-build: + required: false + default: false + type: boolean + description: 'Whether to build the documentation' + workflow_call: + inputs: + doc-build: + required: false + default: false + type: boolean + description: 'Whether to build the documentation' + push: + branches: + - main + pull_request: + paths: + - 'fluent-mechanical/**' + +env: + MAIN_PYTHON_VERSION: '3.12' + FLUENT_DOCKER_IMAGE: 'ghcr.io/ansys/pyfluent' + MECHANICAL_DOCKER_IMAGE: 'ghcr.io/ansys/mechanical' + DOCKER_MECH_CONTAINER_NAME: mechanical + PYMECHANICAL_PORT: 10000 + ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER )}} + PYANSYS_WORKFLOWS_CI: true + ANSYS_RELEASE_FOR_DOCS: 25.1 + RUN_DOC_BUILD: false + PYMECHANICAL_START_INSTANCE: false + +jobs: + fluent: + name: Fluent + runs-on: public-ubuntu-latest-8-cores + strategy: + fail-fast: false + matrix: + ansys-release: [24.1, 24.2, 25.1] + steps: + + - name: Checkout code + uses: actions/checkout@v4 + with: + sparse-checkout: | + fluent-mechanical + doc + + - name: Set up Python ${{ env.MAIN_PYTHON_VERSION }} + uses: actions/setup-python@v5 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r fluent-mechanical/requirements_${{ matrix.ansys-release }}.txt + + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Download Fluent service container + run: docker pull ${{ env.FLUENT_DOCKER_IMAGE }}:v${{ matrix.ansys-release }}.0 + + - name: Run the Fluent script + env: + FLUENT_IMAGE_TAG: v${{ matrix.ansys-release }}.0 + run: | + python fluent-mechanical/wf_fm_01_fluent.py + + - name: Store the outputs + uses: actions/upload-artifact@v4 + with: + name: fluent-mechanical-workflow-fluent-outputs-${{ matrix.ansys-release }} + path: | + fluent-mechanical/outputs/htc_temp_mapping_LOW_TEMP.csv + fluent-mechanical/outputs/htc_temp_mapping_MEDIUM_TEMP.csv + fluent-mechanical/outputs/htc_temp_mapping_HIGH_TEMP.csv + + - name: Stop all containers (if any) + run: | + if [ -n "$(docker ps -a -q)" ]; then + docker rm -f $(docker ps -a -q) + fi + + - name: (DOCS) Check if docs should be built + if: (github.event_name == 'workflow_dispatch' || github.event_name == 'schedule') && inputs.doc-build + run: | + echo "Requested to build docs..." 
+ if [ "${{ matrix.ansys-release }}" == "${{ env.ANSYS_RELEASE_FOR_DOCS }}" ]; then + echo "Building docs" + echo "RUN_DOC_BUILD=true" >> $GITHUB_ENV + else + echo "Not building docs - since not primary release" + echo "RUN_DOC_BUILD=false" >> $GITHUB_ENV + fi + + - name: (DOCS) Build the documentation (only on ${{ env.ANSYS_RELEASE_FOR_DOCS}}) + if: ${{ env.RUN_DOC_BUILD == 'true' }} + env: + FLUENT_IMAGE_TAG: v${{ matrix.ansys-release }}.0 + BUILD_DOCS_SCRIPT: 'fluent-mechanical/wf_fm_01_fluent.py' + run: | + cd doc + pip install -r requirements.txt + make html + + - name: (DOCS) Upload docs artifacts + if: ${{ env.RUN_DOC_BUILD == 'true' }} + uses: actions/upload-artifact@v4 + with: + name: fluent-mechanical-docs-stage-fluent + path: | + doc/_build/ + doc/source/examples/fluent-mechanical/ + overwrite: true + + mechanical: + name: Mechanical + runs-on: [public-ubuntu-latest-8-cores] + needs: fluent + strategy: + fail-fast: false + matrix: + ansys-release: [24.1, 24.2, 25.1] + steps: + + - name: Checkout code + uses: actions/checkout@v4 + with: + sparse-checkout: | + fluent-mechanical + doc + + - name: Set up Python ${{ env.MAIN_PYTHON_VERSION }} + uses: actions/setup-python@v5 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + + - name: Install system dependencies + run: | + sudo apt-get update + sudo apt-get install -y nodejs npm graphviz xvfb + npm install -g @mermaid-js/mermaid-cli + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + python -m venv .venv + . .venv/bin/activate + pip install -r fluent-mechanical/requirements_${{ matrix.ansys-release }}.txt + + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Download (if needed) launch, and validate Mechanical service + env: + LICENSE_SERVER: ${{ secrets.LICENSE_SERVER }} + MECHANICAL_IMAGE: ${{ env.MECHANICAL_DOCKER_IMAGE }}:${{ matrix.ansys-release }}.0 + run: | + docker pull ${{ env.MECHANICAL_IMAGE }} + docker run --restart always --name ${{ env.DOCKER_MECH_CONTAINER_NAME }} -e ANSYSLMD_LICENSE_FILE=1055@${{ env.LICENSE_SERVER }} -p ${{ env.PYMECHANICAL_PORT }}:10000 ${{ env.MECHANICAL_IMAGE }} > log.txt & + grep -q 'WB Initialize Done' <(timeout 60 tail -f log.txt) + + - name: Check out the fluent outputs + uses: actions/download-artifact@v4 + with: + name: fluent-mechanical-workflow-fluent-outputs-${{ matrix.ansys-release }} + path: fluent-mechanical/outputs + + - name: Run the PyMechanical script + run: | + . .venv/bin/activate + xvfb-run python fluent-mechanical/wf_fm_02_mechanical.py + + - name: Store the outputs + uses: actions/upload-artifact@v4 + with: + name: fluent-mechanical-workflow-mechanical-outputs-${{ matrix.ansys-release }} + path: fluent-mechanical/outputs + + - name: (DOCS) Check if docs should be built + if: (github.event_name == 'workflow_dispatch' || github.event_name == 'schedule') && inputs.doc-build + run: | + echo "Requested to build docs..." 
+ if [ "${{ matrix.ansys-release }}" = "${{ env.ANSYS_RELEASE_FOR_DOCS }}" ]; then + echo "Building docs" + echo "RUN_DOC_BUILD=true" >> $GITHUB_ENV + else + echo "Not building docs - since not primary release" + echo "RUN_DOC_BUILD=false" >> $GITHUB_ENV + fi + + - name: (DOCS) Download the docs artifacts + uses: actions/download-artifact@v4 + if: ${{ env.RUN_DOC_BUILD == 'true' }} + with: + name: fluent-mechanical-docs-stage-fluent + path: doc + + - name: (DOCS) Build the documentation (only on ${{ env.ANSYS_RELEASE_FOR_DOCS }}) + if: ${{ env.RUN_DOC_BUILD == 'true' }} + env: + BUILD_DOCS_SCRIPT: 'fluent-mechanical/wf_fm_02_mechanical.py' + run: | + . .venv/bin/activate + cd doc + pip install -r requirements.txt + xvfb-run make html + + - name: (DOCS) Upload docs artifacts + if: ${{ env.RUN_DOC_BUILD == 'true' }} + uses: actions/upload-artifact@v4 + with: + name: fluent-mechanical-docs + path: | + doc/_build/ + doc/source/examples/fluent-mechanical/ + overwrite: true \ No newline at end of file From 3a2fa8fb318f6b4085b2f7117779fd0fa94581d7 Mon Sep 17 00:00:00 2001 From: clatapie <78221213+clatapie@users.noreply.github.com> Date: Tue, 19 Aug 2025 18:09:17 +0200 Subject: [PATCH 3/3] feat: applying latest changes --- mapdl-dpf/requirements_24.1.txt | 4 +- mapdl-dpf/requirements_24.2.txt | 5 +- mapdl-dpf/requirements_25.1.txt | 5 +- mapdl-dpf/wf_mapdl-dpf.py | 307 +++++++++++++------------------- 4 files changed, 131 insertions(+), 190 deletions(-) diff --git a/mapdl-dpf/requirements_24.1.txt b/mapdl-dpf/requirements_24.1.txt index b328588a..cb53bc1b 100644 --- a/mapdl-dpf/requirements_24.1.txt +++ b/mapdl-dpf/requirements_24.1.txt @@ -1,2 +1,2 @@ - -ansys-dpf-core[plotting]==0.12.2 +ansys-mapdl-core[graphics]==0.70.2 +ansys-dpf-core==0.14.1 diff --git a/mapdl-dpf/requirements_24.2.txt b/mapdl-dpf/requirements_24.2.txt index f431217a..cb53bc1b 100644 --- a/mapdl-dpf/requirements_24.2.txt +++ b/mapdl-dpf/requirements_24.2.txt @@ -1,3 +1,2 @@ -ansys-mechanical-core==0.11.10 -ansys-fluent-core==0.26.1 -matplotlib==3.10.0 +ansys-mapdl-core[graphics]==0.70.2 +ansys-dpf-core==0.14.1 diff --git a/mapdl-dpf/requirements_25.1.txt b/mapdl-dpf/requirements_25.1.txt index 15189f80..cb53bc1b 100644 --- a/mapdl-dpf/requirements_25.1.txt +++ b/mapdl-dpf/requirements_25.1.txt @@ -1,3 +1,2 @@ - -ansys-dpf-core[plotting]==0.12.2 -ansys.mapdl.core==0.64.0 \ No newline at end of file +ansys-mapdl-core[graphics]==0.70.2 +ansys-dpf-core==0.14.1 diff --git a/mapdl-dpf/wf_mapdl-dpf.py b/mapdl-dpf/wf_mapdl-dpf.py index a2bd1615..dbb40381 100644 --- a/mapdl-dpf/wf_mapdl-dpf.py +++ b/mapdl-dpf/wf_mapdl-dpf.py @@ -1,24 +1,3 @@ -# Copyright (C) 2024 - 2025 ANSYS, Inc. and/or its affiliates. -# SPDX-License-Identifier: MIT -# -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. """ .. _global-local_1: @@ -26,7 +5,7 @@ Consecutive submodeling with MAPDL pool ---------------------------- Problem description: - - In this example we demonstrate how to use MAPDL pool to + - In this example we demonstrate how to use MAPDL pool to perform a consecutive submodeling simulation. Analysis type: @@ -37,36 +16,36 @@ - Poissons ratio, :math:`\mu = 0.3` Boundary conditions (global model): - - Fixed support applied at the bottom side + - Fixed support applied at the bottom side - Frictionless support applied at the right side Loading: - - Total displacement of –1 mm in the Y‑direction at the top surface, ramped linearly over 10 timesteps + - Total displacement of -1 mm in the Y-direction at the top surface, ramped linearly over 10 timesteps .. image:: ../_static/bvp.png :width: 500 :alt: Problem Sketch Modeling notes: - - At each timestep, the global model is solved with the specified boundary conditions; + - At each timestep, the global model is solved with the specified boundary conditions; the resulting nodal displacements are interpolated to the boundary nodes of the local model, using the DPF - interpolation operator. Those displacements are enforced as constraints to the local model, which is then solved + interpolation operator. Those displacements are enforced as constraints to the local model, which is then solved completing that timestep. """ +import numpy as np import os import shutil -import time as tt - -from ansys.dpf import core as dpf from ansys.mapdl.core import MapdlPool -import numpy as np +from ansys.dpf import core as dpf +import time as tt +from pathlib import Path ############################################################################### # Create directories to save the results # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -folders = ["./Output/Common", "./Output/Global", "./Output/Local"] +folders = [ './Output/Common' , './Output/Global' , './Output/Local' ] for fdr in folders: try: shutil.rmtree(fdr, ignore_errors=True) @@ -76,103 +55,99 @@ ############################################################################### # Create Mapdl pool -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# We use the ``MapdlPool`` class to create two separate instances — one dedicated to -# the global simulation and the other to the local simulation +# ~~~~~~~~~~~~~~~~~ +# We use the ``MapdlPool`` class to create two separate instances — one dedicated to +# the global simulation and the other to the local simulation -exec_file = "C:/Program Files/ANSYS Inc/v251/ansys/bin/winx64/ANSYS251.exe" -nCores = 2 -pool = MapdlPool(2, run_location="./Output/Common", nproc=nCores, exec_file=exec_file) +nCores = 2 # Number of cores to use +pool = MapdlPool(2, nproc=nCores) ############################################################################### # Set up Global and Local FE models -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # We assign the instances to the local and global model, then use ``mapdl.cdread`` to load their geometry and mesh. -# Note the the .cdb files include named selections for the faces we want to apply the boundary conditions and the loads. 
-# The function ``define_BCs`` defines the global model’s boundary conditions and applied loads. +# Note the the .cdb files include named selections for the faces we want to apply the boundary conditions and the loads. +# The function ``define_BCs`` defines the global model’s boundary conditions and applied loads. # The function ``Get_boundary`` is used to record the local model’s cut-boundary node coordinates as a dpf.field # which will be later used in the DPF interpolator input -mapdl_global = pool[0] # Global model -mapdl_global.cdread("db", "global.cdb") # Load global model -mapdl_global.cwd(os.getcwd() + "./Output/Global") # Set directory of the global model +cwd = Path.cwd() # Get current working directory + -mapdl_local = pool[1] # Local model -mapdl_local.cdread("db", "local.cdb") # Load local model -mapdl_local.cwd(os.getcwd() + "./Output/Local") # Set directory of the local model +mapdl_global = pool[0] # Global model +mapdl_global.cdread('db','global.cdb') # Load global model +mapdl_global.cwd(cwd/Path('Output/Global')) # Set directory of the global model +mapdl_local = pool[1] # Local model +mapdl_local.cdread('db','local.cdb') # Load local model +mapdl_local.cwd(cwd/Path("Output/Local")) # Set directory of the local model def define_BCs(mapdl): # Enter PREP7 in MAPDL mapdl.prep7() # In the .cdb file for the global model the bottom, the right and the top faces - # are saved as named selections + # are saved as named selections # Fixed support - mapdl.cmsel("S", "BOTTOM_SIDE", "NODE") # Select bottom face - mapdl.d("ALL", "ALL") - mapdl.nsel("ALL") + mapdl.cmsel('S','BOTTOM_SIDE','NODE') # Select bottom face + mapdl.d('ALL','ALL') + mapdl.nsel('ALL') # Frictionless support - mapdl.cmsel("S", "RIGHT_SIDE", "NODE") # Select right face - mapdl.d("ALL", "UZ", "0") - mapdl.nsel("ALL") + mapdl.cmsel('S','RIGHT_SIDE','NODE') # Select right face + mapdl.d('ALL','UZ','0') + mapdl.nsel('ALL') # Applied load # Ramped Y‑direction displacement of –1 mm is applied on the top face over 10 time steps - mapdl.dim("LOAD", "TABLE", "3", "1", "1", "TIME", "", "", "0") - mapdl.taxis("LOAD(1)", "1", "0.", "1.", "10.") - mapdl.starset("LOAD(1,1,1)", "0.") - mapdl.starset("LOAD(2,1,1)", "-0.1") - mapdl.starset("LOAD(3,1,1)", "-1.") - - mapdl.cmsel("S", "TOP_SIDE", "NODE") # Select top face - mapdl.d("ALL", "UY", "%LOAD%") - mapdl.nsel("ALL") - + mapdl.dim('LOAD','TABLE','3','1','1','TIME', '', '', '0') + mapdl.taxis('LOAD(1)','1','0.','1.','10.') + mapdl.starset('LOAD(1,1,1)','0.') + mapdl.starset('LOAD(2,1,1)','-0.1') + mapdl.starset('LOAD(3,1,1)','-1.') + + mapdl.cmsel('S','TOP_SIDE','NODE') # Select top face + mapdl.d('ALL','UY','%LOAD%') + mapdl.nsel('ALL') + # Exit PREP7 mapdl.finish() pass - def Get_boundary(mapdl): # Enter PREP7 in MAPDL mapdl.prep7() - + # In the .cdb file for the local model the boundary faces are saved as # named selections - - mapdl.nsel("all") - nodes = mapdl.mesh.nodes # All nodes - node_id_all = mapdl.mesh.nnum # All nodes ID - mapdl.cmsel("S", "boundary", "NODE") # Select all boundary faces - node_id_subset = mapdl.get_array("NODE", item1="NLIST").astype(int) # Boundary nodes ID - map_ = dict(zip(node_id_all, list(range(len(node_id_all))))) - - mapdl.nsel("NONE") + + mapdl.nsel('all') + nodes = mapdl.mesh.nodes # All nodes + node_id_all = mapdl.mesh.nnum # All nodes ID + mapdl.cmsel('S','boundary','NODE') # Select all boundary faces + node_id_subset = mapdl.get_array('NODE', item1="NLIST").astype(int) # Boundary nodes ID + map_ = 
dict(zip(node_id_all,list(range(len(node_id_all))))) + + mapdl.nsel('NONE') boundary_coordinates = dpf.fields_factory.create_3d_vector_field( - num_entities=len(node_id_subset), location="Nodal" - ) # Define DPF field for DPF interpolator input - - nsel = "" - for nid in node_id_subset: # Iterate boundary nodes of the local model - nsel += "nsel,A,NODE,,{}\n".format( - nid - ) # Add selection command for the node to the str (only for ploting) - boundary_coordinates.append(nodes[map_[nid]], nid) # Add node to the DPF field + num_entities=len(node_id_subset),location='Nodal') # Define DPF field for DPF interpolator input + + nsel = '' + for nid in node_id_subset: # Iterate boundary nodes of the local model + nsel+='nsel,A,NODE,,{}\n'.format(nid) # Add selection command for the node to the str (only for ploting) + boundary_coordinates.append( nodes[ map_[nid] ] , nid ) # Add node to the DPF field # Select all boundary nodes (only for ploting) - mapdl.input_strings(nsel) - + mapdl.input_strings(nsel) + # Plot boundary nodes of the local model - mapdl.nplot(background="w", color="b", show_bounds=True, title="Constrained nodes") - + mapdl.nplot(background='w',color='b',show_bounds=True,title='Constrained nodes') + # Exit PREP7 mapdl.finish() return boundary_coordinates - # Define the boundary conditions and the loading for the global model define_BCs(mapdl_global) @@ -180,171 +155,139 @@ def Get_boundary(mapdl): boundary_coords = Get_boundary(mapdl_local) ############################################################################### -# Set up DPF operators +# Set up DPF operators # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# We define two dpf operators: the first reads the displacement results from the global model, +# We define two dpf operators: the first reads the displacement results from the global model, # and the second interpolates those displacements onto the boundary coordinates of the local model. # The ``DataSources`` class to link results with the DPF operator inputs. 
- def define_dpf_operators(nCores): # Define the DataSources class and link it to the results of the global model dataSources = dpf.DataSources() - rst = r".\Output\Global\file{}.rst" for i in range(nCores): - dataSources.set_domain_result_file_path(path=rst.format(i), key="rst", domain_id=i) - + dataSources.set_domain_result_file_path(path=Path(f"./Output/Global/file{i}.rst"), key='rst', domain_id=i) + global_model = dpf.Model(dataSources) - global_disp_op = ( - dpf.operators.result.displacement() - ) # Define displacement result operator to read nodal displacements - global_disp_op.inputs.data_sources.connect( - dataSources - ) # Connect displacement result operator with the global model's results file - disp_interpolator = ( - dpf.operators.mapping.on_coordinates() - ) # Define interpolator to interpolate the results inside the mesh elements with shape functions - return global_model, global_disp_op, disp_interpolator - - -def initialize_dpf_interpolator( - global_model, - local_Bc_coords, - disp_interpolator, -): - my_mesh = global_model.metadata.meshed_region # Global model's mesh - disp_interpolator.inputs.coordinates.connect( - local_Bc_coords - ) # Link interpolator inputs with the local model's boundary coordinates - disp_interpolator.inputs.mesh.connect( - my_mesh - ) # Link interpolator mesh with the global model's mesh - - + global_disp_op = dpf.operators.result.displacement() # Define displacement result operator to read nodal displacements + global_disp_op.inputs.data_sources.connect(dataSources) # Connect displacement result operator with the global model's results file + disp_interpolator = dpf.operators.mapping.on_coordinates() # Define interpolator to interpolate the results inside the mesh elements with shape functions + return global_model,global_disp_op,disp_interpolator + +def initialize_dpf_interpolator(global_model,local_Bc_coords,disp_interpolator,): + my_mesh = global_model.metadata.meshed_region # Global model's mesh + disp_interpolator.inputs.coordinates.connect(local_Bc_coords) # Link interpolator inputs with the local model's boundary coordinates + disp_interpolator.inputs.mesh.connect(my_mesh) # Link interpolator mesh with the global model's mesh + def interpolate_data(timestep): - global_disp_op.inputs.time_scoping.connect( - [timestep] - ) # Specify timestep value to read results from - global_disp = ( - global_disp_op.outputs.fields_container.get_data() - ) # Read global nodal displacements - - disp_interpolator.inputs.fields_container.connect( - global_disp - ) # Link the interpolation data with the interpolator - local_disp = disp_interpolator.outputs.fields_container.get_data()[ - 0 - ] # Get displacements of the boundary nodes of the local model + global_disp_op.inputs.time_scoping.connect([timestep]) # Specify timestep value to read results from + global_disp = global_disp_op.outputs.fields_container.get_data() # Read global nodal displacements + + disp_interpolator.inputs.fields_container.connect(global_disp) # Link the interpolation data with the interpolator + local_disp = disp_interpolator.outputs.fields_container.get_data()[0] # Get displacements of the boundary nodes of the local model return local_disp - # Define the two dpf operators -global_model, global_disp_op, disp_interpolator = define_dpf_operators(nCores) +global_model,global_disp_op,disp_interpolator = define_dpf_operators(nCores) ############################################################################### -# Set up simulation loop +# Set up simulation loop # 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -# We solve the two models sequentially for each loading step. First the global model is run producing -# a .rst results file. Then we extract the global displacements and use them to define +# We solve the two models sequentially for each loading step. First the global model is run producing +# a .rst results file. Then we extract the global displacements and use them to define # cut-boundary conditions for the local model (an input string command will be used for faster excecution time). - def define_cut_boundary_constraint_template(local_Bc_coords): # Define template of input string command to apply the displacement constraints - local_nids = local_Bc_coords.scoping.ids # Get Node ID of boundary nodes of the local model - template = "" + local_nids = local_Bc_coords.scoping.ids # Get Node ID of boundary nodes of the local model + template = '' for nid in local_nids: - template += ( - "d," - + str(nid) - + ",ux,{:1.6e}\nd," - + str(nid) - + ",uy,{:1.6e}\nd," - + str(nid) - + ",uz,{:1.6e}\n" - ) + template += 'd,'+str(nid)+',ux,{:1.6e}\nd,'+str(nid)+',uy,{:1.6e}\nd,'+str(nid)+',uz,{:1.6e}\n' return template -def solve_global_local(mapdl_global, mapdl_local, timesteps, local_Bc_coords): +def solve_global_local(mapdl_global,mapdl_local,timesteps,local_Bc_coords): + # Enter solution processor mapdl_global.solution() mapdl_local.solution() # Static analysis - mapdl_global.antype("STATIC") - mapdl_local.antype("STATIC") - + mapdl_global.antype("STATIC") + mapdl_local.antype("STATIC") + constraint_template = define_cut_boundary_constraint_template(local_Bc_coords) - for i in range(1, timesteps + 1): # Iterate timesteps - print(f"Timestep: {i}") + for i in range(1,timesteps+1): # Iterate timesteps + print(f'Timestep: {i}') st = tt.time() # Set loadstep time for the global model mapdl_global.time(i) - # No extrapolation - mapdl_global.eresx("NO") - mapdl_global.allsel("ALL") + # No extrapolation + mapdl_global.eresx('NO') + mapdl_global.allsel('ALL') # Write ALL results to database - mapdl_global.outres("ALL", "ALL") + mapdl_global.outres('ALL','ALL') # Solve global model mapdl_global.solve() - print("Global solve took ", tt.time() - st) + print('Global solve took ' , tt.time()-st ) - # Initialize interpolator - if i == 1: - initialize_dpf_interpolator(global_model, local_Bc_coords, disp_interpolator) - # Read & Interpolate displacement data + #Initialize interpolator + if i==1: + initialize_dpf_interpolator(global_model,local_Bc_coords,disp_interpolator) + # Read & Interpolate displacement data local_disp = interpolate_data(timestep=i) - # Run MAPDL input string command to apply the displacement constraints + # Run MAPDL input string command to apply the displacement constraints data_array = np.array(local_disp.data).flatten() - mapdl_local.input_strings(constraint_template.format(*data_array)) + mapdl_local.input_strings(constraint_template.format( *data_array)) st = tt.time() - mapdl_local.allsel("ALL") + mapdl_local.allsel('ALL') # Set loadstep time for the local model mapdl_local.time(i) # No extrapolation - mapdl_local.eresx("NO") + mapdl_local.eresx('NO') # Write ALL results to database - mapdl_local.outres("ALL", "ALL") + mapdl_local.outres('ALL','ALL') # Solve local model mapdl_local.solve() - print("Local solve took ", tt.time() - st) - + print('Local solve took ' , tt.time()-st ) + # Exit solution processor mapdl_global.finish() mapdl_local.finish() - 
############################################################################### -# Solve system -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -n_steps = 10 # Number of timesteps -solve_global_local(mapdl_global, mapdl_local, n_steps, boundary_coords) +# Solve system +# ~~~~~~~~~~~~ +n_steps = 10 # Number of timesteps +solve_global_local(mapdl_global,mapdl_local,n_steps,boundary_coords) ############################################################################### # Visualize results -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - +# ~~~~~~~~~~~~~~~~~ def visualize(mapdl): # Enter post-processing mapdl.post1() # Set the current results set to the last set to be read from result file - mapdl.set("LAST") + mapdl.set('LAST') # Plot nodal displacement of the loading direction - mapdl.post_processing.plot_nodal_displacement("Y", cmap="jet", background="w", cpos="zy") + mapdl.post_processing.plot_nodal_displacement( + "Y", + cmap="jet", + background='w', + cpos='zy' + ) # Exit post-processing mapdl.finish() - # Plot Y displacement of global model visualize(mapdl_global) # Plot Y displacement of local model visualize(mapdl_local) +############################################################################### # Exit MAPDL pool instances pool.exit()
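A natural follow-up check, sketched here rather than included in the patch, is to confirm the submodeling coupling at the last timestep: the Y displacement solved by the local model at a cut-boundary node should reproduce the value interpolated from the global model, since that value was imposed as a constraint. The snippet below reuses objects from ``wf_mapdl-dpf.py``, would have to run before ``pool.exit()``, and picks an arbitrary node purely for illustration.

# Sketch only: compare mapped vs. solved Y displacement at one cut-boundary node.
local_disp = interpolate_data(timestep=n_steps)           # global result mapped to the cut boundary
nid = int(local_disp.scoping.ids[0])                      # first cut-boundary node (arbitrary choice)
uy_mapped = local_disp.get_entity_data_by_id(nid)[0][1]   # interpolated Y displacement

mapdl_local.post1()
mapdl_local.set("LAST")                                   # last result set of the local model
uy_solved = mapdl_local.get_value("NODE", nid, "U", "Y")  # Y displacement from the local solve
mapdl_local.finish()

print(f"Node {nid}: mapped UY = {uy_mapped:.6e}, solved UY = {uy_solved:.6e}")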