diff --git a/.github/pseudo-cluster/reframe/docker-entrypoint.sh b/.github/pseudo-cluster/reframe/docker-entrypoint.sh
index 71eb08fc03..a01dba35f2 100755
--- a/.github/pseudo-cluster/reframe/docker-entrypoint.sh
+++ b/.github/pseudo-cluster/reframe/docker-entrypoint.sh
@@ -8,11 +8,13 @@ sudo service munge start
cp -r /usr/local/share/reframe .
cd reframe
./bootstrap.sh
-pip install pytest-cov
+pip install coverage
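+# Refresh the environment from the login profile (e.g., to pick up user-installed tools such as coverage)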
+source $HOME/.profile
echo "Running unittests with backend scheduler: ${BACKEND}"
tempdir=$(mktemp -d -p /scratch)
-TMPDIR=$tempdir ./test_reframe.py --cov=reframe --cov-report=xml \
+TMPDIR=$tempdir coverage run --source=reframe ./test_reframe.py \
--rfm-user-config=ci-scripts/configs/ci-cluster.py \
--rfm-user-system=pseudo-cluster:compute-${BACKEND:-squeue}
+coverage xml -o coverage.xml
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 3831e4257a..5bda327307 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -7,7 +7,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13']
+ python-version: ['3.9', '3.10', '3.11', '3.12', '3.13']
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
@@ -19,25 +19,9 @@ jobs:
./bootstrap.sh
- name: Generic Unittests
run: |
- pip install pytest-cov
- ./test_reframe.py --cov=reframe --cov-report=xml
- - name: Upload coverage reports
- uses: codecov/codecov-action@v4.2.0
-
- unittest-py-eol:
- runs-on: ubuntu-latest
- strategy:
- matrix:
- python-version: ['3.6', '3.7']
- steps:
- - uses: actions/checkout@v4
- - name: Build Image for Python ${{ matrix.python-version }}
- run: |
- docker build --build-arg PYTHON_VERSION=${{ matrix.python-version }} -f ci-scripts/dockerfiles/reframe-python.dockerfile -t reframe-python${{ matrix.python-version }}:latest .
- - name: Run Unittests
- run: |
- docker run --name reframe-python${{ matrix.python-version }} reframe-python${{ matrix.python-version }}:latest
- docker cp reframe-python${{ matrix.python-version }}:/home/rfmuser/reframe/coverage.xml .
+ pip install coverage
+ coverage run --source=reframe ./test_reframe.py
+ coverage xml -o coverage.xml
- name: Upload coverage reports
uses: codecov/codecov-action@v4.2.0
@@ -45,7 +29,7 @@ jobs:
runs-on: macos-latest
strategy:
matrix:
- python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13']
+ python-version: ['3.9', '3.10', '3.11', '3.12', '3.13']
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
@@ -57,8 +41,9 @@ jobs:
./bootstrap.sh
- name: Generic Unittests
run: |
- pip install pytest-cov
- ./test_reframe.py --cov=reframe --cov-report=xml
+ pip install coverage
+ coverage run --source=reframe ./test_reframe.py
+ coverage xml -o coverage.xml
- name: Upload coverage reports
uses: codecov/codecov-action@v4.2.0
@@ -66,7 +51,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- modules-version: [lmod, lmod77, tmod32, tmod4]
+ modules-version: [envmodules, lmod, spack]
steps:
- uses: actions/checkout@v4
- name: Login to GitHub Container Registry
@@ -121,7 +106,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13']
+ python-version: ['3.9', '3.10', '3.11', '3.12', '3.13']
steps:
- uses: actions/checkout@v4
- name: Setup up Python ${{ matrix.python-version }}
@@ -144,7 +129,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13']
+ python-version: ['3.9', '3.10', '3.11', '3.12', '3.13']
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
diff --git a/.github/workflows/test-flux.yaml b/.github/workflows/test-flux.yaml
index cd10473543..fbe6b0dd4d 100644
--- a/.github/workflows/test-flux.yaml
+++ b/.github/workflows/test-flux.yaml
@@ -10,7 +10,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- container: ['fluxrm/flux-sched:focal']
+ container: ['fluxrm/flux-sched:noble']
container:
image: ${{ matrix.container }}
@@ -30,7 +30,6 @@ jobs:
run: |
apt-get update && apt-get install -y python3-pip
./bootstrap.sh
- pip install pytest-cov
export PATH=$PWD/bin:$PATH
which reframe
@@ -41,6 +40,7 @@ jobs:
which reframe
flux start reframe -c examples/howto/flux -C examples/howto/flux/settings.py -l
flux start reframe -c examples/howto/flux -C examples/howto/flux/settings.py -r
- flux start python3 ./test_reframe.py --cov=reframe --cov-report=xml --rfm-user-config=examples/howto/flux/settings.py
+ flux start coverage run --source=reframe ./test_reframe.py --rfm-user-config=examples/howto/flux/settings.py
+ coverage xml -o coverage.xml
- name: Upload coverage reports
uses: codecov/codecov-action@v4.2.0
diff --git a/ci-scripts/configs/cscs-ci.py b/ci-scripts/configs/cscs-ci.py
deleted file mode 100644
index 221b58a1c9..0000000000
--- a/ci-scripts/configs/cscs-ci.py
+++ /dev/null
@@ -1,153 +0,0 @@
-# Copyright 2016-2024 Swiss National Supercomputing Centre (CSCS/ETH Zurich)
-# ReFrame Project Developers. See the top-level LICENSE file for details.
-#
-# SPDX-License-Identifier: BSD-3-Clause
-
-#
-# CSCS CI settings
-#
-
-import reframe.utility.osext as osext
-
-
-site_configuration = {
- 'systems': [
- {
- 'name': 'daint',
- 'descr': 'Piz Daint CI nodes',
- 'hostnames': [
- 'daint'
- ],
- 'modules_system': 'tmod',
- 'resourcesdir': '/apps/common/UES/reframe/resources',
- 'partitions': [
- {
- 'name': 'gpu',
- 'scheduler': 'slurm',
- 'time_limit': '10m',
- 'access': [
- '--constraint=gpu',
- '--partition=cscsci',
- f'--account={osext.osgroup()}'
- ],
- 'environs': [
- 'builtin'
- ],
- 'descr': 'Hybrid nodes (Haswell/P100)',
- 'max_jobs': 100,
- 'resources': [
- {
- 'name': 'switches',
- 'options': [
- '--switches={num_switches}'
- ]
- }
- ],
- 'launcher': 'srun'
- }
- ]
- },
- {
- 'name': 'dom',
- 'descr': 'Dom TDS',
- 'hostnames': [
- 'dom'
- ],
- 'modules_system': 'tmod',
- 'resourcesdir': '/apps/common/UES/reframe/resources',
- 'partitions': [
- {
- 'name': 'slurm',
- 'scheduler': 'slurm',
- 'time_limit': '10m',
- 'access': [
- '--constraint=gpu',
- f'--account={osext.osgroup()}'
- ],
- 'environs': [
- 'builtin'
- ],
- 'descr': 'Hybrid nodes (Haswell/P100)',
- 'max_jobs': 100,
- 'resources': [
- {
- 'name': 'switches',
- 'options': [
- '--switches={num_switches}'
- ]
- }
- ],
- 'launcher': 'srun'
- },
- {
- 'name': 'pbs',
- 'scheduler': 'pbs',
- 'time_limit': '10m',
- 'access': [
- 'proc=gpu',
- f'-A {osext.osgroup()}'
- ],
- 'environs': [
- 'builtin'
- ],
- 'descr': 'Hybrid nodes (Haswell/P100)',
- 'max_jobs': 100,
- 'launcher': 'mpiexec'
- },
- {
- 'name': 'torque',
- 'scheduler': 'torque',
- 'time_limit': '10m',
- 'access': [
- '-l proc=gpu',
- f'-A {osext.osgroup()}'
- ],
- 'environs': [
- 'builtin'
- ],
- 'descr': 'Hybrid nodes (Haswell/P100)',
- 'max_jobs': 100,
- 'launcher': 'mpiexec'
- }
- ]
- },
- {
- 'name': 'tsa',
- 'descr': 'Tsa MCH',
- 'hostnames': [
- r'tsa-\w+\d+'
- ],
- 'modules_system': 'tmod',
- 'resourcesdir': '/apps/common/UES/reframe/resources',
- 'partitions': [
- {
- 'name': 'cn',
- 'scheduler': 'slurm',
- 'access': [
- '--partition=cn-regression'
- ],
- 'environs': [
- 'builtin'
- ],
- 'descr': 'Tsa compute nodes',
- 'max_jobs': 20,
- 'resources': [
- {
- 'name': '_rfm_gpu',
- 'options': [
- '--gres=gpu:{num_gpus_per_node}'
- ]
- }
- ],
- 'launcher': 'srun'
- }
- ]
- },
- ],
- 'general': [
- {
- 'check_search_path': ['checks/'],
- 'check_search_recursive': True
- }
- ]
-}
diff --git a/ci-scripts/configs/envmod.py b/ci-scripts/configs/envmod.py
new file mode 100644
index 0000000000..6d31563ca0
--- /dev/null
+++ b/ci-scripts/configs/envmod.py
@@ -0,0 +1,23 @@
+# Copyright 2016-2024 Swiss National Supercomputing Centre (CSCS/ETH Zurich)
+# ReFrame Project Developers. See the top-level LICENSE file for details.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+
+site_configuration = {
+ 'systems': [
+ {
+ 'name': 'envmodsys',
+ 'descr': 'Generic system using Environment Modules',
+ 'hostnames': ['.*'],
+ 'modules_system': 'envmod',
+ 'partitions': [
+ {
+ 'name': 'default',
+ 'scheduler': 'local',
+ 'launcher': 'local',
+ 'environs': ['builtin']
+ }
+ ]
+ }
+ ]
+}
diff --git a/ci-scripts/configs/lmod.py b/ci-scripts/configs/lmod.py
index e5fe1983ee..0ac569b197 100644
--- a/ci-scripts/configs/lmod.py
+++ b/ci-scripts/configs/lmod.py
@@ -3,15 +3,11 @@
#
# SPDX-License-Identifier: BSD-3-Clause
-#
-# Generic fallback configuration
-#
-
site_configuration = {
'systems': [
{
- 'name': 'generic',
- 'descr': 'Generic example system',
+ 'name': 'lmodsys',
+ 'descr': 'Generic system using Lmod',
'hostnames': ['.*'],
'modules_system': 'lmod',
'partitions': [
@@ -22,49 +18,6 @@
'environs': ['builtin']
}
]
- },
- ],
- 'environments': [
- {
- 'name': 'builtin',
- 'cc': 'cc',
- 'cxx': '',
- 'ftn': ''
- },
- ],
- 'logging': [
- {
- 'handlers': [
- {
- 'type': 'stream',
- 'name': 'stdout',
- 'level': 'info',
- 'format': '%(message)s'
- },
- {
- 'type': 'file',
- 'level': 'debug',
- 'format': '[%(asctime)s] %(levelname)s: %(check_info)s: %(message)s', # noqa: E501
- 'append': False
- }
- ],
- 'handlers_perflog': [
- {
- 'type': 'filelog',
- 'prefix': '%(check_system)s/%(check_partition)s',
- 'level': 'info',
- 'format': (
- '%(check_job_completion_time)s|reframe %(version)s|'
- '%(check_info)s|jobid=%(check_jobid)s|'
- '%(check_perf_var)s=%(check_perf_value)s|'
- 'ref=%(check_perf_ref)s '
- '(l=%(check_perf_lower_thres)s, '
- 'u=%(check_perf_upper_thres)s)|'
- '%(check_perf_unit)s'
- ),
- 'append': True
- }
- ]
}
- ],
+ ]
}
diff --git a/ci-scripts/configs/spack.py b/ci-scripts/configs/spack.py
index 7f2862bbac..d95d8c885a 100644
--- a/ci-scripts/configs/spack.py
+++ b/ci-scripts/configs/spack.py
@@ -3,15 +3,11 @@
#
# SPDX-License-Identifier: BSD-3-Clause
-#
-# Generic fallback configuration
-#
-
site_configuration = {
'systems': [
{
- 'name': 'generic',
- 'descr': 'Generic example system',
+ 'name': 'spacksys',
+ 'descr': 'Generic system using Spack',
'hostnames': ['.*'],
'modules_system': 'spack',
'partitions': [
@@ -22,49 +18,6 @@
'environs': ['builtin']
}
]
- },
- ],
- 'environments': [
- {
- 'name': 'builtin',
- 'cc': 'cc',
- 'cxx': '',
- 'ftn': ''
- },
- ],
- 'logging': [
- {
- 'handlers': [
- {
- 'type': 'stream',
- 'name': 'stdout',
- 'level': 'info',
- 'format': '%(message)s'
- },
- {
- 'type': 'file',
- 'level': 'debug',
- 'format': '[%(asctime)s] %(levelname)s: %(check_info)s: %(message)s', # noqa: E501
- 'append': False
- }
- ],
- 'handlers_perflog': [
- {
- 'type': 'filelog',
- 'prefix': '%(check_system)s/%(check_partition)s',
- 'level': 'info',
- 'format': (
- '%(check_job_completion_time)s|reframe %(version)s|'
- '%(check_info)s|jobid=%(check_jobid)s|'
- '%(check_perf_var)s=%(check_perf_value)s|'
- 'ref=%(check_perf_ref)s '
- '(l=%(check_perf_lower_thres)s, '
- 'u=%(check_perf_upper_thres)s)|'
- '%(check_perf_unit)s'
- ),
- 'append': True
- }
- ]
}
- ],
+ ]
}
diff --git a/ci-scripts/configs/tmod32.py b/ci-scripts/configs/tmod32.py
deleted file mode 100644
index c82594db06..0000000000
--- a/ci-scripts/configs/tmod32.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# Copyright 2016-2024 Swiss National Supercomputing Centre (CSCS/ETH Zurich)
-# ReFrame Project Developers. See the top-level LICENSE file for details.
-#
-# SPDX-License-Identifier: BSD-3-Clause
-
-#
-# Generic fallback configuration
-#
-
-site_configuration = {
- 'systems': [
- {
- 'name': 'generic',
- 'descr': 'Generic example system',
- 'hostnames': ['.*'],
- 'modules_system': 'tmod32',
- 'partitions': [
- {
- 'name': 'default',
- 'scheduler': 'local',
- 'launcher': 'local',
- 'environs': ['builtin']
- }
- ]
- },
- ],
- 'environments': [
- {
- 'name': 'builtin',
- 'cc': 'cc',
- 'cxx': '',
- 'ftn': ''
- },
- ],
- 'logging': [
- {
- 'handlers': [
- {
- 'type': 'stream',
- 'name': 'stdout',
- 'level': 'info',
- 'format': '%(message)s'
- },
- {
- 'type': 'file',
- 'level': 'debug',
- 'format': '[%(asctime)s] %(levelname)s: %(check_info)s: %(message)s', # noqa: E501
- 'append': False
- }
- ],
- 'handlers_perflog': [
- {
- 'type': 'filelog',
- 'prefix': '%(check_system)s/%(check_partition)s',
- 'level': 'info',
- 'format': (
- '%(check_job_completion_time)s|reframe %(version)s|'
- '%(check_info)s|jobid=%(check_jobid)s|'
- '%(check_perf_var)s=%(check_perf_value)s|'
- 'ref=%(check_perf_ref)s '
- '(l=%(check_perf_lower_thres)s, '
- 'u=%(check_perf_upper_thres)s)|'
- '%(check_perf_unit)s'
- ),
- 'append': True
- }
- ]
- }
- ],
-}
diff --git a/ci-scripts/configs/tmod4.py b/ci-scripts/configs/tmod4.py
deleted file mode 100644
index 1b9d4c6531..0000000000
--- a/ci-scripts/configs/tmod4.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# Copyright 2016-2024 Swiss National Supercomputing Centre (CSCS/ETH Zurich)
-# ReFrame Project Developers. See the top-level LICENSE file for details.
-#
-# SPDX-License-Identifier: BSD-3-Clause
-
-#
-# Generic fallback configuration
-#
-
-site_configuration = {
- 'systems': [
- {
- 'name': 'generic',
- 'descr': 'Generic example system',
- 'hostnames': ['.*'],
- 'modules_system': 'tmod4',
- 'partitions': [
- {
- 'name': 'default',
- 'scheduler': 'local',
- 'launcher': 'local',
- 'environs': ['builtin']
- }
- ]
- },
- ],
- 'environments': [
- {
- 'name': 'builtin',
- 'cc': 'cc',
- 'cxx': '',
- 'ftn': ''
- },
- ],
- 'logging': [
- {
- 'handlers': [
- {
- 'type': 'stream',
- 'name': 'stdout',
- 'level': 'info',
- 'format': '%(message)s'
- },
- {
- 'type': 'file',
- 'level': 'debug',
- 'format': '[%(asctime)s] %(levelname)s: %(check_info)s: %(message)s', # noqa: E501
- 'append': False
- }
- ],
- 'handlers_perflog': [
- {
- 'type': 'filelog',
- 'prefix': '%(check_system)s/%(check_partition)s',
- 'level': 'info',
- 'format': (
- '%(check_job_completion_time)s|reframe %(version)s|'
- '%(check_info)s|jobid=%(check_jobid)s|'
- '%(check_perf_var)s=%(check_perf_value)s|'
- 'ref=%(check_perf_ref)s '
- '(l=%(check_perf_lower_thres)s, '
- 'u=%(check_perf_upper_thres)s)|'
- '%(check_perf_unit)s'
- ),
- 'append': True
- }
- ]
- }
- ],
-}
diff --git a/ci-scripts/dockerfiles/Lmod.dockerfile b/ci-scripts/dockerfiles/Lmod.dockerfile
index b908d76022..4240d9eb5a 100644
--- a/ci-scripts/dockerfiles/Lmod.dockerfile
+++ b/ci-scripts/dockerfiles/Lmod.dockerfile
@@ -1,8 +1,8 @@
-FROM ubuntu:20.04
+FROM ubuntu:24.04
ENV TZ=Europe/Zurich
ENV DEBIAN_FRONTEND=noninteractive
-ENV _LMOD_VER=8.4.12
+ENV _LMOD_VER=9.0.4
# Setup apt
RUN \
@@ -11,7 +11,7 @@ RUN \
update-ca-certificates
# Required utilities
-RUN apt-get -y install wget
+RUN apt-get -y install bc wget
# Install Lmod
RUN \
diff --git a/ci-scripts/dockerfiles/eb-spack-howto.dockerfile b/ci-scripts/dockerfiles/eb-spack-howto.dockerfile
index f308189495..c0a92e2962 100644
--- a/ci-scripts/dockerfiles/eb-spack-howto.dockerfile
+++ b/ci-scripts/dockerfiles/eb-spack-howto.dockerfile
@@ -3,10 +3,10 @@
#
-FROM ghcr.io/reframe-hpc/lmod:8.4.12
+FROM ghcr.io/reframe-hpc/lmod:9.0.4
-ENV _SPACK_VER=0.22.2
-ENV _EB_VER=4.9.4
+ENV _SPACK_VER=1.1.0
+ENV _EB_VER=5.1.2
# Install ReFrame unit test requirements
@@ -19,10 +19,8 @@ RUN useradd -ms /bin/bash rfmuser
USER rfmuser
# Install Spack
-RUN git clone --branch v${_SPACK_VER} https://github.com/spack/spack ~/spack && \
- cd ~/spack
-
-RUN pip3 install easybuild==${_EB_VER}
+RUN git clone --branch v${_SPACK_VER} --depth 1 https://github.com/spack/spack ~/spack
+RUN pip3 install --break-system-packages easybuild==${_EB_VER}
ENV PATH="/home/rfmuser/.local/bin:${PATH}"
@@ -35,6 +33,6 @@ RUN ./bootstrap.sh
RUN echo '. /usr/local/lmod/lmod/init/profile && . /home/rfmuser/spack/share/spack/setup-env.sh' > /home/rfmuser/setup.sh
-ENV BASH_ENV /home/rfmuser/setup.sh
+ENV BASH_ENV=/home/rfmuser/setup.sh
-CMD ["/bin/bash", "-c", "./bin/reframe --system=tutorialsys -r -C examples/tutorial/config/baseline_modules.py -R -c examples/tutorial/easybuild/eb_test.py -c examples/tutorial/spack/spack_test.py"]
+CMD ["/bin/bash", "-c", "./bin/reframe --system=tutorialsys --exec-policy=serial -r -C examples/tutorial/config/baseline_modules.py -R -c examples/tutorial/easybuild/eb_test.py -c examples/tutorial/spack/spack_test.py"]
diff --git a/ci-scripts/dockerfiles/Tmod4.dockerfile b/ci-scripts/dockerfiles/envmodules.dockerfile
similarity index 62%
rename from ci-scripts/dockerfiles/Tmod4.dockerfile
rename to ci-scripts/dockerfiles/envmodules.dockerfile
index d8f7fb66aa..e9360e38c0 100644
--- a/ci-scripts/dockerfiles/Tmod4.dockerfile
+++ b/ci-scripts/dockerfiles/envmodules.dockerfile
@@ -1,8 +1,8 @@
-FROM ubuntu:20.04
+FROM ubuntu:24.04
ENV TZ=Europe/Zurich
ENV DEBIAN_FRONTEND=noninteractive
-ENV _TMOD_VER=4.6.0
+ENV _ENVMOD_VER=5.6.1
# Setup apt
RUN \
@@ -13,14 +13,14 @@ RUN \
# Required utilities
RUN apt-get -y install wget less
-# Install Tmod4
+# Install Environment Modules
RUN \
apt-get -y install autoconf tcl-dev && \
- wget -q https://github.com/cea-hpc/modules/archive/v${_TMOD_VER}.tar.gz -O tmod.tar.gz && \
+ wget -q https://github.com/cea-hpc/modules/archive/v${_ENVMOD_VER}.tar.gz -O tmod.tar.gz && \
tar xzf tmod.tar.gz && \
- cd modules-${_TMOD_VER} && \
+ cd modules-${_ENVMOD_VER} && \
./configure && make install && \
- cd .. && rm -rf tmod.tar.gz modules-${_TMOD_VER} && \
+ cd .. && rm -rf tmod.tar.gz modules-${_ENVMOD_VER} && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
diff --git a/ci-scripts/dockerfiles/reframe-tmod4.dockerfile b/ci-scripts/dockerfiles/reframe-envmodules.dockerfile
similarity index 58%
rename from ci-scripts/dockerfiles/reframe-tmod4.dockerfile
rename to ci-scripts/dockerfiles/reframe-envmodules.dockerfile
index 360a448991..6ab319c745 100644
--- a/ci-scripts/dockerfiles/reframe-tmod4.dockerfile
+++ b/ci-scripts/dockerfiles/reframe-envmodules.dockerfile
@@ -2,7 +2,7 @@
# Execute this from the top-level ReFrame source directory
#
-FROM ghcr.io/reframe-hpc/tmod:4.6.0
+FROM ghcr.io/reframe-hpc/envmodules:5.6.1
# ReFrame requirements
@@ -21,6 +21,7 @@ COPY --chown=rfmuser . /home/rfmuser/reframe/
WORKDIR /home/rfmuser/reframe
RUN ./bootstrap.sh
-RUN pip install pytest-cov
+RUN pip install --break-system-packages coverage
+ENV BASH_ENV=/home/rfmuser/.profile
-CMD ["/bin/bash", "-c", "./test_reframe.py --cov=reframe --cov-report=xml --rfm-user-config=ci-scripts/configs/tmod4.py"]
+CMD ["/bin/bash", "-c", "coverage run --source=reframe ./test_reframe.py --rfm-user-config=ci-scripts/configs/envmod.py; coverage xml -o coverage.xml"]
diff --git a/ci-scripts/dockerfiles/reframe-lmod.dockerfile b/ci-scripts/dockerfiles/reframe-lmod.dockerfile
index f95dae4b67..2bd098d4a9 100644
--- a/ci-scripts/dockerfiles/reframe-lmod.dockerfile
+++ b/ci-scripts/dockerfiles/reframe-lmod.dockerfile
@@ -3,7 +3,7 @@
#
-FROM ghcr.io/reframe-hpc/lmod:8.4.12
+FROM ghcr.io/reframe-hpc/lmod:9.0.4
# Install ReFrame unit test requirements
RUN apt-get -y update && \
@@ -20,6 +20,8 @@ COPY --chown=rfmuser . /home/rfmuser/reframe/
WORKDIR /home/rfmuser/reframe
RUN ./bootstrap.sh
-RUN pip install pytest-cov
+RUN pip install --break-system-packages coverage
+RUN echo '. /usr/local/lmod/lmod/init/profile' >> /home/rfmuser/.profile
+ENV BASH_ENV=/home/rfmuser/.profile
-CMD ["/bin/bash", "-c", "./test_reframe.py --cov=reframe --cov-report=xml --rfm-user-config=ci-scripts/configs/lmod.py"]
+CMD ["/bin/bash", "-c", "coverage run --source=reframe ./test_reframe.py -v --rfm-user-config=ci-scripts/configs/lmod.py; coverage xml -o coverage.xml"]
diff --git a/ci-scripts/dockerfiles/reframe-lmod77.dockerfile b/ci-scripts/dockerfiles/reframe-lmod77.dockerfile
deleted file mode 100644
index 5ee2500394..0000000000
--- a/ci-scripts/dockerfiles/reframe-lmod77.dockerfile
+++ /dev/null
@@ -1,25 +0,0 @@
-#
-# Execute this from the top-level ReFrame source directory
-#
-
-
-FROM ghcr.io/reframe-hpc/lmod:7.7
-
-# Install ReFrame unit test requirements
-RUN apt-get -y update && \
- apt-get -y install gcc git make python3 python3-pip
-
-# ReFrame user
-RUN useradd -ms /bin/bash rfmuser
-
-USER rfmuser
-
-# Install ReFrame from the current directory
-COPY --chown=rfmuser . /home/rfmuser/reframe/
-
-WORKDIR /home/rfmuser/reframe
-
-RUN ./bootstrap.sh
-RUN pip install pytest-cov
-
-CMD ["/bin/bash", "-c", "./test_reframe.py --cov=reframe --cov-report=xml --rfm-user-config=ci-scripts/configs/lmod.py"]
diff --git a/ci-scripts/dockerfiles/reframe-python.dockerfile b/ci-scripts/dockerfiles/reframe-python.dockerfile
index d80d09f542..13156b8ed8 100644
--- a/ci-scripts/dockerfiles/reframe-python.dockerfile
+++ b/ci-scripts/dockerfiles/reframe-python.dockerfile
@@ -3,7 +3,7 @@
#
# SPDX-License-Identifier: BSD-3-Clause
-ARG PYTHON_VERSION=3.6
+ARG PYTHON_VERSION=3.9
FROM docker.io/python:${PYTHON_VERSION}
@@ -17,7 +17,8 @@ COPY --chown=rfmuser . /home/rfmuser/reframe/
WORKDIR /home/rfmuser/reframe
-RUN ./bootstrap.sh +docs
-RUN pip install pytest-cov
+RUN ./bootstrap.sh
+RUN pip install --break-system-packages coverage
+ENV BASH_ENV=/home/rfmuser/.profile
-CMD ["/bin/bash", "-c", "./test_reframe.py --cov=reframe --cov-report=xml"]
+CMD ["/bin/bash", "-c", "coverage run --source=reframe ./test_reframe.py; coverage xml -o coverage.xml"]
diff --git a/ci-scripts/dockerfiles/reframe-spack.dockerfile b/ci-scripts/dockerfiles/reframe-spack.dockerfile
new file mode 100644
index 0000000000..c8c2bebd97
--- /dev/null
+++ b/ci-scripts/dockerfiles/reframe-spack.dockerfile
@@ -0,0 +1,33 @@
+#
+# Execute this from the top-level ReFrame source directory
+#
+
+
+FROM ubuntu:24.04
+
+ENV _SPACK_VER=1.1.0
+
+# Install ReFrame unit test requirements
+RUN apt-get -y update && \
+ apt-get -y install gcc git make python3 python3-pip
+
+# ReFrame user
+RUN useradd -ms /bin/bash rfmuser
+
+USER rfmuser
+
+# Install Spack
+RUN git clone --branch v${_SPACK_VER} https://github.com/spack/spack ~/spack
+
+# Install ReFrame from the current directory
+COPY --chown=rfmuser . /home/rfmuser/reframe/
+
+WORKDIR /home/rfmuser/reframe
+
+RUN ./bootstrap.sh
+RUN pip install --break-system-packages coverage
+
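+# Make Spack available in the non-interactive shells spawned by the tests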
+RUN echo '. /home/rfmuser/spack/share/spack/setup-env.sh' >> /home/rfmuser/.profile
+ENV BASH_ENV=/home/rfmuser/.profile
+
+CMD ["/bin/bash", "-c", "coverage run --source=reframe ./test_reframe.py -v --rfm-user-config=ci-scripts/configs/spack.py; coverage xml -o coverage.xml"]
diff --git a/ci-scripts/dockerfiles/reframe-tmod32.dockerfile b/ci-scripts/dockerfiles/reframe-tmod32.dockerfile
deleted file mode 100644
index df9418589f..0000000000
--- a/ci-scripts/dockerfiles/reframe-tmod32.dockerfile
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-# Execute this from the top-level ReFrame source directory
-#
-
-FROM ghcr.io/reframe-hpc/tmod:3.2.10
-
-# ReFrame requirements
-RUN yum -y install gcc make git python3 python3-pip
-
-# ReFrame user
-RUN useradd -ms /bin/bash rfmuser
-RUN pip3 install pytest-cov
-
-USER rfmuser
-
-# Install ReFrame from the current directory
-COPY --chown=rfmuser . /home/rfmuser/reframe/
-
-WORKDIR /home/rfmuser/reframe
-
-RUN ./bootstrap.sh
-
-CMD ["/bin/bash", "-c", "./test_reframe.py --cov=reframe --cov-report=xml --rfm-user-config=ci-scripts/configs/tmod32.py"]
diff --git a/docs/config_reference.rst b/docs/config_reference.rst
index f8e8723df1..2aaa042242 100644
--- a/docs/config_reference.rst
+++ b/docs/config_reference.rst
@@ -161,25 +161,28 @@ System Configuration
The modules system that should be used for loading environment modules on this system.
Available values are the following:
- - ``tmod``: The classic Tcl implementation of the `environment modules `__ (version 3.2).
- - ``tmod31``: The classic Tcl implementation of the `environment modules `__ (version 3.1).
- A separate backend is required for Tmod 3.1, because Python bindings are different from Tmod 3.2.
- - ``tmod32``: A synonym of ``tmod``.
- - ``tmod4``: The `new environment modules `__ implementation (versions older than 4.1 are not supported).
+ - ``envmod``: The `environment modules `__ implementation (versions older than 4.1 are not supported).
- ``lmod``: The `Lua implementation `__ of the environment modules.
- ``spack``: `Spack `__'s built-in mechanism for managing modules.
+ - ``tmod4``: (deprecated) A synonym of ``envmod``.
- ``nomod``: This is to denote that no modules system is used by this system.
Normally, upon loading the configuration of the system ReFrame checks that a sane installation exists for the modules system requested and will issue an error if it fails to find one.
The modules system sanity check is skipped when the :attr:`~config.general.resolve_module_conflicts` is set to :obj:`False`.
This is useful in cases where the current system does not have a modules system but the remote partitions have one and you would like ReFrame to generate the module commands.
- .. versionadded:: 3.4
+ .. versionadded:: 3.4
The ``spack`` backend is added.
- .. versionchanged:: 4.5.0
+ .. versionchanged:: 4.5.0
The modules system sanity check is skipped when the :attr:`config.general.resolve_module_conflicts` is not set.
+ .. versionchanged:: 4.10
+ The ``tmod``, ``tmod31`` and ``tmod32`` backends are no longer supported.
+
+ .. deprecated:: 4.10
+ The ``tmod4`` backend is deprecated; please use ``envmod`` instead.
+
.. py:attribute:: systems.modules
diff --git a/docs/howto.rst b/docs/howto.rst
index 97c72bf55b..9e3acece64 100644
--- a/docs/howto.rst
+++ b/docs/howto.rst
@@ -97,7 +97,7 @@ Integrating with EasyBuild
ReFrame integrates with the `EasyBuild `__ build automation framework, which allows you to use EasyBuild for building the source code of your test.
-Let's consider a simple ReFrame test that installs ``bzip2-1.0.6`` given the easyconfig `bzip2-1.0.6.eb `__ and checks that the installed version is correct.
+Let's consider a simple ReFrame test that installs ``zlib-1.3.1`` given the easyconfig `zlib-1.3.1.eb `__ and checks that the installed version is correct.
The following code block shows the check, highlighting the lines specific to this tutorial:
.. literalinclude:: ../examples/tutorial/easybuild/eb_test.py
@@ -138,7 +138,7 @@ ReFrame generates the following commands to build and install the easyconfig:
.. code-block:: bash
:caption: Run in the EasyBuild+Spack container.
- cat output/tutorialsys/default/builtin/BZip2EBCheck/rfm_build.sh
+ cat output/tutorialsys/default/builtin/ZlibEBCheck/rfm_build.sh
.. code-block:: bash
@@ -147,7 +147,7 @@ ReFrame generates the following commands to build and install the easyconfig:
export EASYBUILD_INSTALLPATH=${stagedir}/easybuild
export EASYBUILD_PREFIX=${stagedir}/easybuild
export EASYBUILD_SOURCEPATH=${stagedir}/easybuild
- eb bzip2-1.0.6.eb -f
+ eb zlib-1.3.1.eb -f
All the files generated by EasyBuild (sources, temporary files, installed software and the corresponding modules) are kept under the test's stage directory, thus the relevant EasyBuild environment variables are set.
@@ -169,13 +169,13 @@ This generated final run script is the following:
.. code-block:: bash
:caption: Run in the EasyBuild+Spack container.
- cat output/tutorialsys/default/builtin/BZip2EBCheck/rfm_job.sh
+ cat output/tutorialsys/default/builtin/ZlibEBCheck/rfm_job.sh
.. code-block:: bash
module use ${stagedir}/easybuild/modules/all
- module load bzip/1.0.6
- bzip2 --help
+ module load zlib/1.3.1
+ ls $LD_LIBRARY_PATH/libz.so.1.3.1
Packaging the installation
@@ -204,7 +204,7 @@ Integrating with Spack
ReFrame can also use `Spack `__ to build a software package and test it.
-The example shown here is the equivalent to the `EasyBuild <#integrating-with-easybuild>`__ one that built ``bzip2``.
+The example shown here is equivalent to the `EasyBuild <#integrating-with-easybuild>`__ one that built ``zlib``.
Here is the test code:
.. literalinclude:: ../examples/tutorial/spack/spack_test.py
@@ -244,7 +244,7 @@ Here is what ReFrame generates as a build script for this example:
spack env create -d rfm_spack_env
spack -e rfm_spack_env config add "config:install_tree:root:opt/spack"
- spack -e rfm_spack_env add bzip2@1.0.6
+ spack -e rfm_spack_env add zlib@1.3.1
spack -e rfm_spack_env install
As you might have noticed ReFrame expects that Spack is already installed on the system.
@@ -262,12 +262,12 @@ Here is the stage directory structure:
│ │ └── darwin-catalina-skylake
│ ├── spack.lock
│ └── spack.yaml
- ├── rfm_BZip2SpackCheck_build.err
- ├── rfm_BZip2SpackCheck_build.out
- ├── rfm_BZip2SpackCheck_build.sh
- ├── rfm_BZip2SpackCheck_job.err
- ├── rfm_BZip2SpackCheck_job.out
- └── rfm_BZip2SpackCheck_job.sh
+ ├── rfm_ZlibSpackCheck_build.err
+ ├── rfm_ZlibSpackCheck_build.out
+ ├── rfm_ZlibSpackCheck_build.sh
+ ├── rfm_ZlibSpackCheck_job.err
+ ├── rfm_ZlibSpackCheck_job.out
+ └── rfm_ZlibSpackCheck_job.sh
Finally, here is the generated run script that ReFrame uses to run the test, once its build has succeeded:
@@ -276,8 +276,8 @@ Finally, here is the generated run script that ReFrame uses to run the test, onc
#!/bin/bash
spack env create -d rfm_spack_env
- eval `spack -e rfm_spack_env load --sh bzip2@1.0.6`
- bzip2 --help
+ eval `spack -e rfm_spack_env load --sh zlib@1.3.1`
+ pkg-config --libs zlib
From this point on, sanity and performance checking are exactly identical to any other ReFrame test.
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 7447db2b40..a24877df97 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -1,20 +1,13 @@
archspec==0.2.5
ClusterShell==1.9.3
-docutils==0.18.1; python_version < '3.9'
docutils==0.21.2; python_version >= '3.9'
fasteners==0.19; python_version < '3.10'
fasteners==0.20; python_version >= '3.10'
-jinja2==3.0.3; python_version == '3.6'
-jinja2==3.1.6; python_version >= '3.7'
+jinja2==3.1.6
jsonschema==3.2.0
-PyYAML==6.0.1; python_version < '3.8'
-PyYAML==6.0.3; python_version >= '3.8'
-semver==2.13.0; python_version == '3.6'
-semver==3.0.4; python_version >= '3.7'
-Sphinx==5.3.0; python_version < '3.8'
-Sphinx==7.1.2; python_version == '3.8'
+PyYAML==6.0.3
+semver==3.0.4
Sphinx==7.4.7; python_version == '3.9'
Sphinx==8.1.3; python_version == '3.10'
Sphinx==8.2.3; python_version >= '3.11'
-sphinx-rtd-theme==2.0.0; python_version < '3.9'
-sphinx-rtd-theme==3.0.2; python_version >= '3.9'
+sphinx-rtd-theme==3.0.2
diff --git a/docs/started.rst b/docs/started.rst
index 3ab07c0295..fa6bd84aa5 100644
--- a/docs/started.rst
+++ b/docs/started.rst
@@ -5,7 +5,7 @@ Getting Started
Requirements
------------
-* Python 3.6 or higher.
+* Python 3.9 or higher.
Python 2 is not supported.
* The required Python packages are the following:
@@ -16,12 +16,11 @@ Requirements
.. note::
.. versionchanged:: 3.0
- Support for Python 3.5 has been dropped.
+ Support for Python 3.5 is dropped.
+ .. versionchanged:: 4.10
-.. warning::
- Although ReFrame supports Python 3.6 and 3.7, you should note that these Python versions have reached end-of-life and you are strongly advised to use a newer version.
- ReFrame installations on these Python versions may use out-of-date dependencies due to incompatibilities.
+ Support for Python < 3.9 is dropped.
Getting the Framework
diff --git a/examples/tutorial/dockerfiles/eb-spack.dockerfile b/examples/tutorial/dockerfiles/eb-spack.dockerfile
index 645507a887..34a6fe1217 100644
--- a/examples/tutorial/dockerfiles/eb-spack.dockerfile
+++ b/examples/tutorial/dockerfiles/eb-spack.dockerfile
@@ -3,10 +3,10 @@
#
-FROM ghcr.io/reframe-hpc/lmod:8.4.12
+FROM ghcr.io/reframe-hpc/lmod:9.0.4
-ENV _SPACK_VER=0.16
-ENV _EB_VER=4.4.1
+ENV _SPACK_VER=1.1.0
+ENV _EB_VER=5.1.2
RUN apt-get -y update && \
apt-get -y install curl && \
@@ -22,7 +22,7 @@ RUN git clone --depth 1 --branch $REFRAME_TAG https://github.com/reframe-hpc/ref
ENV PATH=/usr/local/share/reframe/bin:$PATH
# Install EasyBuild
-RUN pip3 install easybuild==${_EB_VER}
+RUN pip3 install --break-system-packages easybuild==${_EB_VER}
# Add tutorial user
RUN useradd -ms /bin/bash -G sudo user && \
@@ -33,7 +33,7 @@ WORKDIR /home/user
# Install Spack
RUN mkdir .local && cd .local && \
- git clone --branch releases/v${_SPACK_VER} --depth 1 https://github.com/spack/spack
+ git clone --branch v${_SPACK_VER} --depth 1 https://github.com/spack/spack
-RUN echo '. /usr/local/lmod/lmod/init/profile && . /home/user/.local/spack/share/spack/setup-env.sh' > /home/user/.profile
-ENV BASH_ENV /home/user/.profile
+RUN echo '. /usr/local/lmod/lmod/init/profile && . /home/user/.local/spack/share/spack/setup-env.sh' >> /home/user/.profile
+ENV BASH_ENV=/home/user/.profile
diff --git a/examples/tutorial/easybuild/eb_test.py b/examples/tutorial/easybuild/eb_test.py
index 8e50d915f8..d859ab394a 100644
--- a/examples/tutorial/easybuild/eb_test.py
+++ b/examples/tutorial/easybuild/eb_test.py
@@ -8,17 +8,17 @@
@rfm.simple_test
-class BZip2EBCheck(rfm.RegressionTest):
+class ZlibEBCheck(rfm.RegressionTest):
descr = 'Demo test using EasyBuild to build the test code'
valid_systems = ['*']
valid_prog_environs = ['builtin']
- executable = 'bzip2'
- executable_opts = ['--help']
+ executable = 'ls'
+ executable_opts = ['$LD_LIBRARY_PATH/libz.so.1.3.1']
build_system = 'EasyBuild'
@run_before('compile')
def setup_build_system(self):
- self.build_system.easyconfigs = ['bzip2-1.0.6.eb']
+ self.build_system.easyconfigs = ['zlib-1.3.1.eb']
self.build_system.options = ['-f']
@run_before('run')
@@ -26,5 +26,5 @@ def prepare_run(self):
self.modules = self.build_system.generated_modules
@sanity_function
- def assert_version(self):
- return sn.assert_found(r'Version 1.0.6', self.stderr)
+ def assert_exists(self):
+ return sn.assert_eq(self.job.exitcode, 0)
diff --git a/examples/tutorial/spack/spack_test.py b/examples/tutorial/spack/spack_test.py
index a095b11d99..e7ae86b1e4 100644
--- a/examples/tutorial/spack/spack_test.py
+++ b/examples/tutorial/spack/spack_test.py
@@ -8,18 +8,20 @@
@rfm.simple_test
-class BZip2SpackCheck(rfm.RegressionTest):
+class ZlibSpackCheck(rfm.RegressionTest):
descr = 'Demo test using Spack to build the test code'
valid_systems = ['*']
valid_prog_environs = ['builtin']
- executable = 'bzip2'
- executable_opts = ['--help']
+ executable = 'pkg-config'
+ executable_opts = ['--libs', 'zlib']
build_system = 'Spack'
@run_before('compile')
def setup_build_system(self):
- self.build_system.specs = ['bzip2@1.0.6']
+ self.build_system.specs = ['zlib@1.3.1']
@sanity_function
def assert_version(self):
- return sn.assert_found(r'Version 1.0.6', self.stderr)
+ return sn.assert_found(
+ r'-L.*/spack/linux-.*/zlib-1.3.1-.*/lib -lz', self.stdout
+ )
diff --git a/reframe/__init__.py b/reframe/__init__.py
index a661177c12..aeff4e1f58 100644
--- a/reframe/__init__.py
+++ b/reframe/__init__.py
@@ -10,7 +10,7 @@
INSTALL_PREFIX = os.path.normpath(
os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
)
-MIN_PYTHON_VERSION = (3, 6, 0)
+MIN_PYTHON_VERSION = (3, 9, 0)
# Check python version
if sys.version_info[:3] < MIN_PYTHON_VERSION:
diff --git a/reframe/core/logging.py b/reframe/core/logging.py
index ec810e061a..bf1e1d6c48 100644
--- a/reframe/core/logging.py
+++ b/reframe/core/logging.py
@@ -337,12 +337,8 @@ class CheckFieldFormatter(logging.Formatter):
# NOTE: This formatter will work only for the '%' style
def __init__(self, fmt=None, datefmt=None, perffmt=None,
ignore_keys=None, style='%'):
- if sys.version_info[:2] <= (3, 7):
- super().__init__(fmt, datefmt, style)
- else:
- super().__init__(fmt, datefmt, style,
- validate=(fmt != '%(check_#ALL)s'))
-
+ super().__init__(fmt, datefmt, style,
+ validate=(fmt != '%(check_#ALL)s'))
self.__fmt = fmt
self.__fmtperf = perffmt[:-1] if perffmt else ''
self.__specs = re.findall(r'\%\((\S+?)\)s', fmt)
@@ -809,11 +805,10 @@ def __init__(self, name, level=logging.NOTSET):
def setLevel(self, level):
self.level = _check_level(level)
- if sys.version_info[:2] >= (3, 7):
- # Clear the internal cache of the base logger, otherwise the
- # logger will remain disabled if its level is raised and then
- # lowered again
- self._cache.clear()
+ # Clear the internal cache of the base logger, otherwise the
+ # logger will remain disabled if its level is raised and then
+ # lowered again
+ self._cache.clear()
def makeRecord(self, name, level, fn, lno, msg, args, exc_info,
func=None, extra=None, sinfo=None):
diff --git a/reframe/core/modules.py b/reframe/core/modules.py
index e8c9c504b1..292acd8843 100644
--- a/reframe/core/modules.py
+++ b/reframe/core/modules.py
@@ -112,13 +112,15 @@ def create(cls, modules_kind=None, validate=True):
modules_impl = {
None: NoModImpl,
'nomod': NoModImpl,
- 'tmod31': TMod31Impl,
- 'tmod': TModImpl,
- 'tmod32': TModImpl,
- 'tmod4': TMod4Impl,
+ 'tmod4': EnvModulesImpl,
+ 'envmod': EnvModulesImpl,
'lmod': LModImpl,
'spack': SpackImpl
}
+ if modules_kind == 'tmod4':
+ getlogger().warning("'tmod4' backend is deprecated; "
+ "please use 'envmod' instead")
+
try:
impl_cls = modules_impl[modules_kind]
except KeyError:
@@ -583,244 +585,8 @@ def __str__(self):
return self.name() + ' ' + self.version()
-class TModImpl(ModulesSystemImpl):
- '''Base class for TMod Module system (Tcl).'''
-
- MIN_VERSION = (3, 2)
-
- def __init__(self):
- self._version = None
- self._validated = False
- if self.validate:
- self._do_validate()
-
- def _do_validate(self):
- # Try to figure out if we are indeed using the TCL version
- try:
- completed = osext.run_command('modulecmd -V')
- except OSError as e:
- raise ConfigError(
- 'could not find a sane TMod installation') from e
-
- version_match = re.search(r'^VERSION=(\S+)', completed.stdout,
- re.MULTILINE)
- tcl_version_match = re.search(r'^TCL_VERSION=(\S+)', completed.stdout,
- re.MULTILINE)
-
- if version_match is None or tcl_version_match is None:
- raise ConfigError('could not find a sane TMod installation')
-
- version = version_match.group(1)
- try:
- ver_major, ver_minor = [int(v) for v in version.split('.')[:2]]
- except ValueError:
- raise ConfigError(
- 'could not parse TMod version string: ' + version) from None
-
- if (ver_major, ver_minor) < self.MIN_VERSION:
- raise ConfigError(
- f'unsupported TMod version: '
- f'{version} (required >= {self.MIN_VERSION})'
- )
-
- self._version = version
- try:
- # Try the Python bindings now
- completed = osext.run_command(self.modulecmd())
- except OSError as e:
- raise ConfigError(
- f'could not get the Python bindings for TMod: {e}'
- ) from e
-
- if re.search(r'Unknown shell type', completed.stderr):
- raise ConfigError(
- 'Python is not supported by this TMod installation'
- )
-
- self._validated = True
-
- def name(self):
- return 'tmod'
-
- def version(self):
- return self._version
-
- def modulecmd(self, *args):
- return ' '.join(['modulecmd', 'python', *args])
-
- def _execute(self, cmd, *args):
- if not self._validated:
- self._do_validate()
-
- modulecmd = self.modulecmd(cmd, *args)
- completed = osext.run_command(modulecmd)
- if re.search(r'\bERROR\b', completed.stderr) is not None:
- raise SpawnedProcessError(modulecmd,
- completed.stdout,
- completed.stderr,
- completed.returncode)
-
- exec(self.process(completed.stdout))
- return completed.stderr
-
- def available_modules(self, substr):
- output = self.execute('avail', '-t', substr)
- ret = []
- for line in output.split('\n'):
- if not line or line[-1] == ':':
- # Ignore empty lines and path entries
- continue
-
- module = re.sub(r'\(default\)', '', line)
- ret.append(Module(module))
-
- return ret
-
- def loaded_modules(self):
- try:
- # LOADEDMODULES may be defined but empty
- return [Module(m)
- for m in os.environ['LOADEDMODULES'].split(':') if m]
- except KeyError:
- return []
-
- def conflicted_modules(self, module):
- output = self.execute_with_path('show', str(module), path=module.path)
- return [Module(m.group(1))
- for m in re.finditer(r'^conflict\s+(\S+)',
- output, re.MULTILINE)]
-
- def is_module_loaded(self, module):
- return module in self.loaded_modules()
-
- def load_module(self, module):
- self.execute_with_path('load', str(module), path=module.path)
-
- def unload_module(self, module):
- self.execute('unload', str(module))
-
- def unload_all(self):
- self.execute('purge')
-
- def searchpath(self):
- path = os.getenv('MODULEPATH', '')
- return path.split(':')
-
- def searchpath_add(self, *dirs):
- if dirs:
- self.execute('use', *dirs)
-
- def searchpath_remove(self, *dirs):
- if dirs:
- self.execute('unuse', *dirs)
-
- def emit_load_instr(self, module):
- commands = []
- if module.path:
- commands.append(f'module use {module.path}')
-
- commands.append(f'module load {module.fullname}')
- if module.path:
- commands.append(f'module unuse {module.path}')
-
- return commands
-
- def emit_unload_instr(self, module):
- return [f'module unload {module}']
-
-
-class TMod31Impl(TModImpl):
- '''Module system for TMod (Tcl).'''
-
- MIN_VERSION = (3, 1)
-
- def __init__(self):
- self._version = None
- self._command = None
- self._validated = False
- if self.validate:
- self._do_validate()
-
- def _do_validate(self):
- # Try to figure out if we are indeed using the TCL version
- try:
- modulecmd = os.getenv('MODULESHOME')
- modulecmd = os.path.join(modulecmd, 'modulecmd.tcl')
- completed = osext.run_command(modulecmd)
- except OSError as e:
- raise ConfigError(
- f'could not find a sane TMod31 installation: {e}'
- ) from e
-
- version_match = re.search(r'Release Tcl (\S+)', completed.stderr,
- re.MULTILINE)
- tcl_version_match = version_match
-
- if version_match is None or tcl_version_match is None:
- raise ConfigError('could not find a sane TMod31 installation')
-
- version = version_match.group(1)
- try:
- ver_major, ver_minor = [int(v) for v in version.split('.')[:2]]
- except ValueError:
- raise ConfigError(
- 'could not parse TMod31 version string: ' + version) from None
-
- if (ver_major, ver_minor) < self.MIN_VERSION:
- raise ConfigError(
- f'unsupported TMod version: {version} '
- f'(required >= {self.MIN_VERSION})'
- )
-
- self._version = version
- self._command = f'{modulecmd} python'
- try:
- # Try the Python bindings now
- completed = osext.run_command(self._command)
- except OSError as e:
- raise ConfigError(
- f'could not get the Python bindings for TMod31: {e}'
- )
-
- if re.search(r'Unknown shell type', completed.stderr):
- raise ConfigError(
- 'Python is not supported by this TMod installation'
- )
-
- self._validated = True
-
- def name(self):
- return 'tmod31'
-
- def modulecmd(self, *args):
- return ' '.join([self._command, *args])
-
- def _execute(self, cmd, *args):
- if not self._validated:
- self._do_validate()
-
- modulecmd = self.modulecmd(cmd, *args)
- completed = osext.run_command(modulecmd)
- if re.search(r'\bERROR\b', completed.stderr) is not None:
- raise SpawnedProcessError(modulecmd,
- completed.stdout,
- completed.stderr,
- completed.returncode)
-
- exec_match = re.search(r"^exec\s'(\S+)'", completed.stdout,
- re.MULTILINE)
- if exec_match is None:
- raise ConfigError('could not use the python bindings')
-
- with open(exec_match.group(1), 'r') as content_file:
- cmd = content_file.read()
-
- exec(self.process(cmd))
- return completed.stderr
-
-
-class TMod4Impl(TModImpl):
- '''Module system for TMod 4.'''
+class EnvModulesImpl(ModulesSystemImpl):
+ '''Module system for Environment Modules.'''
MIN_VERSION = (4, 1)
@@ -867,7 +633,10 @@ def _do_validate(self):
self._validated = True
def name(self):
- return 'tmod4'
+ return 'envmod'
+
+ def version(self):
+ return self._version
def modulecmd(self, *args):
return ' '.join(['modulecmd', 'python', *args])
@@ -899,20 +668,34 @@ def load_module(self, module):
# 'restore' discards previous module path manipulations
for op, mp in self._extra_module_paths:
if op == '+':
- super().searchpath_add(mp)
+ self.execute('use', mp)
else:
- super().searchpath_remove(mp)
+ self.execute('unuse', mp)
return []
else:
- return super().load_module(module)
+ self.execute_with_path('load', str(module), path=module.path)
def unload_module(self, module):
if module.collection:
- # Module collection are not unloaded
+ # Module collections are not unloaded
return
- super().unload_module(module)
+ self.execute('unload', str(module))
+
+ def loaded_modules(self):
+ try:
+ # LOADEDMODULES may be defined but empty
+ return [Module(m)
+ for m in os.environ['LOADEDMODULES'].split(':') if m]
+ except KeyError:
+ return []
+
+ def is_module_loaded(self, module):
+ return module in self.loaded_modules()
+
+ def unload_all(self):
+ self.execute('purge')
def conflicted_modules(self, module):
if module.collection:
@@ -921,7 +704,10 @@ def conflicted_modules(self, module):
# collection
return []
- return super().conflicted_modules(module)
+ output = self.execute_with_path('show', str(module), path=module.path)
+ return [Module(m.group(1))
+ for m in re.finditer(r'^conflict\s+(\S+)',
+ output, re.MULTILINE)]
def _emit_restore_instr(self, module):
cmds = [f'module restore {module}']
@@ -938,28 +724,51 @@ def emit_load_instr(self, module):
if module.collection:
return self._emit_restore_instr(module)
- return super().emit_load_instr(module)
+ commands = []
+ if module.path:
+ commands.append(f'module use {module.path}')
+
+ commands.append(f'module load {module.fullname}')
+ if module.path:
+ commands.append(f'module unuse {module.path}')
+
+ return commands
def emit_unload_instr(self, module):
if module.collection:
return []
- return super().emit_unload_instr(module)
+ return [f'module unload {module}']
+
+ def searchpath(self):
+ path = os.getenv('MODULEPATH', '')
+ return path.split(':')
def searchpath_add(self, *dirs):
if dirs:
self._extra_module_paths += [('+', mp) for mp in dirs]
-
- super().searchpath_add(*dirs)
+ self.execute('use', *dirs)
def searchpath_remove(self, *dirs):
if dirs:
self._extra_module_paths += [('-', mp) for mp in dirs]
+ self.execute('unuse', *dirs)
+
+ def available_modules(self, substr):
+ output = self.execute('avail', '-t', substr)
+ ret = []
+ for line in output.split('\n'):
+ if not line or line[-1] == ':':
+ # Ignore empty lines and path entries
+ continue
- super().searchpath_remove(*dirs)
+ module = re.sub(r'\(default\)', '', line)
+ ret.append(Module(module))
+
+ return ret
-class LModImpl(TMod4Impl):
+class LModImpl(EnvModulesImpl):
'''Module system for Lmod (Tcl/Lua).'''
def __init__(self):
diff --git a/reframe/core/pipeline.py b/reframe/core/pipeline.py
index 09f03f0470..76790c4306 100644
--- a/reframe/core/pipeline.py
+++ b/reframe/core/pipeline.py
@@ -2616,7 +2616,7 @@ def _copy_to_outputdir(self):
dst = os.path.join(
self.outputdir, os.path.relpath(f, self.stagedir)
)
- osext.copytree(f, dst, dirs_exist_ok=True)
+ shutil.copytree(f, dst, dirs_exist_ok=True)
else:
shutil.copy2(f, self.outputdir)
diff --git a/reframe/frontend/autodetect.py b/reframe/frontend/autodetect.py
index 3ef7a0b9d1..a84dbd72fd 100644
--- a/reframe/frontend/autodetect.py
+++ b/reframe/frontend/autodetect.py
@@ -59,7 +59,7 @@ def __enter__(self):
src = os.path.join(rfm.INSTALL_PREFIX, p)
if os.path.isdir(src):
dst = os.path.join(self._workdir, p)
- osext.copytree(src, dst, dirs_exist_ok=True)
+ shutil.copytree(src, dst, dirs_exist_ok=True)
else:
shutil.copy2(src, self._workdir)
except FileNotFoundError:
diff --git a/reframe/frontend/cli.py b/reframe/frontend/cli.py
index d783e86a40..f431948bca 100644
--- a/reframe/frontend/cli.py
+++ b/reframe/frontend/cli.py
@@ -702,6 +702,14 @@ def main():
help='The delimiter to use when using `--table-format=csv`',
envvar='RFM_TABLE_FORMAT_DELIM', configvar='general/table_format_delim'
)
+ misc_options.add_argument(
+ '--term-lhs', action='store',
+ help='LHS term in performance comparisons'
+ )
+ misc_options.add_argument(
+ '--term-rhs', action='store',
+ help='RHS term in performance comparisons'
+ )
misc_options.add_argument(
'-v', '--verbose', action='count',
help='Increase verbosity level of output',
@@ -1168,8 +1176,10 @@ def restrict_logging():
sys.exit(1)
printer.table(
- reporting.performance_compare(options.performance_compare,
- None, namepatt, *filt)
+ reporting.performance_compare(
+ options.performance_compare, None, namepatt, *filt,
+ options.term_lhs, options.term_rhs
+ )
)
sys.exit(0)
@@ -1769,7 +1779,9 @@ def module_unuse(*paths):
try:
if rt.get_option('storage/0/enable'):
data = reporting.performance_compare(
- rt.get_option('general/0/perf_report_spec'), report
+ rt.get_option('general/0/perf_report_spec'), report,
+ term_lhs=options.term_lhs,
+ term_rhs=options.term_rhs
)
else:
data = report.report_data()
diff --git a/reframe/frontend/printer.py b/reframe/frontend/printer.py
index a26fd7d4b5..87b46990ee 100644
--- a/reframe/frontend/printer.py
+++ b/reframe/frontend/printer.py
@@ -286,7 +286,8 @@ def table(self, data, **kwargs):
table_format = rt.runtime().get_option('general/0/table_format')
if table_format == 'csv':
- return self._table_as_csv(data)
+ self._table_as_csv(data)
+ return
# Map our options to tabulate
if table_format == 'plain':
diff --git a/reframe/frontend/reporting/__init__.py b/reframe/frontend/reporting/__init__.py
index 940595f01f..26638d6e18 100644
--- a/reframe/frontend/reporting/__init__.py
+++ b/reframe/frontend/reporting/__init__.py
@@ -11,23 +11,22 @@
import lxml.etree as etree
import math
import os
+import polars as pl
import re
import socket
import time
import uuid
from collections import UserDict
-from collections.abc import Hashable
import reframe as rfm
import reframe.utility.jsonext as jsonext
import reframe.utility.osext as osext
from reframe.core.exceptions import ReframeError, what, is_severe, reraise_as
from reframe.core.logging import getlogger, _format_time_rfc3339, time_function
-from reframe.core.runtime import runtime
from reframe.core.warnings import suppress_deprecations
from reframe.utility import nodelist_abbrev, OrderedSet
from .storage import StorageBackend
-from .utility import Aggregator, parse_cmp_spec, parse_query_spec
+from .utility import parse_cmp_spec, parse_query_spec
# The schema data version
# Major version bumps are expected to break the validation of previous schemas
@@ -564,54 +563,53 @@ class _TCProxy(UserDict):
_required_keys = ['name', 'system', 'partition', 'environ']
def __init__(self, testcase, include_only=None):
+ # Define the derived attributes
+ def _basename():
+ return testcase['name'].split()[0]
+
+ def _sysenv():
+ return _format_sysenv(testcase['system'],
+ testcase['partition'],
+ testcase['environ'])
+
+ def _job_nodelist():
+ nodelist = testcase['job_nodelist']
+ if isinstance(nodelist, str):
+ return nodelist
+ else:
+ return nodelist_abbrev(testcase['job_nodelist'])
+
if isinstance(testcase, _TCProxy):
testcase = testcase.data
if include_only is not None:
self.data = {}
- for k in include_only + self._required_keys:
- if k in testcase:
- self.data.setdefault(k, testcase[k])
- else:
- self.data = testcase
+ for key in include_only + self._required_keys:
+ # Computed attributes
+ if key == 'basename':
+ val = _basename()
+ elif key == 'sysenv':
+ val = _sysenv()
+ elif key == 'job_nodelist':
+ val = _job_nodelist()
+ else:
+ val = testcase.get(key)
- def __getitem__(self, key):
- val = super().__getitem__(key)
- if key == 'job_nodelist':
- val = nodelist_abbrev(val)
-
- return val
-
- def __missing__(self, key):
- if key == 'basename':
- return self.data['name'].split()[0]
- elif key == 'sysenv':
- return _format_sysenv(self.data['system'],
- self.data['partition'],
- self.data['environ'])
- elif key == 'pdiff':
- return None
+ self.data.setdefault(key, val)
else:
- raise KeyError(key)
-
-
-def _group_key(groups, testcase: _TCProxy):
- key = []
- for grp in groups:
- with reraise_as(ReframeError, (KeyError,), 'no such group'):
- val = testcase[grp]
- if not isinstance(val, Hashable):
- val = str(val)
-
- key.append(val)
-
- return tuple(key)
+ # Include the derived attributes too
+ testcase.update({
+ 'basename': _basename(),
+ 'sysenv': _sysenv(),
+ 'job_nodelist': _job_nodelist()
+ })
+ self.data = testcase
@time_function
-def _group_testcases(testcases, groups, columns):
- grouped = {}
- record_cols = groups + [c for c in columns if c not in groups]
+def _create_dataframe(testcases, groups, columns):
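+ # Build one flat record per performance variable of every test case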
+ record_cols = list(OrderedSet(groups) | OrderedSet(columns))
+ data = []
for tc in map(_TCProxy, testcases):
for pvar, reftuple in tc['perfvalues'].items():
pvar = pvar.split(':')[-1]
@@ -636,140 +634,68 @@ def _group_testcases(testcases, groups, columns):
'punit': punit,
'presult': presult
})
- key = _group_key(groups, record)
- grouped.setdefault(key, [])
- grouped[key].append(record)
-
- return grouped
+ data.append(record)
-
-@time_function
-def _aggregate_perf(grouped_testcases, aggr_fn, cols):
- # Update delimiter for joining unique values based on the table format
- table_format = runtime().get_option('general/0/table_format')
- if table_format == 'pretty':
- delim = '\n'
+ if data:
+ return pl.DataFrame(data)
else:
- delim = '|'
-
- other_aggr = Aggregator.create('join_uniq', delim)
- count_aggr = Aggregator.create('count')
- aggr_data = {}
- for key, seq in grouped_testcases.items():
- aggr_data.setdefault(key, {})
- with reraise_as(ReframeError, (KeyError,), 'no such column'):
- for c in cols:
- if c == 'pval':
- fn = aggr_fn
- elif c == 'psamples':
- fn = count_aggr
- else:
- fn = other_aggr
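+ # No performance records: return an empty frame with the expected columns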
+ return pl.DataFrame(schema=record_cols)
- if fn is count_aggr:
- aggr_data[key][c] = fn(seq)
- else:
- aggr_data[key][c] = fn(tc[c] for tc in seq)
- return aggr_data
+@time_function
+def _aggregate_data(testcases, query):
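+ # Aggregate the performance records per group, as requested by the
+ # comparison query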
+ df = _create_dataframe(testcases, query.group_by, query.attributes)
+ df = df.group_by(query.group_by).agg(
+ query.aggregation.col_spec(query.aggregated_attributes)
+ ).sort(query.group_by)
+ return df
@time_function
-def compare_testcase_data(base_testcases, target_testcases, base_fn, target_fn,
- groups=None, columns=None):
- groups = groups or []
-
- # Clean up columns and store those for which we want explicitly the A or B
- # variants
- cols = []
- variants_A = set()
- variants_B = set()
- for c in columns:
- if c.endswith('_A'):
- variants_A.add(c[:-2])
- cols.append(c[:-2])
- elif c.endswith('_B'):
- variants_B.add(c[:-2])
- cols.append(c[:-2])
- else:
- variants_A.add(c)
- variants_B.add(c)
- cols.append(c)
-
- grouped_base = _group_testcases(base_testcases, groups, cols)
- grouped_target = _group_testcases(target_testcases, groups, cols)
- pbase = _aggregate_perf(grouped_base, base_fn, cols)
- ptarget = _aggregate_perf(grouped_target, target_fn, cols)
-
- # For visual purposes if `name` is in `groups`, consider also its
- # derivative `basename` to be in, so as to avoid duplicate columns
- if 'name' in groups:
- groups.append('basename')
-
- # Build the final table data
- extra_cols = set(cols) - set(groups) - {'pdiff'}
-
- # Header line
- header = []
- for c in cols:
- if c in extra_cols:
- if c in variants_A:
- header.append(f'{c}_A')
-
- if c in variants_B:
- header.append(f'{c}_B')
- else:
- header.append(c)
-
- data = [header]
- for key, aggr_data in pbase.items():
- pdiff = None
- line = []
- for c in cols:
- base = aggr_data.get(c)
- try:
- target = ptarget[key][c]
- except KeyError:
- target = None
-
- if c == 'pval':
- line.append('n/a' if base is None else base)
- line.append('n/a' if target is None else target)
-
- # compute diff for later usage
- if base is not None and target is not None:
- if base == 0 and target == 0:
- pdiff = math.nan
- elif target == 0:
- pdiff = math.inf
- else:
- pdiff = (base - target) / target
- pdiff = '{:+7.2%}'.format(pdiff)
- elif c == 'pdiff':
- line.append('n/a' if pdiff is None else pdiff)
- elif c in extra_cols:
- if c in variants_A:
- line.append('n/a' if base is None else base)
-
- if c in variants_B:
- line.append('n/a' if target is None else target)
- else:
- line.append('n/a' if base is None else base)
+def compare_testcase_data(base_testcases, target_testcases, query):
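+ # Aggregate each side separately, suffix its columns with the LHS/RHS
+ # markers, join on the group-by keys and compute the relative
+ # difference (%) of the aggregated performance values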
+ df_base = _aggregate_data(base_testcases, query).with_columns(
+ pl.col(query.aggregated_columns).name.suffix(query.lhs_column_suffix)
+ )
+ df_target = _aggregate_data(target_testcases, query).with_columns(
+ pl.col(query.aggregated_columns).name.suffix(query.rhs_column_suffix)
+ )
+ pval = query.aggregation.column_names('pval')[0]
+ pval_lhs = f'{pval}{query.lhs_column_suffix}'
+ pval_rhs = f'{pval}{query.rhs_column_suffix}'
+ cols = OrderedSet(query.group_by) | OrderedSet(query.aggregated_variants)
+ if not df_base.is_empty() and not df_target.is_empty():
+ cols |= {query.diff_column}
+ df = df_base.join(df_target, on=query.group_by).with_columns(
+ (100*(pl.col(pval_lhs) - pl.col(pval_rhs)) / pl.col(pval_rhs))
+ .round(2).alias(query.diff_column)
+ ).select(cols)
+ elif df_base.is_empty():
+ df = pl.DataFrame(schema=list(cols))
+ else:
+ # df_target is empty; add an empty col for all `rhs` variants
+ df = df_base.select(
+ pl.col(col)
+ if col in df_base.columns else pl.lit('').alias(col)
+ for col in cols
+ )
- data.append(line)
+ data = [df.columns]
+ for row in df.iter_rows():
+ data.append(row)
return data
@time_function
def performance_compare(cmp, report=None, namepatt=None,
- filterA=None, filterB=None):
+ filterA=None, filterB=None,
+ term_lhs=None, term_rhs=None):
with reraise_as(ReframeError, (ValueError,),
'could not parse comparison spec'):
- match = parse_cmp_spec(cmp)
+ query = parse_cmp_spec(cmp, term_lhs, term_rhs)
backend = StorageBackend.default()
- if match.base is None:
+ if query.lhs is None:
if report is None:
raise ValueError('report cannot be `None` '
'for current run comparisons')
@@ -785,11 +711,10 @@ def performance_compare(cmp, report=None, namepatt=None,
except IndexError:
tcs_base = []
else:
- tcs_base = backend.fetch_testcases(match.base, namepatt, filterA)
+ tcs_base = backend.fetch_testcases(query.lhs, namepatt, filterA)
- tcs_target = backend.fetch_testcases(match.target, namepatt, filterB)
- return compare_testcase_data(tcs_base, tcs_target, match.aggregator,
- match.aggregator, match.groups, match.columns)
+ tcs_target = backend.fetch_testcases(query.rhs, namepatt, filterB)
+ return compare_testcase_data(tcs_base, tcs_target, query)
@time_function
@@ -837,22 +762,20 @@ def session_data(query):
def testcase_data(spec, namepatt=None, test_filter=None):
with reraise_as(ReframeError, (ValueError,),
'could not parse comparison spec'):
- match = parse_cmp_spec(spec, default_extra_cols=['pval'])
+ query = parse_cmp_spec(spec)
- if match.base is not None:
+ if query.lhs is not None:
raise ReframeError('only one time period or session are allowed: '
'if you want to compare performance, '
'use the `--performance-compare` option')
storage = StorageBackend.default()
- testcases = storage.fetch_testcases(match.target, namepatt, test_filter)
- aggregated = _aggregate_perf(
- _group_testcases(testcases, match.groups, match.columns),
- match.aggregator, match.columns
+ df = _aggregate_data(
+ storage.fetch_testcases(query.rhs, namepatt, test_filter), query
)
- data = [match.columns]
- for aggr_data in aggregated.values():
- data.append([aggr_data[c] for c in match.columns])
+ data = [df.columns]
+ for row in df.iter_rows():
+ data.append(row)
return data
diff --git a/reframe/frontend/reporting/storage.py b/reframe/frontend/reporting/storage.py
index 7175744691..e40e5a79ce 100644
--- a/reframe/frontend/reporting/storage.py
+++ b/reframe/frontend/reporting/storage.py
@@ -17,7 +17,7 @@
from reframe.core.logging import getlogger, time_function, getprofiler
from reframe.core.runtime import runtime
from reframe.utility import nodelist_abbrev
-from ..reporting.utility import QuerySelector
+from ..reporting.utility import QuerySelectorTestcase
class StorageBackend:
@@ -41,7 +41,7 @@ def store(self, report, report_file):
'''Store the given report'''
@abc.abstractmethod
- def fetch_testcases(self, selector: QuerySelector, name_patt=None,
+ def fetch_testcases(self, selector: QuerySelectorTestcase, name_patt=None,
test_filter=None):
'''Fetch test cases based on the specified query selector.
@@ -54,7 +54,7 @@ def fetch_testcases(self, selector: QuerySelector, name_patt=None,
'''
@abc.abstractmethod
- def fetch_sessions(self, selector: QuerySelector, decode=True):
+ def fetch_sessions(self, selector: QuerySelectorTestcase, decode=True):
'''Fetch sessions based on the specified query selector.
:arg selector: an instance of :class:`QuerySelector` that will specify
@@ -65,7 +65,7 @@ def fetch_sessions(self, selector: QuerySelector, decode=True):
'''
@abc.abstractmethod
- def remove_sessions(self, selector: QuerySelector):
+ def remove_sessions(self, selector: QuerySelectorTestcase):
'''Remove sessions based on the specified query selector
:arg selector: an instance of :class:`QuerySelector` that will specify
@@ -382,7 +382,7 @@ def _fetch_testcases_time_period(self, ts_start, ts_end, name_patt=None,
return [*filter(filt_fn, testcases)]
@time_function
- def fetch_testcases(self, selector: QuerySelector,
+ def fetch_testcases(self, selector: QuerySelectorTestcase,
name_patt=None, test_filter=None):
if selector.by_session():
return self._fetch_testcases_from_session(
@@ -394,7 +394,7 @@ def fetch_testcases(self, selector: QuerySelector,
)
@time_function
- def fetch_sessions(self, selector: QuerySelector, decode=True):
+ def fetch_sessions(self, selector: QuerySelectorTestcase, decode=True):
query = 'SELECT uuid, json_blob FROM sessions'
if selector.by_time_period():
ts_start, ts_end = selector.time_period
@@ -448,7 +448,7 @@ def _do_remove2(self, conn, uuids):
return [rec[0] for rec in results]
@time_function
- def remove_sessions(self, selector: QuerySelector):
+ def remove_sessions(self, selector: QuerySelectorTestcase):
if selector.by_session_uuid():
uuids = [selector.uuid]
else:
diff --git a/reframe/frontend/reporting/utility.py b/reframe/frontend/reporting/utility.py
index 7d127b9409..4d9fb21489 100644
--- a/reframe/frontend/reporting/utility.py
+++ b/reframe/frontend/reporting/utility.py
@@ -3,99 +3,114 @@
#
# SPDX-License-Identifier: BSD-3-Clause
-import abc
+import polars as pl
import re
-import statistics
-import types
-from collections import namedtuple
from datetime import datetime, timedelta, timezone
from numbers import Number
+from typing import Dict, List
+from reframe.utility import OrderedSet
+
+
+class Aggregation:
+ '''Represents a user aggregation'''
+
+    OP_REGEX = re.compile(r'(?P<op>\S+)\((?P<col>\S+)\)|(?P<op2>\S+)')
+ OP_VALID = {'min', 'max', 'median', 'mean', 'std',
+ 'first', 'last', 'stats', 'sum'}
+
+ def __init__(self, agg_spec: str):
+        '''Create an Aggregation from an aggregation spec'''
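+        # `agg_spec` is a comma-separated list of `op` or `op(column)`
+        # items, e.g. 'mean' (applied to `pval`) or 'min(pval),max(pval)'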
+ self._aggregations: Dict[str, List[str]] = {}
+ self._agg_names: Dict[str, str] = {}
+ for agg in agg_spec.split(','):
+ m = self.OP_REGEX.match(agg)
+ if m:
+ op = m.group('op') or m.group('op2')
+ col = m.group('col') or 'pval'
+ if op not in self.OP_VALID:
+ raise ValueError(f'unknown aggregation: {op}')
+
+ if op == 'stats':
+ agg_ops = ('min', 'p01', 'p05', 'median', 'p95', 'p99',
+ 'max', 'mean', 'stddev')
+                elif op == 'std':
+                    # Normalise `std` to `stddev` so that the registered
+                    # column name matches the alias produced in `col_spec()`
+                    agg_ops = ['stddev']
+                else:
+                    agg_ops = [op]
+
+ self._aggregations.setdefault(col, [])
+ self._aggregations[col] += agg_ops
+ for op in agg_ops:
+ self._agg_names[self._fmt_col(col, op)] = col
+ else:
+ raise ValueError(f'invalid aggregation spec: {agg}')
+
+ def __repr__(self) -> str:
+ return f'Aggregation({self._aggregations})'
+
+ def _fmt_col(self, col: str, op: str) -> str:
+ '''Format the aggregation's column name'''
+ return f'{col} ({op})'
+
+ def attributes(self) -> List[str]:
+ '''Return the attributes to be aggregated'''
+ return list(self._aggregations.keys())
+
+ def column_names(self, col: str) -> List[str]:
+        '''Return the aggregation's column names'''
+ try:
+ ops = self._aggregations[col]
+ return [self._fmt_col(col, op) for op in ops]
+ except KeyError:
+ return [col]
+
+ def strip_suffix(self, col: str) -> str:
+ '''Strip aggregation suffix from column'''
+ return self._agg_names.get(col, col)
+
+ def col_spec(self, extra_cols: List[str]) -> List[pl.Expr]:
+ '''Return a list of polars expressions for this aggregation'''
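+        # These expressions are meant to be fed into a polars group-by,
+        # e.g. `df.group_by(keys).agg(aggr.col_spec(extra_cols))`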
+ def _expr_from_op(col, op):
+ if op == 'min':
+ return pl.col(col).min().alias(f'{col} (min)')
+ elif op == 'max':
+ return pl.col(col).max().alias(f'{col} (max)')
+ elif op == 'median':
+ return pl.col(col).median().alias(f'{col} (median)')
+ elif op == 'mean':
+ return pl.col(col).mean().alias(f'{col} (mean)')
+ elif op == 'std':
+ return pl.col(col).std().alias(f'{col} (stddev)')
+ elif op == 'first':
+ return pl.col(col).first().alias(f'{col} (first)')
+ elif op == 'last':
+ return pl.col(col).last().alias(f'{col} (last)')
+ elif op == 'p01':
+ return pl.col(col).quantile(0.01).alias(f'{col} (p01)')
+            elif op == 'p05':
+                return pl.col(col).quantile(0.05).alias(f'{col} (p05)')
+            elif op == 'p95':
+                return pl.col(col).quantile(0.95).alias(f'{col} (p95)')
+            elif op == 'p99':
+                return pl.col(col).quantile(0.99).alias(f'{col} (p99)')
+ elif op == 'stddev':
+ return pl.col(col).std().alias(f'{col} (stddev)')
+ elif op == 'sum':
+ return pl.col(col).sum().alias(f'{col} (sum)')
+
+ specs = []
+ for col, ops in self._aggregations.items():
+ for op in ops:
+ specs.append(_expr_from_op(col, op))
+
+ # Add col specs for the extra columns requested
+ for col in extra_cols:
+ if col == 'pval':
+ continue
+ elif col == 'psamples':
+ specs.append(pl.len().alias('psamples'))
+ else:
+ specs.append(pl.col(col).unique().str.join('|'))
-
-class Aggregator:
- @classmethod
- def create(cls, name, *args, **kwargs):
- if name == 'first':
- return AggrFirst(*args, **kwargs)
- elif name == 'last':
- return AggrLast(*args, **kwargs)
- elif name == 'mean':
- return AggrMean(*args, **kwargs)
- elif name == 'median':
- return AggrMedian(*args, **kwargs)
- elif name == 'min':
- return AggrMin(*args, **kwargs)
- elif name == 'max':
- return AggrMax(*args, **kwargs)
- elif name == 'count':
- return AggrCount(*args, **kwargs)
- elif name == 'join_uniq':
- return AggrJoinUniqueValues(*args, **kwargs)
- else:
- raise ValueError(f'unknown aggregation function: {name!r}')
-
- @abc.abstractmethod
- def __call__(self, iterable):
- pass
-
-
-class AggrFirst(Aggregator):
- def __call__(self, iterable):
- for i, elem in enumerate(iterable):
- if i == 0:
- return elem
-
-
-class AggrLast(Aggregator):
- def __call__(self, iterable):
- if not isinstance(iterable, types.GeneratorType):
- return iterable[-1]
-
- for elem in iterable:
- pass
-
- return elem
-
-
-class AggrMean(Aggregator):
- def __call__(self, iterable):
- return statistics.mean(iterable)
-
-
-class AggrMedian(Aggregator):
- def __call__(self, iterable):
- return statistics.median(iterable)
-
-
-class AggrMin(Aggregator):
- def __call__(self, iterable):
- return min(iterable)
-
-
-class AggrMax(Aggregator):
- def __call__(self, iterable):
- return max(iterable)
-
-
-class AggrJoinUniqueValues(Aggregator):
- def __init__(self, delim):
- self.__delim = delim
-
- def __call__(self, iterable):
- unique_vals = {str(elem) for elem in iterable}
- return self.__delim.join(unique_vals)
-
-
-class AggrCount(Aggregator):
- def __call__(self, iterable):
- if hasattr(iterable, '__len__'):
- return len(iterable)
-
- count = 0
- for _ in iterable:
- count += 1
-
- return count
+ return specs
def _parse_timestamp(s):
@@ -153,7 +168,7 @@ def is_uuid(s):
return _UUID_PATTERN.match(s) is not None
-class QuerySelector:
+class QuerySelectorTestcase:
'''A class for encapsulating the different session and testcase queries.
A session or testcase query can be of one of the following kinds:
@@ -237,7 +252,8 @@ def _parse_aggregation(s, base_columns=None):
except ValueError:
raise ValueError(f'invalid aggregate function spec: {s}') from None
- return Aggregator.create(op), _parse_columns(group_cols, base_columns)
+ return Aggregation(op), _parse_columns(group_cols, base_columns)
def parse_query_spec(s):
@@ -245,29 +261,154 @@ def parse_query_spec(s):
return None
if is_uuid(s):
- return QuerySelector(uuid=s)
+ return QuerySelectorTestcase(uuid=s)
if '?' in s:
time_period, sess_filter = s.split('?', maxsplit=1)
if time_period:
- return QuerySelector(sess_filter=sess_filter,
- time_period=parse_time_period(time_period))
+ return QuerySelectorTestcase(
+ sess_filter=sess_filter,
+ time_period=parse_time_period(time_period)
+ )
else:
- return QuerySelector(sess_filter=sess_filter)
+ return QuerySelectorTestcase(sess_filter=sess_filter)
+
+ return QuerySelectorTestcase(time_period=parse_time_period(s))
+
+
+class _QueryMatch:
+ '''Class to represent the user's query'''
+
+ def __init__(self,
+ lhs: QuerySelectorTestcase,
+ rhs: QuerySelectorTestcase,
+ aggregation: Aggregation,
+ groups: List[str],
+ columns: List[str],
+ term_lhs: str, term_rhs: str):
+ self.__lhs: QuerySelectorTestcase = lhs
+ self.__rhs: QuerySelectorTestcase = rhs
+ self.__aggregation: Aggregation = aggregation
+ self.__tc_group_by: List[str] = groups
+ self.__tc_attrs: List[str] = []
+ self.__col_variants: Dict[str, List[str]] = {}
+ self.__lhs_term: str = term_lhs or 'lhs'
+ self.__rhs_term: str = term_rhs or 'rhs'
+
+ if self.is_compare() and 'pval' not in columns:
+ # Always add `pval` if the query is a performance comparison
+ columns.append('pval')
+
+ for col in columns:
+ if self.is_compare():
+ # This is a comparison; trim any column suffixes and store
+ # them for later selection
+ if col.endswith(self.lhs_select_suffix):
+ col = col[:-len(self.lhs_select_suffix)]
+ self.__col_variants.setdefault(col, [])
+ self.__col_variants[col].append(self.lhs_column_suffix)
+ elif col.endswith(self.rhs_select_suffix):
+ col = col[:-len(self.rhs_select_suffix)]
+ self.__col_variants.setdefault(col, [])
+ self.__col_variants[col].append(self.rhs_column_suffix)
+ else:
+ self.__col_variants.setdefault(col, [])
+ self.__col_variants[col].append(self.lhs_column_suffix)
+ self.__col_variants[col].append(self.rhs_column_suffix)
+
+ self.__tc_attrs.append(col)
+
+ self.__tc_attrs_agg: List[str] = (OrderedSet(self.__tc_attrs) -
+ OrderedSet(self.__tc_group_by))
+ self.__aggregated_cols: List[str] = []
+ for col in self.__tc_attrs_agg:
+ self.__aggregated_cols += self.__aggregation.column_names(col)
+
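+        # Expand each aggregated column into its requested lhs/rhs
+        # variants, e.g. 'pval (mean)' -> 'pval (mean) (lhs)' and
+        # 'pval (mean) (rhs)' with the default lhs/rhs terms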
+ self.__col_variants_agg: List[str] = []
+ for col in self.__aggregated_cols:
+ col_stripped = self.aggregation.strip_suffix(col)
+ if col_stripped in self.__col_variants:
+ self.__col_variants_agg += [
+ f'{col}{variant}'
+ for variant in self.__col_variants[col_stripped]
+ ]
+ else:
+ self.__col_variants_agg.append(col)
+
+ def is_compare(self):
+ '''Check if this query is a performance comparison'''
+ return self.__lhs is not None
- return QuerySelector(time_period=parse_time_period(s))
+ @property
+ def lhs_column_suffix(self):
+ '''The suffix of the lhs column in a comparison'''
+ return f' ({self.__lhs_term})'
+
+ @property
+ def lhs_select_suffix(self):
+ '''The suffix for selecting the lhs column in a comparison'''
+ return '_L'
+
+ @property
+ def rhs_column_suffix(self):
+ '''The suffix of the rhs column in a comparison'''
+ return f' ({self.__rhs_term})'
+
+ @property
+ def rhs_select_suffix(self):
+ '''The suffix for selecting the rhs column in a comparison'''
+ return '_R'
+
+ @property
+ def diff_column(self):
+ '''The name of the performance difference column'''
+ return 'pdiff (%)'
+
+ @property
+ def lhs(self) -> QuerySelectorTestcase:
+ '''The lhs data sub-query'''
+ return self.__lhs
+
+ @property
+ def rhs(self) -> QuerySelectorTestcase:
+ '''The rhs data sub-query'''
+ return self.__rhs
+
+ @property
+ def aggregation(self) -> Aggregation:
+ '''The aggregation of this query'''
+ return self.__aggregation
+
+ @property
+ def attributes(self) -> List[str]:
+ '''Test attributes requested by this query'''
+ return self.__tc_attrs
+
+ @property
+ def aggregated_attributes(self) -> List[str]:
+ '''Test attributes whose values must be aggregated'''
+ return self.__tc_attrs_agg
+
+ @property
+ def aggregated_columns(self) -> List[str]:
+ '''Column names of the aggregated attributes'''
+        return self.__aggregated_cols
+
+ @property
+ def aggregated_variants(self) -> List[str]:
+ '''Column names of the aggregated lhs/rhs attributes'''
+ return self.__col_variants_agg
+
+ @property
+ def group_by(self) -> List[str]:
+ '''Test attributes to be grouped'''
+ return self.__tc_group_by
-_Match = namedtuple('_Match',
- ['base', 'target', 'aggregator', 'groups', 'columns'])
DEFAULT_GROUP_BY = ['name', 'sysenv', 'pvar', 'punit']
-DEFAULT_EXTRA_COLS = ['pval', 'pdiff']
-def parse_cmp_spec(spec, default_group_by=None, default_extra_cols=None):
- default_group_by = default_group_by or list(DEFAULT_GROUP_BY)
- default_extra_cols = default_extra_cols or list(DEFAULT_EXTRA_COLS)
+def parse_cmp_spec(spec, term_lhs=None, term_rhs=None):
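+    # The spec has the form `<lhs>/<rhs>/<aggr>:<groups>/<cols>` or, if no
+    # lhs sub-query is given, `<rhs>/<aggr>:<groups>/<cols>`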
parts = spec.split('/')
if len(parts) == 3:
base_spec, target_spec, aggr, cols = None, *parts
@@ -278,8 +419,9 @@ def parse_cmp_spec(spec, default_group_by=None, default_extra_cols=None):
base = parse_query_spec(base_spec)
target = parse_query_spec(target_spec)
- aggr_fn, group_cols = _parse_aggregation(aggr, default_group_by)
+ aggr, group_cols = _parse_aggregation(aggr, DEFAULT_GROUP_BY)
# Update base columns for listing
- columns = _parse_columns(cols, group_cols + default_extra_cols)
- return _Match(base, target, aggr_fn, group_cols, columns)
+ columns = _parse_columns(cols, group_cols + aggr.attributes())
+ return _QueryMatch(base, target, aggr, group_cols, columns,
+ term_lhs, term_rhs)
diff --git a/reframe/schemas/config.json b/reframe/schemas/config.json
index ac03ea0fda..255acf948a 100644
--- a/reframe/schemas/config.json
+++ b/reframe/schemas/config.json
@@ -270,7 +270,7 @@
"max_local_jobs": {"type": "number"},
"modules_system": {
"type": "string",
- "enum": ["tmod", "tmod31", "tmod32", "tmod4",
+ "enum": ["tmod4", "envmod",
"lmod", "nomod", "spack"]
},
"modules": {"$ref": "#/defs/modules_list"},
diff --git a/reframe/utility/osext.py b/reframe/utility/osext.py
index 544c47900b..1b58972f4c 100644
--- a/reframe/utility/osext.py
+++ b/reframe/utility/osext.py
@@ -27,6 +27,7 @@
import reframe.utility as util
from reframe.core.exceptions import (ReframeError, SpawnedProcessError,
SpawnedProcessTimeout)
+from reframe.core.warnings import user_deprecation_warning
from . import OrderedSet
@@ -409,52 +410,20 @@ def copytree(src, dst, symlinks=False, ignore=None, copy_function=shutil.copy2,
This function will automatically delegate to :py:func:`shutil.copytree`
for Python versions >= 3.8.
+
+ .. deprecated:: 4.10
+
+ Please use :py:func:`shutil.copytree` directly.
'''
+ user_deprecation_warning('`osext.copytree()` is deprecated; '
+ 'please use `shutil.copytree()` directly')
+
if src == os.path.commonpath([src, dst]):
raise ValueError("cannot copy recursively the parent directory "
"`%s' into one of its descendants `%s'" % (src, dst))
- if sys.version_info[1] >= 8:
- return shutil.copytree(src, dst, symlinks, ignore, copy_function,
- ignore_dangling_symlinks, dirs_exist_ok)
-
- if not dirs_exist_ok:
- return shutil.copytree(src, dst, symlinks, ignore, copy_function,
- ignore_dangling_symlinks)
-
- # dirs_exist_ok=True and Python < 3.8
- if not os.path.exists(dst):
- return shutil.copytree(src, dst, symlinks, ignore, copy_function,
- ignore_dangling_symlinks)
-
- # dst exists; manually descend into the subdirectories, but do some sanity
- # checking first
-
- # We raise the following errors to comply with the copytree()'s behaviour
-
- if not os.path.isdir(dst):
- raise FileExistsError(errno.EEXIST, 'File exists', dst)
-
- if not os.path.exists(src):
- raise FileNotFoundError(errno.ENOENT, 'No such file or directory', src)
-
- if not os.path.isdir(src):
- raise NotADirectoryError(errno.ENOTDIR, 'Not a directory', src)
-
- _, subdirs, files = list(os.walk(src))[0]
- ignore_paths = ignore(src, os.listdir(src)) if ignore else {}
- for f in files:
- if f not in ignore_paths:
- copy_function(os.path.join(src, f), os.path.join(dst, f),
- follow_symlinks=not symlinks)
-
- for d in subdirs:
- if d not in ignore_paths:
- copytree(os.path.join(src, d), os.path.join(dst, d),
- symlinks, ignore, copy_function,
- ignore_dangling_symlinks, dirs_exist_ok)
-
- return dst
+ return shutil.copytree(src, dst, symlinks, ignore, copy_function,
+ ignore_dangling_symlinks, dirs_exist_ok)
def copytree_virtual(src, dst, file_links=None,
@@ -464,10 +433,10 @@ def copytree_virtual(src, dst, file_links=None,
``file_links``.
If ``file_links`` is empty or :class:`None`, this is equivalent to
- :func:`copytree()`. The rest of the arguments are passed as-is to
- :func:`copytree()`. Paths in ``file_links`` must be relative to ``src``.
- If you try to pass ``'.'`` in ``file_links``, an :py:class:`OSError` will
- be raised.
+ :py:func:`shutil.copytree()`. The rest of the arguments are passed as-is
+ to :py:func:`shutil.copytree()`. Paths in ``file_links`` must be relative
+ to ``src``. If you try to pass ``'.'`` in ``file_links``, an
+ :py:class:`OSError` will be raised.
'''
@@ -510,8 +479,8 @@ def ignore(dir, contents):
if os.path.join(dir, c) in link_targets}
# Copy to dst ignoring the file_links
- copytree(src, dst, symlinks, ignore,
- copy_function, ignore_dangling_symlinks, dirs_exist_ok)
+ shutil.copytree(src, dst, symlinks, ignore,
+ copy_function, ignore_dangling_symlinks, dirs_exist_ok)
# Now create the symlinks
for f in link_targets:
diff --git a/reframe/utility/profile.py b/reframe/utility/profile.py
index 144970df0d..c6b94d52eb 100644
--- a/reframe/utility/profile.py
+++ b/reframe/utility/profile.py
@@ -6,9 +6,6 @@
# A lightweight time profiler
import time
-import sys
-
-from collections import OrderedDict
class ProfilerError(Exception):
@@ -33,10 +30,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
class TimeProfiler:
def __init__(self):
self._region_stack = ['root']
- if sys.version_info[:2] < (3, 8):
- self._region_times = OrderedDict()
- else:
- self._region_times = {}
+ self._region_times = {}
@property
def current_region(self):
diff --git a/requirements.txt b/requirements.txt
index 4b5a2c45a9..2df00ec1e4 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,40 +1,22 @@
archspec==0.2.5
-argcomplete==3.1.2; python_version < '3.8'
argcomplete==3.6.3; python_version >= '3.8'
ClusterShell==1.9.3
fasteners==0.19; python_version < '3.10'
fasteners==0.20; python_version >= '3.10'
-importlib_metadata==4.0.1; python_version < '3.8'
-jinja2==3.0.3; python_version == '3.6'
-jinja2==3.1.6; python_version >= '3.7'
+jinja2==3.1.6
jsonschema==3.2.0
-lxml==5.2.0; python_version < '3.8' and platform_machine == 'aarch64'
-lxml==5.4.0; python_version < '3.8' and platform_machine != 'aarch64'
-lxml==6.0.2; python_version >= '3.8'
-pytest==7.0.1; python_version < '3.8'
-pytest==8.3.5; python_version == '3.8'
+lxml==6.0.2
+polars==1.35.1
pytest==8.4.2; python_version == '3.9'
pytest==9.0.1; python_version >= '3.10'
-pytest-forked==1.4.0; python_version == '3.6'
-pytest-forked==1.6.0; python_version >= '3.7'
+pytest-forked==1.6.0
pytest-parallel==0.1.1
-pytest-rerunfailures==10.3; python_version == '3.6'
-pytest-rerunfailures==13.0; python_version == '3.7'
-pytest-rerunfailures==14.0; python_version == '3.8'
pytest-rerunfailures==16.0.1; python_version == '3.9'
pytest-rerunfailures==16.1; python_version >= '3.10'
-PyYAML==6.0.1; python_version < '3.8'
-PyYAML==6.0.3; python_version >= '3.8'
-requests==2.27.1; python_version == '3.6'
-requests==2.31.0; python_version == '3.7'
-requests==2.32.4; python_version >= '3.8'
-semver==2.13.0; python_version == '3.6'
-semver==3.0.4; python_version >= '3.7'
-setuptools==59.6.0; python_version == '3.6'
-setuptools==68.0.0; python_version == '3.7'
-setuptools==75.3.0; python_version == '3.8'
-setuptools==80.9.0; python_version >= '3.9'
-tabulate==0.8.10; python_version == '3.6'
-tabulate==0.9.0; python_version >= '3.7'
+PyYAML==6.0.3
+requests==2.32.4
+semver==3.0.4
+setuptools==80.9.0
+tabulate==0.9.0
wcwidth==0.2.14
-#+pygelf%pygelf==0.4.0
+#+pygelf%pygelf==0.4.3
diff --git a/setup.cfg b/setup.cfg
index a3db8c3f05..8acab03341 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -9,9 +9,6 @@ long_description = file: README_minimal.md
long_description_content_type = text/markdown
classifiers =
Development Status :: 5 - Production/Stable
- Programming Language :: Python :: 3.6
- Programming Language :: Python :: 3.7
- Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
@@ -24,29 +21,22 @@ classifiers =
[options]
packages = find_namespace:
-python_requires = >=3.6
+python_requires = >=3.9
scripts = bin/reframe
install_requires =
archspec >= 0.2.4
argcomplete
- argcomplete <= 3.1.2; python_version < '3.8'
ClusterShell
fasteners==0.19; python_version < '3.10'
fasteners
- jinja2==3.0.3; python_version == '3.6'
jinja2
jsonschema
- lxml==5.2.0; python_version < '3.8' and platform_machine == 'aarch64'
- lxml==5.4.0; python_version < '3.8' and platform_machine != 'aarch64'
lxml
- PyYAML==6.0.1; python_version < '3.8'
+ polars
PyYAML
requests
- requests <= 2.27.1; python_version == '3.6'
semver
- semver <= 2.13.0; python_version == '3.6'
tabulate
- tabulate <= 0.8.10; python_version == '3.6'
[options.packages.find]
include = reframe,reframe.*,hpctestlib.*
diff --git a/unittests/test_cli.py b/unittests/test_cli.py
index 8742dc173b..e0224a243f 100644
--- a/unittests/test_cli.py
+++ b/unittests/test_cli.py
@@ -634,8 +634,9 @@ def test_timestamp_option_default(run_reframe):
assert returncode == 0
matches = re.findall(
- r'(stage|output) directory: .*\/(\d{8}T\d{6}\+\d{4})', stdout
+ r'(stage|output) directory: .*\/(\d{8}T\d{6}(\+|-)\d{4})', stdout
)
+ print(stdout)
assert len(matches) == 2
@@ -1377,6 +1378,7 @@ def table_format(request):
def assert_no_crash(returncode, stdout, stderr, exitcode=0):
+ print(stdout)
assert returncode == exitcode
assert 'Traceback' not in stdout
assert 'Traceback' not in stderr
@@ -1528,5 +1530,5 @@ def assert_no_crash(returncode, stdout, stderr, exitcode=0):
assert_no_crash(
*run_reframe2(
action='--performance-compare=now-1m:now/now-1d:now/mean:+foo/+bar'
- ), exitcode=1
+ )
)
diff --git a/unittests/test_loader.py b/unittests/test_loader.py
index 729708bde7..ba32e5bc31 100644
--- a/unittests/test_loader.py
+++ b/unittests/test_loader.py
@@ -8,7 +8,6 @@
import shutil
import reframe as rfm
-import reframe.utility.osext as osext
from reframe.core.exceptions import ReframeSyntaxError
from reframe.frontend.loader import RegressionCheckLoader
@@ -148,7 +147,7 @@ def test_relative_import_outside_rfm_prefix(loader, tmp_path):
# imported as a hierarchical module. If not, we want to make sure that
# reframe will still load its parent modules
- osext.copytree(
+ shutil.copytree(
os.path.abspath('unittests/resources/checks_unlisted/testlib'),
tmp_path / 'testlib', dirs_exist_ok=True
)
diff --git a/unittests/test_modules.py b/unittests/test_modules.py
index feffdc5eb3..5f09e7dd7d 100644
--- a/unittests/test_modules.py
+++ b/unittests/test_modules.py
@@ -9,10 +9,11 @@
import reframe.core.environments as env
import reframe.core.modules as modules
import unittests.utility as test_util
+from reframe.utility.versioning import parse as parse_version
from reframe.core.exceptions import ConfigError, EnvironError
-@pytest.fixture(params=['tmod', 'tmod4', 'lmod', 'spack', 'nomod'])
+@pytest.fixture(params=['envmod', 'lmod', 'spack', 'nomod'])
def modules_system_nopath(request, monkeypatch):
# Always pretend to be on a clean modules environment
monkeypatch.setenv('MODULEPATH', '')
@@ -77,7 +78,11 @@ def module_collection(modules_system, tmp_path, monkeypatch):
# Remove the temporary collection
if modules_system.name == 'lmod':
- prefix = os.path.join(os.environ['HOME'], '.lmod.d')
+ lmod_version = parse_version(modules_system.version)
+ if lmod_version < (9,):
+ prefix = os.path.join(os.environ['HOME'], '.lmod.d')
+ else:
+ prefix = os.path.join(os.environ['HOME'], '.config', 'lmod')
else:
prefix = os.path.join(os.environ['HOME'], '.module')
diff --git a/unittests/test_reporting.py b/unittests/test_reporting.py
index c83a35ca6b..c9a30cd13a 100644
--- a/unittests/test_reporting.py
+++ b/unittests/test_reporting.py
@@ -6,6 +6,7 @@
import json
import jsonschema
import os
+import polars as pl
import pytest
import sys
import time
@@ -18,14 +19,13 @@
import reframe.frontend.reporting as reporting
import reframe.frontend.reporting.storage as report_storage
from reframe.frontend.reporting.utility import (parse_cmp_spec, is_uuid,
- QuerySelector,
- DEFAULT_GROUP_BY,
- DEFAULT_EXTRA_COLS)
+ QuerySelectorTestcase,
+ DEFAULT_GROUP_BY)
from reframe.core.exceptions import ReframeError
from reframe.frontend.reporting import RunReport
-_DEFAULT_BASE_COLS = DEFAULT_GROUP_BY + DEFAULT_EXTRA_COLS
+_DEFAULT_BASE_COLS = DEFAULT_GROUP_BY + ['pval']
# NOTE: We could move this to utility
@@ -211,7 +211,7 @@ def test_parse_cmp_spec_period(time_period):
spec, duration = time_period
duration = int(duration)
match = parse_cmp_spec(f'{spec}/{spec}/mean:/')
- for query in ('base', 'target'):
+ for query in ('lhs', 'rhs'):
assert getattr(match, query).by_time_period()
ts_start, ts_end = getattr(match, query).time_period
if 'now' in spec:
@@ -223,36 +223,65 @@ def test_parse_cmp_spec_period(time_period):
# Check variant without base period
match = parse_cmp_spec(f'{spec}/mean:/')
- assert match.base is None
+ assert match.lhs is None
@pytest.fixture(params=['first', 'last', 'mean', 'median',
- 'min', 'max', 'count'])
+ 'min', 'max', 'std', 'stats', 'sum'])
def aggregator(request):
return request.param
def test_parse_cmp_spec_aggregations(aggregator):
match = parse_cmp_spec(f'now-1m:now/now-1d:now/{aggregator}:/')
- data = [1, 2, 3, 4, 5]
+ num_recs = 10
+ nodelist = [f'nid{i}' for i in range(num_recs)]
+ df = pl.DataFrame({
+ 'name': ['test' for i in range(num_recs)],
+ 'pvar': ['time' for i in range(num_recs)],
+ 'unit': ['s' for i in range(num_recs)],
+ 'pval': [1 + i/10 for i in range(num_recs)],
+ 'node': nodelist
+ })
+ agg = df.group_by('name').agg(match.aggregation.col_spec(['node']))
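+    # Extra columns (here `node`) are aggregated as a '|'-joined set of
+    # their unique values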
+ assert set(agg['node'][0].split('|')) == set(nodelist)
if aggregator == 'first':
- match.aggregator(data) == data[0]
+ assert 'pval (first)' in agg.columns
+ assert agg['pval (first)'][0] == 1
elif aggregator == 'last':
- match.aggregator(data) == data[-1]
+ assert 'pval (last)' in agg.columns
+ assert agg['pval (last)'][0] == 1.9
elif aggregator == 'min':
- match.aggregator(data) == 1
+ assert 'pval (min)' in agg.columns
+ assert agg['pval (min)'][0] == 1
elif aggregator == 'max':
- match.aggregator(data) == 5
+ assert 'pval (max)' in agg.columns
+ assert agg['pval (max)'][0] == 1.9
elif aggregator == 'median':
- match.aggregator(data) == 3
+ assert 'pval (median)' in agg.columns
+ assert agg['pval (median)'][0] == 1.45
elif aggregator == 'mean':
- match.aggregator(data) == sum(data) / len(data)
- elif aggregator == 'count':
- match.aggregator(data) == len(data)
+ assert 'pval (mean)' in agg.columns
+ assert agg['pval (mean)'][0] == 1.45
+ elif aggregator == 'std':
+ assert 'pval (stddev)' in agg.columns
+ elif aggregator == 'stats':
+ assert 'pval (min)' in agg.columns
+ assert 'pval (p01)' in agg.columns
+ assert 'pval (p05)' in agg.columns
+ assert 'pval (median)' in agg.columns
+ assert 'pval (p95)' in agg.columns
+ assert 'pval (p99)' in agg.columns
+ assert 'pval (max)' in agg.columns
+ assert 'pval (mean)' in agg.columns
+ assert 'pval (stddev)' in agg.columns
+ elif aggregator == 'sum':
+ assert 'pval (sum)' in agg.columns
+ assert agg['pval (sum)'][0] == 14.5
# Check variant without base period
match = parse_cmp_spec(f'now-1d:now/{aggregator}:/')
- assert match.base is None
+ assert match.lhs is None
@pytest.fixture(params=[('', DEFAULT_GROUP_BY),
@@ -270,11 +299,11 @@ def test_parse_cmp_spec_group_by(group_by_columns):
match = parse_cmp_spec(
f'now-1m:now/now-1d:now/min:{spec}/'
)
- assert match.groups == expected
+ assert match.group_by == expected
# Check variant without base period
match = parse_cmp_spec(f'now-1d:now/min:{spec}/')
- assert match.base is None
+ assert match.lhs is None
@pytest.fixture(params=[('', _DEFAULT_BASE_COLS),
@@ -292,11 +321,17 @@ def test_parse_cmp_spec_extra_cols(columns):
match = parse_cmp_spec(
f'now-1m:now/now-1d:now/min:/{spec}'
)
- assert match.columns == expected
+
+ # `pval` is always added in case of comparisons
+ if spec == 'col1,col2':
+ assert match.attributes == expected + ['pval']
+ else:
+ assert match.attributes == expected
# Check variant without base period
match = parse_cmp_spec(f'now-1d:now/min:/{spec}')
- assert match.base is None
+ assert match.lhs is None
+ assert match.attributes == expected
def test_is_uuid():
@@ -340,11 +375,11 @@ def _uuids(s):
match = parse_cmp_spec(uuid_spec)
base_uuid, target_uuid = _uuids(uuid_spec)
- if match.base.by_session_uuid():
- assert match.base.uuid == base_uuid
+ if match.lhs.by_session_uuid():
+ assert match.lhs.uuid == base_uuid
- if match.target.by_session_uuid():
- assert match.target.uuid == target_uuid
+ if match.rhs.by_session_uuid():
+ assert match.rhs.uuid == target_uuid
@pytest.fixture(params=[
@@ -358,16 +393,16 @@ def sess_filter(request):
def test_parse_cmp_spec_with_filter(sess_filter):
match = parse_cmp_spec(sess_filter)
- if match.base:
- assert match.base.by_session_filter()
- assert match.base.sess_filter == 'xyz == "123"'
+ if match.lhs:
+ assert match.lhs.by_session_filter()
+ assert match.lhs.sess_filter == 'xyz == "123"'
- assert match.target.by_session_filter()
- assert match.target.sess_filter == 'xyz == "789"'
+ assert match.rhs.by_session_filter()
+ assert match.rhs.sess_filter == 'xyz == "789"'
if sess_filter.startswith('now'):
- assert match.target.by_time_period()
- ts_start, ts_end = match.target.time_period
+ assert match.rhs.by_time_period()
+ ts_start, ts_end = match.rhs.time_period
assert int(ts_end - ts_start) == 86400
@@ -423,7 +458,6 @@ def test_parse_cmp_spec_invalid_extra_cols(invalid_col_spec):
'now-1m:now/now-1d:now',
'now-1m:now/now-1d:now/mean',
'now-1m:now/now-1d:now/mean:',
- 'now-1m:now/now-1d:now/mean:',
'/now-1d:now/mean:/',
'now-1m:now//mean:'])
def various_invalid_specs(request):
@@ -446,13 +480,13 @@ def _count_failed(testcases):
return count
def from_time_period(ts_start, ts_end):
- return QuerySelector(time_period=(ts_start, ts_end))
+ return QuerySelectorTestcase(time_period=(ts_start, ts_end))
def from_session_uuid(x):
- return QuerySelector(uuid=x)
+ return QuerySelectorTestcase(uuid=x)
def from_session_filter(filt, ts_start, ts_end):
- return QuerySelector(time_period=(ts_start, ts_end), sess_filter=filt)
+    return QuerySelectorTestcase(time_period=(ts_start, ts_end),
+                                 sess_filter=filt)
monkeypatch.setenv('HOME', str(tmp_path))
uuids = []
diff --git a/unittests/test_utility.py b/unittests/test_utility.py
index ca0939bd97..fb0a204ebb 100644
--- a/unittests/test_utility.py
+++ b/unittests/test_utility.py
@@ -22,6 +22,7 @@
from reframe.core.exceptions import (ConfigError,
SpawnedProcessError,
SpawnedProcessTimeout)
+from reframe.core.warnings import ReframeDeprecationWarning
def test_command_success():
@@ -281,7 +282,8 @@ def test_copytree(tmp_path):
dir_src.mkdir()
dir_dst = tmp_path / 'dst'
dir_dst.mkdir()
- osext.copytree(str(dir_src), str(dir_dst), dirs_exist_ok=True)
+ with pytest.warns(ReframeDeprecationWarning):
+ osext.copytree(str(dir_src), str(dir_dst), dirs_exist_ok=True)
def test_copytree_src_parent_of_dst(tmp_path):
@@ -289,7 +291,8 @@ def test_copytree_src_parent_of_dst(tmp_path):
src_path = (dst_path / '..').resolve()
with pytest.raises(ValueError):
- osext.copytree(str(src_path), str(dst_path))
+ with pytest.warns(ReframeDeprecationWarning):
+ osext.copytree(str(src_path), str(dst_path))
@pytest.fixture(params=['dirs_exist_ok=True', 'dirs_exist_ok=False'])
@@ -303,7 +306,8 @@ def test_copytree_dst_notdir(tmp_path, dirs_exist_ok):
dst = tmp_path / 'dst'
dst.touch()
with pytest.raises(FileExistsError, match=fr'{dst}'):
- osext.copytree(str(dir_src), str(dst), dirs_exist_ok=dirs_exist_ok)
+ with pytest.warns(ReframeDeprecationWarning):
+ osext.copytree(str(dir_src), str(dst), dirs_exist_ok=dirs_exist_ok)
def test_copytree_src_notdir(tmp_path, dirs_exist_ok):
@@ -312,7 +316,8 @@ def test_copytree_src_notdir(tmp_path, dirs_exist_ok):
dst = tmp_path / 'dst'
dst.mkdir()
with pytest.raises(NotADirectoryError, match=fr'{src}'):
- osext.copytree(str(src), str(dst), dirs_exist_ok=dirs_exist_ok)
+ with pytest.warns(ReframeDeprecationWarning):
+ osext.copytree(str(src), str(dst), dirs_exist_ok=dirs_exist_ok)
def test_copytree_src_does_not_exist(tmp_path, dirs_exist_ok):
@@ -320,7 +325,8 @@ def test_copytree_src_does_not_exist(tmp_path, dirs_exist_ok):
dst = tmp_path / 'dst'
dst.mkdir()
with pytest.raises(FileNotFoundError, match=fr'{src}'):
- osext.copytree(str(src), str(dst), dirs_exist_ok=dirs_exist_ok)
+ with pytest.warns(ReframeDeprecationWarning):
+ osext.copytree(str(src), str(dst), dirs_exist_ok=dirs_exist_ok)
@pytest.fixture