Merged
4 changes: 2 additions & 2 deletions .github/workflows/tests_scripts.yml
@@ -51,7 +51,7 @@ jobs:
# can't test with EasyBuild versions older than v4.5.2 when using EESSI 2023.06,
# since Python in compat layer is Python 3.11.x;
# testing with a single EasyBuild version takes a while in GitHub Actions, so stick to a single sensible version
-for EB_VERSION in '4.6.0'; do
+for EB_VERSION in '5.1.0'; do
# Create script that uses load_easybuild_module.sh which we can run in compat layer environment
# note: Be careful with single vs double quotes below!
# ${EB_VERSION} should be expanded, so use double quotes;
@@ -113,7 +113,7 @@ jobs:

# scripts need to be copied to /tmp,
# since create_directory_tarballs.sh must be accessible from within build container
-./eessi_container.sh --mode run --verbose /software-layer-scripts/create_directory_tarballs.sh 2023.06
+./eessi_container.sh --mode run --verbose /software-layer-scripts/create_directory_tarballs.sh "${{matrix.EESSI_VERSION}}"
# check if tarballs have been produced
ls -l *.tar.gz

20 changes: 3 additions & 17 deletions EESSI-install-software.sh
@@ -17,11 +17,6 @@ display_help() {
echo " --skip-cuda-install - disable installing a full CUDA SDK in the host_injections prefix (e.g. in CI)"
}

-# Function to check if a command exists
-function command_exists() {
-    command -v "$1" >/dev/null 2>&1
-}

function copy_build_log() {
# copy specified build log to specified directory, with some context added
build_log=${1}
@@ -315,18 +310,9 @@ else
fi

# Install NVIDIA drivers in host_injections (if they exist)
if command_exists "nvidia-smi"; then
export LD_LIBRARY_PATH="/.singularity.d/libs:${LD_LIBRARY_PATH}"
nvidia-smi --version
ec=$?
if [ ${ec} -eq 0 ]; then
echo "Command 'nvidia-smi' found. Installing NVIDIA drivers for use in prefix shell..."
${EESSI_PREFIX}/scripts/gpu_support/nvidia/link_nvidia_host_libraries.sh
else
echo "Warning: command 'nvidia-smi' found, but 'nvidia-smi --version' did not run succesfully."
echo "This script now assumes this is NOT a GPU node."
echo "If, and only if, the current node actually does contain Nvidia GPUs, this should be considered an error."
fi
+if nvidia_gpu_available; then
+    echo "Installing NVIDIA drivers for use in prefix shell..."
+    ${EESSI_PREFIX}/scripts/gpu_support/nvidia/link_nvidia_host_libraries.sh
fi

if [ ! -z "${shared_fs_path}" ]; then
18 changes: 2 additions & 16 deletions bot/build.sh
@@ -220,23 +220,9 @@ BUILD_STEP_ARGS+=("--save" "${TARBALL_TMP_BUILD_STEP_DIR}")
BUILD_STEP_ARGS+=("--storage" "${STORAGE}")

# add options required to handle NVIDIA support
if command_exists "nvidia-smi"; then
# Accept that this may fail
set +e
nvidia-smi --version
ec=$?
set -e
if [ ${ec} -eq 0 ]; then
echo "Command 'nvidia-smi' found, using available GPU"
BUILD_STEP_ARGS+=("--nvidia" "all")
else
echo "Warning: command 'nvidia-smi' found, but 'nvidia-smi --version' did not run succesfully."
echo "This script now assumes this is NOT a GPU node."
echo "If, and only if, the current node actually does contain Nvidia GPUs, this should be considered an error."
BUILD_STEP_ARGS+=("--nvidia" "install")
fi
+if nvidia_gpu_available; then
+    BUILD_STEP_ARGS+=("--nvidia" "all")
else
echo "No 'nvidia-smi' found, no available GPU but allowing overriding this check"
BUILD_STEP_ARGS+=("--nvidia" "install")
@laraPPr (Contributor Author) — Aug 1, 2025:

This now exactly mimics the previous behaviour, but I'm not sure it is correct.
The '--nvidia install' option is now set in this case: "No 'nvidia-smi' found, no available GPU but allowing overriding this check".

Contributor Author:

It is indeed now always set. I do not think this is what we want, right? An excerpt of the logs from EESSI/software-layer#1143:

No 'nvidia-smi' found, no available GPU but allowing overriding this check

Executing command to build software:

/project/60006/SHARED/jobs/2025.08/pr_1107/event_cfecebd0-6eb2-11f0-9bd7-28ae4dcb0ae2/run_000/linux_x86_64_intel_sapphirerapids/eessi.io-2023.06-software/software-layer-scripts/eessi_container.sh 
--verbose --access rw --mode run --container docker://ghcr.io/eessi/build-node:debian12 
--repository eessi.io-2023.06-software --extra-bind-paths /project/60006/SHARED/jobs/2025.08/pr_1107/event_cfecebd0-6eb2-11f0-9bd7-28ae4dcb0ae2/run_000/linux_x86_64_intel_sapphirerapids/eessi.io-2023.06-software/software-layer-scripts,/dev 
--pass-through --contain --save /project/60006/SHARED/jobs/2025.08/pr_1107/event_cfecebd0-6eb2-11f0-9bd7-28ae4dcb0ae2/run_000/linux_x86_64_intel_sapphirerapids/eessi.io-2023.06-software/previous_tmp/build_step 
--storage /tmp/bot/EESSI/eessi_job.N27hQTIAbE --nvidia install 
--host-injections /project/def-users/bot/shared/host-injections

Member:

In our scripts this is fine: it is a once-only thing, since once things are installed they are not reinstalled.

fi

16 changes: 2 additions & 14 deletions bot/test.sh
@@ -225,20 +225,8 @@ fi
TEST_STEP_ARGS+=("--extra-bind-paths" "/sys/fs/cgroup:/hostsys/fs/cgroup:ro")

# add options required to handle NVIDIA support
if command_exists "nvidia-smi"; then
# Accept that this may fail
set +e
nvidia-smi --version
ec=$?
set -e
if [ ${ec} -eq 0 ]; then
echo "Command 'nvidia-smi' found, using available GPU"
TEST_STEP_ARGS+=("--nvidia" "run")
else
echo "Warning: command 'nvidia-smi' found, but 'nvidia-smi --version' did not run succesfully."
echo "This script now assumes this is NOT a GPU node."
echo "If, and only if, the current node actually does contain Nvidia GPUs, this should be considered an error."
fi
+if nvidia_gpu_available; then
+    TEST_STEP_ARGS+=("--nvidia" "run")
fi

# prepare arguments to test_suite.sh (specific to test step)
19 changes: 19 additions & 0 deletions scripts/utils.sh
@@ -147,3 +147,22 @@ function get_ipv4_address {
echo "${hipv4}"
return 0
}

+function nvidia_gpu_available {
+    if command_exists "nvidia-smi"; then
+        # We are careful here in case we are running in a container and LD_LIBRARY_PATH has been wiped
+        LD_LIBRARY_PATH="/.singularity.d/libs:${LD_LIBRARY_PATH}" nvidia-smi --version
+        ec=$?
+        if [ ${ec} -eq 0 ]; then
+            echo "Command 'nvidia-smi' found."
+            return 0
+        else
+            echo "Warning: command 'nvidia-smi' found, but 'nvidia-smi --version' did not run successfully."
+            echo "This script now assumes this is NOT a GPU node."
+            return 1
+        fi
+    else
+        echo "No 'nvidia-smi' found, no available GPU."
+        return 2
+    fi
+}
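
For context, here is a minimal, hypothetical usage sketch of the new helper (not part of this pull request). It assumes scripts/utils.sh is sourced from the repository root and that command_exists, which nvidia_gpu_available relies on, is also defined there; it mirrors how bot/build.sh maps the return codes onto the --nvidia option.

#!/bin/bash
# Hypothetical caller sketch: source the shared helpers first
# (path assumed relative to the repository root).
source scripts/utils.sh

ARGS=()
if nvidia_gpu_available; then
    # return code 0: 'nvidia-smi' found and working, so treat this as a GPU node
    ARGS+=("--nvidia" "all")
else
    # return code 1 ('nvidia-smi' present but broken) or 2 (no 'nvidia-smi' at all):
    # not treated as a GPU node, but the install option is still passed so the
    # check can be overridden, as discussed in the review comments above
    ARGS+=("--nvidia" "install")
fi
echo "NVIDIA-related arguments: ${ARGS[*]}"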