From 997c507bd5d54595e03575daf4f6d8a079115c2f Mon Sep 17 00:00:00 2001 From: hasan7n Date: Mon, 16 Mar 2026 01:40:11 +0000 Subject: [PATCH 1/6] fix admin script --- examples/cc/admin_scripts/admin.sh | 86 +++++++++++++++++++++++++----- 1 file changed, 72 insertions(+), 14 deletions(-) diff --git a/examples/cc/admin_scripts/admin.sh b/examples/cc/admin_scripts/admin.sh index 8e2d150c1..a4b7dd652 100644 --- a/examples/cc/admin_scripts/admin.sh +++ b/examples/cc/admin_scripts/admin.sh @@ -7,26 +7,26 @@ set -eo pipefail export PROJECT_ID="medperf-330914" # User email -export USER="hasan.gcptest@gmail.com" +export USER_EMAIL="hasan.gcptest@gmail.com" # New service account name to create -export SERVICE_ACCOUNT_NAME="medperf-cc-sa" +export SERVICE_ACCOUNT_NAME="cc-test" # New KMS info to create -export KEYRING_NAME="medperf-keyring" -export KEY_NAME="medperf-key" +export KEYRING_NAME="data-owner-keyring" +export KEY_NAME="data-owner-cc-key" export KEY_LOCATION="global" # New Workload identity pool and OIDC provider info to create -export WIP_ID="medperf-wip" -export WIP_PROVIDER_ID="medperf-wippro" +export WIP_ID="test1" +export WIP_PROVIDER_ID="attestation-verifier" # New bucket info to create export BUCKET_NAME="medperf-bucket" export BUCKET_LOCATION="us-central1" # New virtual machine info to create -export VM_NAME="gputest" +export VM_NAME="gputestdebug" export BOOT_DISK_SIZE="500GB" export VM_ZONE="us-central1-a" export VM_NETWORK="medperf-brats-network" # default is usually "default" @@ -49,6 +49,9 @@ gcloud services enable \ confidentialcomputing.googleapis.com \ iamcredentials.googleapis.com +echo "********************************************************************************************" +echo "************************************* Services enabled *************************************" +echo "********************************************************************************************" #################################################### #################### 
KMS ########################### #################################################### @@ -58,6 +61,10 @@ gcloud services enable \ gcloud kms keyrings create "$KEYRING_NAME" \ --location="$KEY_LOCATION" +echo "********************************************************************************************" +echo "************************************* KMS Keyring created **********************************" +echo "********************************************************************************************" + # Create Key gcloud kms keys create "$KEY_NAME" \ --location="$KEY_LOCATION" \ @@ -65,16 +72,24 @@ gcloud kms keys create "$KEY_NAME" \ --purpose=encryption \ --protection-level=hsm +echo "********************************************************************************************" +echo "************************************* KMS Key created **************************************" +echo "********************************************************************************************" + # allow user to encrypt with the key gcloud kms keys add-iam-policy-binding "$FULL_KEY_NAME" \ - --member=user:"$USER" \ + --member=user:"$USER_EMAIL" \ --role="roles/cloudkms.cryptoKeyEncrypter" # allow user to manage iam policy of the key gcloud kms keys add-iam-policy-binding "$FULL_KEY_NAME" \ - --member=user:"$USER" \ + --member=user:"$USER_EMAIL" \ --role="roles/cloudkms.admin" +echo "********************************************************************************************" +echo "************************************* KMS permissions granted ******************************" +echo "********************************************************************************************" + #################################################### #################### WIP ########################### #################################################### @@ -82,6 +97,10 @@ gcloud kms keys add-iam-policy-binding "$FULL_KEY_NAME" \ # Create Workload Identity Pool gcloud iam workload-identity-pools create 
"$WIP_ID" --location=global +echo "********************************************************************************************" +echo "************************************* WIP created ******************************************" +echo "********************************************************************************************" + # Create OIDC provider for WIP gcloud iam workload-identity-pools providers create-oidc "$WIP_PROVIDER_ID" \ --location=global \ @@ -94,14 +113,24 @@ gcloud iam workload-identity-pools providers create-oidc "$WIP_PROVIDER_ID" \ ::\"+assertion.submods.gce.instance_id" \ --attribute-condition="assertion.swname == 'CONFIDENTIAL_SPACE'" + +echo "********************************************************************************************" +echo "************************************* WIP provider created *********************************" +echo "********************************************************************************************" + # Allow user to manage WIP gcloud iam workload-identity-pools add-iam-policy-binding "$WIP_ID" \ --location=global \ --project="$PROJECT_ID" \ - --member=user:"$USER" \ + --member=user:"$USER_EMAIL" \ --role="roles/iam.workloadIdentityPoolAdmin" +echo "********************************************************************************************" +echo "************************************* WIP permissions granted ******************************" +echo "********************************************************************************************" + + #################################################### #################### Bucket ######################## #################################################### @@ -111,11 +140,20 @@ gcloud storage buckets create "gs://$BUCKET_NAME" \ --location="$BUCKET_LOCATION" \ --uniform-bucket-level-access + +echo "********************************************************************************************" +echo "************************************* Bucket created 
***************************************" +echo "********************************************************************************************" + # Allow user to manage the bucket gcloud storage buckets add-iam-policy-binding "gs://$BUCKET_NAME" \ - --member=user:"$USER" \ + --member=user:"$USER_EMAIL" \ --role="roles/storage.admin" +echo "********************************************************************************************" +echo "************************************* Bucket permissions granted ***************************" +echo "********************************************************************************************" + #################################################### #################### Service Account ############### #################################################### @@ -123,10 +161,14 @@ gcloud storage buckets add-iam-policy-binding "gs://$BUCKET_NAME" \ # create service account gcloud iam service-accounts create "$SERVICE_ACCOUNT_NAME" +echo "********************************************************************************************" +echo "************************************* Service Account created ******************************" +echo "********************************************************************************************" + # allow user to use the service account gcloud iam service-accounts add-iam-policy-binding \ "$SERVICE_ACCOUNT_EMAIL" \ - --member=user:"$USER" \ + --member=user:"$USER_EMAIL" \ --role="roles/iam.serviceAccountUser" # give the service account cc workload user role @@ -144,6 +186,10 @@ gcloud storage buckets add-iam-policy-binding "gs://$BUCKET_NAME" \ --member=serviceAccount:"$SERVICE_ACCOUNT_EMAIL" \ --role="roles/storage.objectAdmin" +echo "********************************************************************************************" +echo "********************** Service account permissions granted *********************************" +echo 
"********************************************************************************************" + #################################################### #################### Virtual Machine ############### #################################################### @@ -184,15 +230,27 @@ gcloud compute instances create "$VM_NAME" \ --instance-termination-action=STOP \ --discard-local-ssds-at-termination-timestamp=true +echo "********************************************************************************************" +echo "************************************* VM created *******************************************" +echo "********************************************************************************************" + # Stop the VM -gcloud compute instances stop "$VM_NAME" --zone="$VM_ZONE" --project="$PROJECT_ID" +gcloud compute instances stop "$VM_NAME" --zone="$VM_ZONE" --project="$PROJECT_ID" --discard-local-ssd=false + +echo "********************************************************************************************" +echo "************************************* VM stopped *******************************************" +echo "********************************************************************************************" # allow user to edit the VM metadata and to start it gcloud compute instances add-iam-policy-binding "$VM_NAME" \ --zone="$VM_ZONE" \ - --member=user:"$USER" \ + --member=user:"$USER_EMAIL" \ --role="roles/compute.instanceAdmin.v1" +echo "********************************************************************************************" +echo "************************************* VM permissions granted *******************************" +echo "********************************************************************************************" + # Give the user the following information PROJECT_NUMBER=$(gcloud projects describe "$PROJECT_ID" --format="value(projectNumber)") From 9634fc0c533cf63cc3cc01d86f6609d3bbb0f4a6 Mon Sep 17 00:00:00 2001 From: hasan7n Date: Mon, 16 Mar 
2026 01:40:31 +0000 Subject: [PATCH 2/6] add LD library for GPU example to run --- .../cc/rano/implementation/benchmark/inference/entrypoint.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/cc/rano/implementation/benchmark/inference/entrypoint.sh b/examples/cc/rano/implementation/benchmark/inference/entrypoint.sh index 5269950be..693bb347d 100644 --- a/examples/cc/rano/implementation/benchmark/inference/entrypoint.sh +++ b/examples/cc/rano/implementation/benchmark/inference/entrypoint.sh @@ -1 +1,2 @@ +export LD_LIBRARY_PATH=/usr/local/nvidia/lib64:$LD_LIBRARY_PATH python /project/benchmark/inference/infer.py $@ From fa98c05647c858b8152fa0f9d179a1363e669c55 Mon Sep 17 00:00:00 2001 From: hasan7n Date: Mon, 16 Mar 2026 01:40:37 +0000 Subject: [PATCH 3/6] add data owner instructions --- docs/concepts/confidential_computing.md | 53 +++++++++++++++++++++++++ mkdocs.yml | 1 + 2 files changed, 54 insertions(+) create mode 100644 docs/concepts/confidential_computing.md diff --git a/docs/concepts/confidential_computing.md b/docs/concepts/confidential_computing.md new file mode 100644 index 000000000..186e7a574 --- /dev/null +++ b/docs/concepts/confidential_computing.md @@ -0,0 +1,53 @@ +# Configuring confidential Computing + +This guide assumes that you already have a registered dataset, you prepared it, you set it operational, and you are associated with the benchmark that contains a model that requires confidential computing. +Note: associate your dataset with the new benchmark TODO. + +## Configure you google cloud environment locally + +You need to: + +- Install the gcloud CLI () +- authenticate: `gcloud auth login` +- set project ID: `gcloud config set project PROJECT_ID` +- run `gcloud auth application-default login` + +## Start the web UI and login + +run `medperf_webui` +click on the `login` button. + +## Get a certificate + +Navigate to the `settings page` in the web UI and scroll down to the `Certificate Settings` section. 
If you already have a certificate, skip this step. Otherwise, click the button to get a certificate and follow the steps. + +## Configure your cloud environment information in MedPerf + +You should have recieved the following information from your google cloud administrator: + +- Project ID +- Project Number +- Bucket +- Keyring Name +- Key Name +- Key Location +- Workload Identity Pool +- Workload Identity Provider +- Service Account Name +- VM Zone +- VM Name + +You will use this information to configure your Medperf client. + +### Configure your confidential VM settings + +Navigate to the `settings` page in the web UI, scroll down to the `Confidential Computing Operator Settings`, check the box to `Configure confidential Computing` and fill in the required information. After that, click `Apply Changes`. + +### Configure your Dataset cloud resources settings + +Navigate to your dataset page (Datasets tab, then your dataset name. You can click `mine_only` to view only yours). + +Then, scroll down to the section `Confidential Computing Preferences`. Check the box to `Configure dataset for Confidential Computing` and fill in the required information. After that, click `Apply Changes`. + +After the changes are updated, there will be a new button `Sync CC policy`. Click on that button and wait for it to finish. After this, you are ready to click the `Run` button on the confidential model. +Make sure you only click on the confidential model's button, don't clikc `Run all`. Inference and metrics calcualtion will take time, so only run the relevant model. 
diff --git a/mkdocs.yml b/mkdocs.yml index 86dc61c03..162d2cb57 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -30,6 +30,7 @@ nav: - Transferring to another Machine: concepts/import_export_data.md - Encrypted Models: concepts/encrypted_models.md - Certificates: concepts/certificates.md + - Confidential Computing: concepts/confidential_computing.md # - Benchmark Associations: concepts/associations.md # - Model Priority: concepts/priorities.md # - Running Specific Models: concepts/single_run.md From 8733801696b8506f7dca77a8c6ed4d77a2a62110 Mon Sep 17 00:00:00 2001 From: hasan7n Date: Mon, 16 Mar 2026 04:57:24 +0100 Subject: [PATCH 4/6] update docs --- docs/concepts/confidential_computing.md | 57 ++++++++++++++++--------- examples/cc/admin_scripts/README.md | 33 ++++++++------ 2 files changed, 57 insertions(+), 33 deletions(-) diff --git a/docs/concepts/confidential_computing.md b/docs/concepts/confidential_computing.md index 186e7a574..341975d34 100644 --- a/docs/concepts/confidential_computing.md +++ b/docs/concepts/confidential_computing.md @@ -1,29 +1,29 @@ # Configuring confidential Computing -This guide assumes that you already have a registered dataset, you prepared it, you set it operational, and you are associated with the benchmark that contains a model that requires confidential computing. -Note: associate your dataset with the new benchmark TODO. +## Overview -## Configure you google cloud environment locally +You are a data owner. You already have a registered, prepared, operational dataset. You already associated your dataset with the benchmark that contains a model that requires confidential computing. +This guide helps you configure the MedPerf client to run a confidential computing model on your dataset in the google cloud environment. 
-You need to: +## Start the web UI and login -- Install the gcloud CLI () -- authenticate: `gcloud auth login` -- set project ID: `gcloud config set project PROJECT_ID` -- run `gcloud auth application-default login` +Make sure you have MedPerf installed. -## Start the web UI and login +Run the command `medperf_webui` on your terminal to start the local web user interface. -run `medperf_webui` -click on the `login` button. +In the web UI, login by clicking on the `login` button and follow the required steps. ## Get a certificate -Navigate to the `settings page` in the web UI and scroll down to the `Certificate Settings` section. If you already have a certificate, skip this step. Otherwise, click the button to get a certificate and follow the steps. +1. Navigate to the `settings` page +2. Scroll down to the `Certificate Settings` section. +3. If you already have a certificate, skip this step. Otherwise, click the button and follow the required steps to get a certificate. + +Note: you may see a status `to be uploaded`. No need to upload your certificate for this use case. ## Configure your cloud environment information in MedPerf -You should have recieved the following information from your google cloud administrator: +Ask your cloud administrator for the following information: - Project ID - Project Number @@ -39,15 +39,32 @@ You should have recieved the following information from your google cloud admini You will use this information to configure your Medperf client. -### Configure your confidential VM settings +### Set up google cloud CLI + +Note: This step should be done in a terminal. + +1. Install the gcloud CLI (). Follow only the two sections about installing the CLI and initializing google cloud. +2. Run `gcloud auth list` and make sure your account is active (an asterisk should be next to your account email) +3. Set the project ID by running the command `gcloud config set project PROJECT_ID` where `PROJECT_ID` is the project ID you got from your cloud admin. 
+4. Run the following command `gcloud auth application-default login` and follow the required steps. + +### Configure Medperf with your confidential VM settings -Navigate to the `settings` page in the web UI, scroll down to the `Confidential Computing Operator Settings`, check the box to `Configure confidential Computing` and fill in the required information. After that, click `Apply Changes`. +1. Navigate to the `settings` page in the web UI +2. Scroll down to the `Confidential Computing Operator Settings` +3. Check the box `Configure confidential Computing` +4. Fill in the required information. +5. Click `Apply Changes`. -### Configure your Dataset cloud resources settings +### Configure Medperf with your Dataset cloud resources settings -Navigate to your dataset page (Datasets tab, then your dataset name. You can click `mine_only` to view only yours). +1. Navigate to your dataset dashboard (Click on the `Datasets` tab, then find your dataset. You can click `mine_only` to view only your datasets.) +2. Scroll down to the section `Confidential Computing Preferences`. +3. Check the box `Configure dataset for Confidential Computing` +4. Fill in the required information. +5. Click `Apply Changes`. +6. After step 5, a new button will appear. Click on the new button `Sync CC policy`. -Then, scroll down to the section `Confidential Computing Preferences`. Check the box to `Configure dataset for Confidential Computing` and fill in the required information. After that, click `Apply Changes`. +## What's next? -After the changes are updated, there will be a new button `Sync CC policy`. Click on that button and wait for it to finish. After this, you are ready to click the `Run` button on the confidential model. -Make sure you only click on the confidential model's button, don't clikc `Run all`. Inference and metrics calcualtion will take time, so only run the relevant model. 
+You can now run the model that requires confidential computing, by clicking the button `Run` near the model of interest. After execution finishes, submit the results by clicking the `Submit` button that will later appear. diff --git a/examples/cc/admin_scripts/README.md b/examples/cc/admin_scripts/README.md index fc37ddb56..15bb4c602 100644 --- a/examples/cc/admin_scripts/README.md +++ b/examples/cc/admin_scripts/README.md @@ -2,25 +2,38 @@ ## For GCP Project Admin +Context: You will be creating required resources for the data user in order to allow them to use MedPerf to run inference on their dataset in a confidential virtual machine on google cloud. Here is what will happen behind the scenes when the user uses MedPerf to run a confidential computing workload; this will help understand the reason behind the resources and user roles being asked for. + +Medperf will: + +1. Encrypt the dataset using a locally generated key. +2. Encrypt the key using cloud KMS +3. Upload the encrypted dataset and the encrypted key to the cloud bucket. +4. Update the workload identity pool OIDC provider with relevant attribute conditions and configure it to bind certain attestation claims to identities. +5. Update the IAM policy of the bucket and of the KMS to only allow a confidential computing workload with certain attestation claims to get the encrypted data and to use the KMS to decrypt. +6. Update the provisioned virtual machine with relevant metadata (e.g., docker container) +7. Start the virtual machine, which will at the end write results to the bucket +8. Stream logs from the virtual machine serial port. +9. Download results from the bucket to the local machine. + ### Quotas You will be creating: -- bucket +- a bucket - a KMS HSM key -- a workload identity pool +- a workload identity pool and an OIDC provider. - a service account - a GPU-based confidential VM (machine type: a3-highgpu-1g). 
To view zones where this machine type is available, visit and look for availability of "A3 High". -You will need to make sure you have enough quota for nvidia h100 GPUs: - -- visit and read sections "Request preemptible quota" and "Request global quota"; make sure you have these quotas. only 1 GPU is needed. +You will need to make sure you have enough quota for the resources mentioned above. +Additionally, `a3-highgpu-1g` machines use Nvidia H100 GPUs. Visit and read sections "Request preemptible quota" and "Request global quota" to make sure you have enough quota. Only 1 GPU is needed. ### Creating resources -Note: a script `admin.sh` can be found in this folder. You can configure the constants (e.g., project id, names of the resources to be created, etc...), run the script in cloud shell, and you are done. It will print at the end the information needed to be passed to the user. +Note: a script `admin.sh` can be found in this folder. You can configure the constants (e.g., project id, names of the resources to be created, etc...), run the script in cloud shell, and you are done. It will print at the end the information needed to be passed to the user. You can also export the constants and then run the commands one by one. -If you want to create resources manually, follow the instructions below. +If you want to create resources manually using the google cloud console, follow the instructions below. Note however that there are some steps that can't be done using the console and should be run as commands using the gcloud CLI. #### Resources for Hosting the dataset and managing access @@ -63,9 +76,3 @@ If you want to create resources manually, follow the instructions below. - Create a VM - You should use the command given in the `admin.sh` script to create the VM. Run it in the cloud shell. - grant the user "roles/compute.instanceAdmin.v1" role on the VM. 
- -## For users - -login -set project -run `gcloud auth application-default login` From c3f4252a10598029fa18040ff7de5421ee44ea1c Mon Sep 17 00:00:00 2001 From: hasan7n Date: Mon, 16 Mar 2026 05:17:37 +0100 Subject: [PATCH 5/6] fix bug when multiple benchmarks are present --- cli/medperf/commands/cc/dataset_update_cc_policy.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/cli/medperf/commands/cc/dataset_update_cc_policy.py b/cli/medperf/commands/cc/dataset_update_cc_policy.py index 69e408b0d..427323869 100644 --- a/cli/medperf/commands/cc/dataset_update_cc_policy.py +++ b/cli/medperf/commands/cc/dataset_update_cc_policy.py @@ -44,6 +44,8 @@ def get_permitted_workloads(dataset: Dataset): model_assocs = config.comms.get_benchmark_models_associations(benchmark_id) for model_assoc in model_assocs: model = Model.get(model_assoc["model"]) + if not model.requires_cc(): + continue asset = model.asset_obj workload_info = CCWorkloadID( data_hash=dataset.generated_uid, From a4bd7ccfe5581dfbd45630853e4ba5fcb94cab6d Mon Sep 17 00:00:00 2001 From: hasan7n Date: Mon, 16 Mar 2026 19:40:06 +0100 Subject: [PATCH 6/6] update admin docs --- examples/cc/admin_scripts/README.md | 2 +- examples/cc/admin_scripts/admin.sh | 26 +++++++++++++------------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/examples/cc/admin_scripts/README.md b/examples/cc/admin_scripts/README.md index 15bb4c602..5ff1c31bf 100644 --- a/examples/cc/admin_scripts/README.md +++ b/examples/cc/admin_scripts/README.md @@ -2,7 +2,7 @@ ## For GCP Project Admin -Context: You will be creating required resources for the data user in order to allow them to use MedPerf to run inference on their dataset in a confidential virtual machine on google cloud. Here is what will happen behind the scenes when the user uses MedPerf to run a confidential computing workload; this will help understand the reason behind the resources and user roles being asked for. 
+Context: The user will use the MedPerf client on the local machine where the data resides. You (IT/cloud admin) will be creating required resources for the data user in order to allow them to use MedPerf to run inference on their dataset in a confidential virtual machine on google cloud. Here is what will happen behind the scenes when the user uses MedPerf to run a confidential computing workload; this will help understand the reason behind the resources and user roles being asked for. Medperf will: diff --git a/examples/cc/admin_scripts/admin.sh b/examples/cc/admin_scripts/admin.sh index a4b7dd652..5a797ab4d 100644 --- a/examples/cc/admin_scripts/admin.sh +++ b/examples/cc/admin_scripts/admin.sh @@ -4,32 +4,32 @@ set -eo pipefail #################################################### # Project ID -export PROJECT_ID="medperf-330914" +export PROJECT_ID="project_id" # User email -export USER_EMAIL="hasan.gcptest@gmail.com" +export USER_EMAIL="user@example.com" # New service account name to create -export SERVICE_ACCOUNT_NAME="cc-test" +export SERVICE_ACCOUNT_NAME="sa_name" # New KMS info to create -export KEYRING_NAME="data-owner-keyring" -export KEY_NAME="data-owner-cc-key" -export KEY_LOCATION="global" +export KEYRING_NAME="keyring_name" +export KEY_NAME="key_name" +export KEY_LOCATION="key_location" # e.g., us-central1, europe-west3, ... # New Workload identity pool and OIDC provider info to create -export WIP_ID="test1" +export WIP_ID="wip_name" export WIP_PROVIDER_ID="attestation-verifier" # New bucket info to create -export BUCKET_NAME="medperf-bucket" -export BUCKET_LOCATION="us-central1" +export BUCKET_NAME="bucket_name" # bucket names are globally unique, please use a unique name +export BUCKET_LOCATION="bucket_location" # e.g., us-central1, europe-west3, ... 
# New virtual machine info to create -export VM_NAME="gputestdebug" -export BOOT_DISK_SIZE="500GB" -export VM_ZONE="us-central1-a" -export VM_NETWORK="medperf-brats-network" # default is usually "default" +export VM_NAME="vm_name" +export BOOT_DISK_SIZE="500GB" # adjust as needed, depends on the data size +export VM_ZONE="vm_zone" # e.g., us-central1-a, europe-west4-c, ... +export VM_NETWORK="default" # Usually the default network name is "default", but adjust if you have a custom network setup #################################################### #################### End Config ####################