From 77257320a95958de468011a8cacb9d8ffa6a45ef Mon Sep 17 00:00:00 2001 From: Pranav Iyer Date: Thu, 13 Nov 2025 13:16:47 -0800 Subject: [PATCH 01/23] Included changes for python script for custom credential suppliers. --- auth/custom-credentials/aws/README.md | 131 ++++++++++++++++ .../aws/requirements-test.txt | 2 + auth/custom-credentials/aws/requirements.txt | 3 + auth/custom-credentials/aws/snippets.py | 120 +++++++++++++++ auth/custom-credentials/aws/snippets_test.py | 111 ++++++++++++++ auth/custom-credentials/okta/README.md | 84 +++++++++++ .../okta/requirements-test.txt | 2 + auth/custom-credentials/okta/requirements.txt | 3 + auth/custom-credentials/okta/snippets.py | 140 ++++++++++++++++++ auth/custom-credentials/okta/snippets_test.py | 108 ++++++++++++++ 10 files changed, 704 insertions(+) create mode 100644 auth/custom-credentials/aws/README.md create mode 100644 auth/custom-credentials/aws/requirements-test.txt create mode 100644 auth/custom-credentials/aws/requirements.txt create mode 100644 auth/custom-credentials/aws/snippets.py create mode 100644 auth/custom-credentials/aws/snippets_test.py create mode 100644 auth/custom-credentials/okta/README.md create mode 100644 auth/custom-credentials/okta/requirements-test.txt create mode 100644 auth/custom-credentials/okta/requirements.txt create mode 100644 auth/custom-credentials/okta/snippets.py create mode 100644 auth/custom-credentials/okta/snippets_test.py diff --git a/auth/custom-credentials/aws/README.md b/auth/custom-credentials/aws/README.md new file mode 100644 index 00000000000..1c2c9fa0ef1 --- /dev/null +++ b/auth/custom-credentials/aws/README.md @@ -0,0 +1,131 @@ +# Running the Custom Credential Supplier Sample + +If you want to use AWS security credentials that cannot be retrieved using methods supported natively by the [google-auth](https://github.com/googleapis/google-auth-library-python) library, a custom `AwsSecurityCredentialsSupplier` implementation may be specified. The supplier must return valid, unexpired AWS security credentials when called by the GCP credential. + +This sample demonstrates how to use **Boto3** (the AWS SDK for Python) as a custom supplier to bridge AWS credentials—from sources like EKS IRSA, ECS, or Fargate—to Google Cloud Workload Identity. + +## Running Locally + +To run the sample on your local system, you need to install the dependencies and configure your AWS and GCP credentials as environment variables. + +### 1. Install Dependencies + +Ensure you have Python installed, then install the required libraries: + +```bash +pip install -r requirements.txt +``` + +### 2. Set Environment Variables + +```bash +export AWS_ACCESS_KEY_ID="YOUR_AWS_ACCESS_KEY_ID" +export AWS_SECRET_ACCESS_KEY="YOUR_AWS_SECRET_ACCESS_KEY" +export AWS_REGION="YOUR_AWS_REGION" # e.g., us-east-1 +export GCP_WORKLOAD_AUDIENCE="YOUR_GCP_WORKLOAD_AUDIENCE" +export GCS_BUCKET_NAME="YOUR_GCS_BUCKET_NAME" + +# Optional: If you want to use service account impersonation +export GCP_SERVICE_ACCOUNT_IMPERSONATION_URL="YOUR_GCP_SERVICE_ACCOUNT_IMPERSONATION_URL" +``` + +### 3. Run the Script + +```bash +python3 snippets.py +``` + +## Running in a Containerized Environment (EKS) + +This section provides a brief overview of how to run the sample in an Amazon EKS cluster. + +### 1. EKS Cluster Setup + +First, you need an EKS cluster. You can create one using `eksctl` or the AWS Management Console. 
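+For example, a basic cluster can be created with a single `eksctl` command; the cluster name and region below are placeholders for your own values:
+
+```bash
+eksctl create cluster --name your-cluster-name --region your-aws-region
+```
+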
For detailed instructions, refer to the [Amazon EKS documentation](https://docs.aws.amazon.com/eks/latest/userguide/create-cluster.html). + +### 2. Configure IAM Roles for Service Accounts (IRSA) + +IRSA allows you to associate an IAM role with a Kubernetes service account. This provides a secure way for your pods to access AWS services without hardcoding long-lived credentials. + +- Create an IAM OIDC provider for your cluster. +- Create an IAM role and policy that grants the necessary AWS permissions. +- Associate the IAM role with a Kubernetes service account. + +For detailed steps, see the [IAM Roles for Service Accounts](https://docs.aws.amazon.com/eks/latest/userguide/iam-roles-for-service-accounts.html) documentation. + +### 3. Configure GCP to Trust the AWS Role + +You need to configure your GCP project to trust the AWS IAM role you created. This is done by creating a Workload Identity Pool and Provider in GCP. + +- Create a Workload Identity Pool. +- Create a Workload Identity Provider that trusts the AWS role ARN. +- Grant the GCP service account the necessary permissions. + +### 4. Containerize and Package the Application + +Create a `Dockerfile` for the Python application and push the image to a container registry (e.g., Amazon ECR) that your EKS cluster can access. + +**Dockerfile** +```Dockerfile +FROM python:3.11-slim + +WORKDIR /app + +# Copy requirements and install dependencies +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy the script +COPY snippets.py . + +# Run the script +CMD ["python3", "snippets.py"] +``` + +Build and push the image: +```bash +docker build -t your-container-image:latest . +docker push your-container-image:latest +``` + +### 5. Deploy to EKS + +Create a Kubernetes deployment manifest (`pod.yaml`) to deploy your application to the EKS cluster. + +**pod.yaml** +```yaml +apiVersion: v1 +kind: Pod +metadata: + name: custom-credential-pod +spec: + serviceAccountName: your-k8s-service-account # The service account associated with the AWS IAM role + containers: + - name: gcp-auth-sample + image: your-container-image:latest # Your image from ECR + env: + # AWS_REGION is often required for Boto3 to initialize correctly in containers + - name: AWS_REGION + value: "your-aws-region" + - name: GCP_WORKLOAD_AUDIENCE + value: "your-gcp-workload-audience" + # Optional: If you want to use service account impersonation + # - name: GCP_SERVICE_ACCOUNT_IMPERSONATION_URL + # value: "your-gcp-service-account-impersonation-url" + - name: GCS_BUCKET_NAME + value: "your-gcs-bucket-name" +``` + +Deploy the pod: + +```bash +kubectl apply -f pod.yaml +``` + +### 6. Clean Up + +To clean up the resources, delete the EKS cluster and any other AWS and GCP resources you created. 
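+On the Google Cloud side, the Workload Identity Pool created for this sample can be deleted with `gcloud`; the pool ID below is a placeholder:
+
+```bash
+gcloud iam workload-identity-pools delete your-pool-id --location=global
+```
+
+To delete the EKS cluster: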
+ +```bash +eksctl delete cluster --name your-cluster-name +``` \ No newline at end of file diff --git a/auth/custom-credentials/aws/requirements-test.txt b/auth/custom-credentials/aws/requirements-test.txt new file mode 100644 index 00000000000..43b24059d3e --- /dev/null +++ b/auth/custom-credentials/aws/requirements-test.txt @@ -0,0 +1,2 @@ +-r requirements.txt +pytest==8.2.0 diff --git a/auth/custom-credentials/aws/requirements.txt b/auth/custom-credentials/aws/requirements.txt new file mode 100644 index 00000000000..c3ed82574c8 --- /dev/null +++ b/auth/custom-credentials/aws/requirements.txt @@ -0,0 +1,3 @@ +boto3==1.40.53 +google-auth==2.43.0 +python-dotenv==1.1.1 diff --git a/auth/custom-credentials/aws/snippets.py b/auth/custom-credentials/aws/snippets.py new file mode 100644 index 00000000000..0c0a289dd02 --- /dev/null +++ b/auth/custom-credentials/aws/snippets.py @@ -0,0 +1,120 @@ +# Copyright 2025 Google LLC +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# [START auth_custom_credential_supplier_aws] +import json +import os +import sys + +import boto3 +from google.auth import aws +from google.auth import exceptions +from google.auth.transport import requests as auth_requests + + +class CustomAwsSupplier(aws.AwsSecurityCredentialsSupplier): + """Custom AWS Security Credentials Supplier using Boto3.""" + + def __init__(self): + """Initializes the Boto3 session, prioritizing environment variables for region.""" + # Explicitly read the region from the environment first. This ensures that + # a value from a .env file is picked up reliably for local testing. + region = os.getenv("AWS_REGION") or os.getenv("AWS_DEFAULT_REGION") + + # If region is None, Boto3's discovery chain will be used when needed. + self.session = boto3.Session(region_name=region) + self._cached_region = None + + def get_aws_region(self, context, request) -> str: + """Returns the AWS region using Boto3's default provider chain.""" + if self._cached_region: + return self._cached_region + + # Accessing region_name will use the value from the constructor if provided, + # otherwise it triggers Boto3's lazy-loading discovery (e.g., metadata service). + self._cached_region = self.session.region_name + + if not self._cached_region: + raise exceptions.GoogleAuthError("Boto3 was unable to resolve an AWS region.") + + return self._cached_region + + def get_aws_security_credentials(self, context, request=None) -> aws.AwsSecurityCredentials: + """Retrieves AWS security credentials using Boto3's default provider chain.""" + creds = self.session.get_credentials() + if not creds: + raise exceptions.GoogleAuthError("Unable to resolve AWS credentials from Boto3.") + + return aws.AwsSecurityCredentials( + access_key_id=creds.access_key, + secret_access_key=creds.secret_key, + session_token=creds.token, + ) + + +def authenticate_with_aws_credentials(bucket_name, audience, impersonation_url): + """Authenticates using the custom AWS supplier and lists bucket metadata.""" + + # 1. Instantiate the custom supplier. 
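+    # The supplier resolves AWS credentials and the region lazily through Boto3's
+    # default provider chain (environment variables, shared config, IRSA, ECS or
+    # EC2 instance metadata).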
+ custom_supplier = CustomAwsSupplier() + + # 2. Instantiate the AWS Credentials object. + # This object handles the exchange of AWS credentials for Google credentials. + credentials = aws.Credentials( + audience=audience, + subject_token_type="urn:ietf:params:aws:token-type:aws4_request", + service_account_impersonation_url=impersonation_url, + aws_security_credentials_supplier=custom_supplier, + scopes=['https://www.googleapis.com/auth/devstorage.read_write'], + ) + + # 3. Create an authenticated session. + authed_session = auth_requests.AuthorizedSession(credentials) + + # 4. Make the API Request. + bucket_url = f"https://storage.googleapis.com/storage/v1/b/{bucket_name}" + print(f"Request URL: {bucket_url}") + + response = authed_session.get(bucket_url) + response.raise_for_status() + + print("\n--- SUCCESS! ---") + print(json.dumps(response.json(), indent=2)) + +# [END auth_custom_credential_supplier_aws] + +from dotenv import load_dotenv + +def main(): + """Main function to parse env vars and call the authenticator.""" + load_dotenv() + print("--- Starting Script ---") + + gcp_audience = os.getenv("GCP_WORKLOAD_AUDIENCE") + sa_impersonation_url = os.getenv("GCP_SERVICE_ACCOUNT_IMPERSONATION_URL") + gcs_bucket_name = os.getenv("GCS_BUCKET_NAME") + + if not all([gcp_audience, gcs_bucket_name]): + print("[ERROR] Missing required environment variables.", file=sys.stderr) + print("Required: GCP_WORKLOAD_AUDIENCE, GCS_BUCKET_NAME", file=sys.stderr) + sys.exit(1) + + try: + authenticate_with_aws_credentials(gcs_bucket_name, gcp_audience, sa_impersonation_url) + except Exception as e: + print(f"[ERROR] Authentication or Request failed: {e}", file=sys.stderr) + sys.exit(1) + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/auth/custom-credentials/aws/snippets_test.py b/auth/custom-credentials/aws/snippets_test.py new file mode 100644 index 00000000000..cfdb6f38240 --- /dev/null +++ b/auth/custom-credentials/aws/snippets_test.py @@ -0,0 +1,111 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import unittest +from unittest.mock import MagicMock, patch + +from google.auth import exceptions +import requests + +# Import the module to be tested. +# NOTE: Update 'main' to the actual filename if different. 
+import snippets as app_module + +class TestCustomAwsSupplier(unittest.TestCase): + + @patch.dict(os.environ, {"AWS_REGION": "us-west-2"}) + @patch("boto3.Session") + def test_init_priority_env_var(self, mock_boto_session): + """Test that AWS_REGION env var takes priority during init.""" + app_module.CustomAwsSupplier() + # Verify boto3.Session was initialized with the region from env + mock_boto_session.assert_called_with(region_name="us-west-2") + + + @patch.dict(os.environ, {}, clear=True) + @patch("boto3.Session") + def test_get_aws_region_missing(self, mock_boto_session): + """Test that an error is raised if region cannot be resolved.""" + mock_session_instance = mock_boto_session.return_value + # Simulate Boto3 failing to find a region + mock_session_instance.region_name = None + + supplier = app_module.CustomAwsSupplier() + + with self.assertRaisesRegex(exceptions.GoogleAuthError, "unable to resolve an AWS region"): + supplier.get_aws_region(None, None) + + @patch("boto3.Session") + def test_get_aws_security_credentials_success(self, mock_boto_session): + """Test successful retrieval of AWS credentials.""" + mock_session_instance = mock_boto_session.return_value + + # Mock the credentials object returned by boto3 + mock_creds = MagicMock() + mock_creds.access_key = "test-access-key" + mock_creds.secret_key = "test-secret-key" + mock_creds.token = "test-session-token" + mock_session_instance.get_credentials.return_value = mock_creds + + supplier = app_module.CustomAwsSupplier() + creds = supplier.get_aws_security_credentials(None) + + self.assertEqual(creds.access_key_id, "test-access-key") + self.assertEqual(creds.secret_access_key, "test-secret-key") + self.assertEqual(creds.session_token, "test-session-token") + + @patch("boto3.Session") + def test_get_aws_security_credentials_none(self, mock_boto_session): + """Test handling when Boto3 returns no credentials.""" + mock_session_instance = mock_boto_session.return_value + mock_session_instance.get_credentials.return_value = None + + supplier = app_module.CustomAwsSupplier() + + with self.assertRaisesRegex(exceptions.GoogleAuthError, "Unable to resolve AWS credentials"): + supplier.get_aws_security_credentials(None) + + +class TestAuthenticateLogic(unittest.TestCase): + + @patch("snippets.auth_requests.AuthorizedSession") + @patch("snippets.aws.Credentials") + @patch("snippets.CustomAwsSupplier") + def test_authenticate_success(self, MockSupplier, MockAwsCreds, MockSession): + """Test the success path of the main logic function.""" + # Mock the HTTP response + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.json.return_value = {"kind": "storage#bucket", "name": "my-bucket"} + + mock_session_instance = MockSession.return_value + mock_session_instance.get.return_value = mock_response + + # Run the function + app_module.authenticate_with_aws_credentials( + bucket_name="my-bucket", + audience="//iam.googleapis.com/...", + impersonation_url="https://..." 
+ ) + + # Assertions + MockSupplier.assert_called_once() + MockAwsCreds.assert_called_once() + # Verify bucket URL was constructed correctly + mock_session_instance.get.assert_called_with("https://storage.googleapis.com/storage/v1/b/my-bucket") + mock_response.raise_for_status.assert_called_once() + +if __name__ == "__main__": + unittest.main() \ No newline at end of file diff --git a/auth/custom-credentials/okta/README.md b/auth/custom-credentials/okta/README.md new file mode 100644 index 00000000000..147a7fb8ef4 --- /dev/null +++ b/auth/custom-credentials/okta/README.md @@ -0,0 +1,84 @@ +Here is the adapted `README.md` for the Python version of the Okta Custom Credential Supplier sample. + +# Running the Custom Okta Credential Supplier Sample (Python) + +If you want to use OIDC or SAML 2.0 tokens that cannot be retrieved using methods supported natively by the [google-auth](https://github.com/googleapis/google-auth-library-python) library, a custom `SubjectTokenSupplier` implementation may be specified when creating an identity pool client. The supplier must return a valid, unexpired subject token when called by the GCP credential. + +This document provides instructions on how to run the custom Okta credential supplier sample using Python. + +## 1. Okta Configuration + +Before running the sample, you need to configure an Okta application for Machine-to-Machine (M2M) communication. + +### Create an M2M Application in Okta + +1. Log in to your Okta developer console. +2. Navigate to **Applications** > **Applications** and click **Create App Integration**. +3. Select **API Services** as the sign-on method and click **Next**. +4. Give your application a name and click **Save**. + +### Obtain Okta Credentials + +Once the application is created, you will find the following information in the **General** tab: + +* **Okta Domain**: Your Okta developer domain (e.g., `https://dev-123456.okta.com`). +* **Client ID**: The client ID for your application. +* **Client Secret**: The client secret for your application. + +You will need these values to configure the sample. + +## 2. GCP Configuration + +You need to configure a Workload Identity Pool in GCP to trust the Okta application. + +### Set up Workload Identity Federation + +1. In the Google Cloud Console, navigate to **IAM & Admin** > **Workload Identity Federation**. +2. Click **Create Pool** to create a new Workload Identity Pool. +3. Add a new **OIDC provider** to the pool. +4. Configure the provider with your Okta domain as the issuer URL. +5. Map the Okta `sub` (subject) assertion to a GCP principal. + +For detailed instructions, refer to the [Workload Identity Federation documentation](https://cloud.google.com/iam/docs/workload-identity-federation). + +### GCS Bucket + +Ensure you have a GCS bucket that the authenticated user will have access to. You will need the name of this bucket to run the sample. + +## 3. Running the Script + +To run the sample on your local system, you need to install the dependencies and configure the environment variables. 
+ +### Install Dependencies + +```bash +pip install -r requirements.txt +``` + +### Set Environment Variables + +The script relies on the `GCP_WORKLOAD_AUDIENCE` variable, which typically follows this format: +`//iam.googleapis.com/projects/YOUR_PROJECT_NUMBER/locations/global/workloadIdentityPools/YOUR_POOL_ID/providers/YOUR_PROVIDER_ID` + +```bash +# Okta Configuration +export OKTA_DOMAIN="https://your-okta-domain.okta.com" +export OKTA_CLIENT_ID="your-okta-client-id" +export OKTA_CLIENT_SECRET="your-okta-client-secret" + +# GCP Configuration +export GCP_WORKLOAD_AUDIENCE="//iam.googleapis.com/projects/123456789/locations/global/workloadIdentityPools/my-pool/providers/my-provider" +export GCS_BUCKET_NAME="your-gcs-bucket-name" + +# Optional: Service Account Impersonation +# If set, the script will exchange the federated token for a Google Service Account token. +export GCP_SERVICE_ACCOUNT_IMPERSONATION_URL="https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/my-service-account@my-project.iam.gserviceaccount.com:generateAccessToken" +``` + +### Run the Application + +```bash +python3 snippets.py +``` + +The script will then authenticate with Okta to get an OIDC token, exchange that token for a GCP federated token (and optionally a Service Account token), and use it to list metadata for the specified GCS bucket. \ No newline at end of file diff --git a/auth/custom-credentials/okta/requirements-test.txt b/auth/custom-credentials/okta/requirements-test.txt new file mode 100644 index 00000000000..f47609d2651 --- /dev/null +++ b/auth/custom-credentials/okta/requirements-test.txt @@ -0,0 +1,2 @@ +-r requirements.txt +pytest==7.1.2 diff --git a/auth/custom-credentials/okta/requirements.txt b/auth/custom-credentials/okta/requirements.txt new file mode 100644 index 00000000000..ca2a46a5a41 --- /dev/null +++ b/auth/custom-credentials/okta/requirements.txt @@ -0,0 +1,3 @@ +requests==2.32.3 +google-auth==2.43.0 +python-dotenv==1.1.1 \ No newline at end of file diff --git a/auth/custom-credentials/okta/snippets.py b/auth/custom-credentials/okta/snippets.py new file mode 100644 index 00000000000..d393515a841 --- /dev/null +++ b/auth/custom-credentials/okta/snippets.py @@ -0,0 +1,140 @@ +# Copyright 2025 Google LLC +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# [START auth_custom_credential_supplier_okta] +import json +import os +import sys +import time +import urllib.parse + +import requests +from google.auth import identity_pool +from google.auth.transport import requests as auth_requests + +class OktaClientCredentialsSupplier: + """A custom SubjectTokenSupplier that authenticates with Okta. + + This supplier uses the Client Credentials grant flow for machine-to-machine + (M2M) authentication with Okta. 
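+    The fetched access token is cached and refreshed shortly before it expires.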
+ """ + + def __init__(self, domain, client_id, client_secret): + self.okta_token_url = f"{domain.rstrip('/')}/oauth2/default/v1/token" + self.client_id = client_id + self.client_secret = client_secret + self.access_token = None + self.expiry_time = 0 + + def get_subject_token(self, context, request=None) -> str: + """Fetches a new token if the current one is expired or missing.""" + # Check if the current token is still valid (with a 60-second buffer). + if self.access_token and time.time() < self.expiry_time - 60: + return self.access_token + + print("[Supplier] Fetching new Okta Access token...") + self._fetch_okta_access_token() + return self.access_token + + def _fetch_okta_access_token(self): + """Performs the Client Credentials grant flow with Okta.""" + headers = { + "Content-Type": "application/x-www-form-urlencoded", + "Accept": "application/json", + } + data = { + "grant_type": "client_credentials", + "scope": "gcp.test.read", # Set scope as per Okta app config. + } + + response = requests.post( + self.okta_token_url, + headers=headers, + data=urllib.parse.urlencode(data), + auth=(self.client_id, self.client_secret), + ) + response.raise_for_status() + + token_data = response.json() + self.access_token = token_data["access_token"] + self.expiry_time = time.time() + token_data["expires_in"] + + +def authenticate_with_okta_credentials( + bucket_name, audience, domain, client_id, client_secret, impersonation_url +): + """Authenticates using the custom Okta supplier and lists bucket metadata.""" + + # 1. Instantiate the custom supplier. + okta_supplier = OktaClientCredentialsSupplier(domain, client_id, client_secret) + + # 2. Instantiate the IdentityPoolClient. + # This client exchanges the Okta token for a Google Federated Token. + client = identity_pool.Credentials( + audience=audience, + subject_token_type="urn:ietf:params:oauth:token-type:jwt", + token_url="https://sts.googleapis.com/v1/token", + subject_token_supplier=okta_supplier, + default_scopes=["https://www.googleapis.com/auth/devstorage.read_write"], + service_account_impersonation_url=impersonation_url, + ) + + # 3. Create an authenticated session. + authed_session = auth_requests.AuthorizedSession(client) + + # 4. Make the API Request. + bucket_url = f"https://storage.googleapis.com/storage/v1/b/{bucket_name}" + print(f"Request URL: {bucket_url}") + + response = authed_session.get(bucket_url) + response.raise_for_status() + + print("\n--- SUCCESS! 
---") + print(json.dumps(response.json(), indent=2)) + +# [END auth_custom_credential_supplier_okta] + +from dotenv import load_dotenv + +def main(): + load_dotenv() + + # Configuration + gcp_audience = os.getenv("GCP_WORKLOAD_AUDIENCE") + gcs_bucket_name = os.getenv("GCS_BUCKET_NAME") + # Optional: Service Account Impersonation + sa_impersonation_url = os.getenv("GCP_SERVICE_ACCOUNT_IMPERSONATION_URL") + + okta_domain = os.getenv("OKTA_DOMAIN") + okta_client_id = os.getenv("OKTA_CLIENT_ID") + okta_client_secret = os.getenv("OKTA_CLIENT_SECRET") + + if not all([gcp_audience, gcs_bucket_name, okta_domain, okta_client_id, okta_client_secret]): + print("[ERROR] Missing required environment variables.", file=sys.stderr) + sys.exit(1) + + try: + authenticate_with_okta_credentials( + bucket_name=gcs_bucket_name, + audience=gcp_audience, + domain=okta_domain, + client_id=okta_client_id, + client_secret=okta_client_secret, + impersonation_url=sa_impersonation_url, + ) + except Exception as e: + print(f"[ERROR] Authentication or Request failed: {e}", file=sys.stderr) + sys.exit(1) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/auth/custom-credentials/okta/snippets_test.py b/auth/custom-credentials/okta/snippets_test.py new file mode 100644 index 00000000000..7a3d67dfa53 --- /dev/null +++ b/auth/custom-credentials/okta/snippets_test.py @@ -0,0 +1,108 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest +import urllib.parse +from unittest.mock import MagicMock, patch + +import snippets as app_module + + +class TestOktaClientCredentialsSupplier(unittest.TestCase): + + def setUp(self): + self.domain = "https://okta.example.com" + self.client_id = "test-id" + self.client_secret = "test-secret" + self.supplier = app_module.OktaClientCredentialsSupplier( + self.domain, self.client_id, self.client_secret + ) + + def test_init_url_cleaning(self): + """Test that the token URL strips trailing slashes.""" + # Case 1: Trailing slash + s1 = app_module.OktaClientCredentialsSupplier("https://okta.com/", "id", "sec") + self.assertEqual(s1.okta_token_url, "https://okta.com/oauth2/default/v1/token") + + # Case 2: No trailing slash + s2 = app_module.OktaClientCredentialsSupplier("https://okta.com", "id", "sec") + self.assertEqual(s2.okta_token_url, "https://okta.com/oauth2/default/v1/token") + + @patch("requests.post") + def test_get_subject_token_fetch(self, mock_post): + """Test fetching a new token from Okta.""" + # Mock the Okta response + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.json.return_value = { + "access_token": "new-token", + "expires_in": 3600 + } + mock_post.return_value = mock_response + + # Execute + token = self.supplier.get_subject_token(None, None) + + # Verify + self.assertEqual(token, "new-token") + self.assertEqual(self.supplier.access_token, "new-token") + + # Check that requests.post was called correctly + mock_post.assert_called_once() + args, kwargs = mock_post.call_args + self.assertEqual(kwargs["auth"], (self.client_id, self.client_secret)) + self.assertEqual(kwargs["headers"]["Content-Type"], "application/x-www-form-urlencoded") + + # The script encodes data using urllib, so we decode it to verify contents + sent_data = urllib.parse.parse_qs(kwargs["data"]) + self.assertEqual(sent_data["grant_type"][0], "client_credentials") + self.assertEqual(sent_data["scope"][0], "gcp.test.read") + + +class TestAuthenticationFlow(unittest.TestCase): + + @patch("snippets.auth_requests.AuthorizedSession") + @patch("snippets.identity_pool.Credentials") + @patch("snippets.OktaClientCredentialsSupplier") + def test_authenticate_success(self, MockSupplier, MockCreds, MockSession): + """Test the main logic flow for successful authentication.""" + # Setup Mocks + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.json.return_value = {"kind": "storage#bucket", "name": "test-bucket"} + + mock_session_instance = MockSession.return_value + mock_session_instance.get.return_value = mock_response + + # Execute + app_module.authenticate_with_okta_credentials( + bucket_name="test-bucket", + audience="test-aud", + domain="https://okta.com", + client_id="id", + client_secret="sec", + impersonation_url=None + ) + + # Verify + MockSupplier.assert_called_once() + MockCreds.assert_called_once() + mock_session_instance.get.assert_called_with( + "https://storage.googleapis.com/storage/v1/b/test-bucket" + ) + mock_response.raise_for_status.assert_called_once() + + +if __name__ == "__main__": + unittest.main() \ No newline at end of file From 95418ac9599b81ed3d5cd966daefaedd5ecfc68f Mon Sep 17 00:00:00 2001 From: Pranav Iyer Date: Thu, 13 Nov 2025 18:04:56 -0800 Subject: [PATCH 02/23] Made some test and format changes. 
--- auth/custom-credentials/aws/noxfile_config.py | 26 +++ auth/custom-credentials/aws/requirements.txt | 1 + auth/custom-credentials/aws/snippets.py | 63 +++--- auth/custom-credentials/aws/snippets_test.py | 195 +++++++++-------- .../custom-credentials/okta/noxfile_config.py | 26 +++ auth/custom-credentials/okta/snippets.py | 52 ++--- auth/custom-credentials/okta/snippets_test.py | 204 ++++++++++-------- 7 files changed, 326 insertions(+), 241 deletions(-) create mode 100644 auth/custom-credentials/aws/noxfile_config.py create mode 100644 auth/custom-credentials/okta/noxfile_config.py diff --git a/auth/custom-credentials/aws/noxfile_config.py b/auth/custom-credentials/aws/noxfile_config.py new file mode 100644 index 00000000000..6b43f01e2ea --- /dev/null +++ b/auth/custom-credentials/aws/noxfile_config.py @@ -0,0 +1,26 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +TEST_CONFIG_OVERRIDE = { + # Ignore all versions except 3.9, which is the version available. + "ignored_versions": ["2.7", "3.6", "3.7", "3.8", "3.10", "3.11", "3.12", "3.13"], + "envs": { + "AWS_ACCESS_KEY_ID": "", + "AWS_SECRET_ACCESS_KEY": "", + "AWS_REGION": "", + "GCP_WORKLOAD_AUDIENCE": "", + "GCS_BUCKET_NAME": "", + "GCP_SERVICE_ACCOUNT_IMPERSONATION_URL": "", + }, +} diff --git a/auth/custom-credentials/aws/requirements.txt b/auth/custom-credentials/aws/requirements.txt index c3ed82574c8..4a091fb39fb 100644 --- a/auth/custom-credentials/aws/requirements.txt +++ b/auth/custom-credentials/aws/requirements.txt @@ -1,3 +1,4 @@ boto3==1.40.53 google-auth==2.43.0 python-dotenv==1.1.1 +requests==2.32.3 diff --git a/auth/custom-credentials/aws/snippets.py b/auth/custom-credentials/aws/snippets.py index 0c0a289dd02..cfb87f9ad82 100644 --- a/auth/custom-credentials/aws/snippets.py +++ b/auth/custom-credentials/aws/snippets.py @@ -14,7 +14,6 @@ # [START auth_custom_credential_supplier_aws] import json import os -import sys import boto3 from google.auth import aws @@ -27,8 +26,7 @@ class CustomAwsSupplier(aws.AwsSecurityCredentialsSupplier): def __init__(self): """Initializes the Boto3 session, prioritizing environment variables for region.""" - # Explicitly read the region from the environment first. This ensures that - # a value from a .env file is picked up reliably for local testing. + # Explicitly read the region from the environment first. region = os.getenv("AWS_REGION") or os.getenv("AWS_DEFAULT_REGION") # If region is None, Boto3's discovery chain will be used when needed. @@ -40,20 +38,24 @@ def get_aws_region(self, context, request) -> str: if self._cached_region: return self._cached_region - # Accessing region_name will use the value from the constructor if provided, - # otherwise it triggers Boto3's lazy-loading discovery (e.g., metadata service). self._cached_region = self.session.region_name if not self._cached_region: - raise exceptions.GoogleAuthError("Boto3 was unable to resolve an AWS region.") + raise exceptions.GoogleAuthError( + "Boto3 was unable to resolve an AWS region." 
+ ) return self._cached_region - def get_aws_security_credentials(self, context, request=None) -> aws.AwsSecurityCredentials: + def get_aws_security_credentials( + self, context, request=None + ) -> aws.AwsSecurityCredentials: """Retrieves AWS security credentials using Boto3's default provider chain.""" creds = self.session.get_credentials() if not creds: - raise exceptions.GoogleAuthError("Unable to resolve AWS credentials from Boto3.") + raise exceptions.GoogleAuthError( + "Unable to resolve AWS credentials from Boto3." + ) return aws.AwsSecurityCredentials( access_key_id=creds.access_key, @@ -62,59 +64,62 @@ def get_aws_security_credentials(self, context, request=None) -> aws.AwsSecurity ) -def authenticate_with_aws_credentials(bucket_name, audience, impersonation_url): - """Authenticates using the custom AWS supplier and lists bucket metadata.""" - +def authenticate_with_aws_credentials(bucket_name, audience, impersonation_url=None): + """Authenticates using the custom AWS supplier and gets bucket metadata. + + Returns: + dict: The bucket metadata response from the Google Cloud Storage API. + """ + # 1. Instantiate the custom supplier. custom_supplier = CustomAwsSupplier() # 2. Instantiate the AWS Credentials object. - # This object handles the exchange of AWS credentials for Google credentials. credentials = aws.Credentials( audience=audience, subject_token_type="urn:ietf:params:aws:token-type:aws4_request", service_account_impersonation_url=impersonation_url, aws_security_credentials_supplier=custom_supplier, - scopes=['https://www.googleapis.com/auth/devstorage.read_write'], + scopes=["https://www.googleapis.com/auth/devstorage.read_write"], ) # 3. Create an authenticated session. authed_session = auth_requests.AuthorizedSession(credentials) - + # 4. Make the API Request. bucket_url = f"https://storage.googleapis.com/storage/v1/b/{bucket_name}" - print(f"Request URL: {bucket_url}") response = authed_session.get(bucket_url) response.raise_for_status() - - print("\n--- SUCCESS! ---") - print(json.dumps(response.json(), indent=2)) + + return response.json() + # [END auth_custom_credential_supplier_aws] -from dotenv import load_dotenv def main(): """Main function to parse env vars and call the authenticator.""" - load_dotenv() - print("--- Starting Script ---") - gcp_audience = os.getenv("GCP_WORKLOAD_AUDIENCE") sa_impersonation_url = os.getenv("GCP_SERVICE_ACCOUNT_IMPERSONATION_URL") gcs_bucket_name = os.getenv("GCS_BUCKET_NAME") if not all([gcp_audience, gcs_bucket_name]): - print("[ERROR] Missing required environment variables.", file=sys.stderr) - print("Required: GCP_WORKLOAD_AUDIENCE, GCS_BUCKET_NAME", file=sys.stderr) - sys.exit(1) + print( + "Required environment variables missing: GCP_WORKLOAD_AUDIENCE, GCS_BUCKET_NAME" + ) + return try: - authenticate_with_aws_credentials(gcs_bucket_name, gcp_audience, sa_impersonation_url) + print(f"Retrieving metadata for bucket: {gcs_bucket_name}...") + metadata = authenticate_with_aws_credentials( + gcs_bucket_name, gcp_audience, sa_impersonation_url + ) + print("--- SUCCESS! 
---") + print(json.dumps(metadata, indent=2)) except Exception as e: - print(f"[ERROR] Authentication or Request failed: {e}", file=sys.stderr) - sys.exit(1) + print(f"Authentication or Request failed: {e}") if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/auth/custom-credentials/aws/snippets_test.py b/auth/custom-credentials/aws/snippets_test.py index cfdb6f38240..af8928417ba 100644 --- a/auth/custom-credentials/aws/snippets_test.py +++ b/auth/custom-credentials/aws/snippets_test.py @@ -13,99 +13,108 @@ # limitations under the License. import os -import unittest -from unittest.mock import MagicMock, patch - -from google.auth import exceptions -import requests - -# Import the module to be tested. -# NOTE: Update 'main' to the actual filename if different. -import snippets as app_module - -class TestCustomAwsSupplier(unittest.TestCase): - - @patch.dict(os.environ, {"AWS_REGION": "us-west-2"}) - @patch("boto3.Session") - def test_init_priority_env_var(self, mock_boto_session): - """Test that AWS_REGION env var takes priority during init.""" - app_module.CustomAwsSupplier() - # Verify boto3.Session was initialized with the region from env - mock_boto_session.assert_called_with(region_name="us-west-2") - - - @patch.dict(os.environ, {}, clear=True) - @patch("boto3.Session") - def test_get_aws_region_missing(self, mock_boto_session): - """Test that an error is raised if region cannot be resolved.""" - mock_session_instance = mock_boto_session.return_value - # Simulate Boto3 failing to find a region - mock_session_instance.region_name = None - - supplier = app_module.CustomAwsSupplier() - - with self.assertRaisesRegex(exceptions.GoogleAuthError, "unable to resolve an AWS region"): - supplier.get_aws_region(None, None) - - @patch("boto3.Session") - def test_get_aws_security_credentials_success(self, mock_boto_session): - """Test successful retrieval of AWS credentials.""" - mock_session_instance = mock_boto_session.return_value - - # Mock the credentials object returned by boto3 - mock_creds = MagicMock() - mock_creds.access_key = "test-access-key" - mock_creds.secret_key = "test-secret-key" - mock_creds.token = "test-session-token" - mock_session_instance.get_credentials.return_value = mock_creds - - supplier = app_module.CustomAwsSupplier() - creds = supplier.get_aws_security_credentials(None) - - self.assertEqual(creds.access_key_id, "test-access-key") - self.assertEqual(creds.secret_access_key, "test-secret-key") - self.assertEqual(creds.session_token, "test-session-token") - - @patch("boto3.Session") - def test_get_aws_security_credentials_none(self, mock_boto_session): - """Test handling when Boto3 returns no credentials.""" - mock_session_instance = mock_boto_session.return_value - mock_session_instance.get_credentials.return_value = None - - supplier = app_module.CustomAwsSupplier() - - with self.assertRaisesRegex(exceptions.GoogleAuthError, "Unable to resolve AWS credentials"): - supplier.get_aws_security_credentials(None) - - -class TestAuthenticateLogic(unittest.TestCase): - - @patch("snippets.auth_requests.AuthorizedSession") - @patch("snippets.aws.Credentials") - @patch("snippets.CustomAwsSupplier") - def test_authenticate_success(self, MockSupplier, MockAwsCreds, MockSession): - """Test the success path of the main logic function.""" - # Mock the HTTP response - mock_response = MagicMock() - mock_response.status_code = 200 - mock_response.json.return_value = {"kind": "storage#bucket", "name": "my-bucket"} - - mock_session_instance = 
MockSession.return_value - mock_session_instance.get.return_value = mock_response - - # Run the function - app_module.authenticate_with_aws_credentials( - bucket_name="my-bucket", - audience="//iam.googleapis.com/...", - impersonation_url="https://..." +from unittest import mock + +import pytest + +import snippets + +# --- Unit Tests --- + + +@mock.patch.dict(os.environ, {"AWS_REGION": "us-west-2"}) +@mock.patch("boto3.Session") +def test_init_priority_env_var(mock_boto_session): + """Test that AWS_REGION env var takes priority during init.""" + snippets.CustomAwsSupplier() + mock_boto_session.assert_called_with(region_name="us-west-2") + + +@mock.patch.dict(os.environ, {}, clear=True) +@mock.patch("boto3.Session") +def test_get_aws_region_caching(mock_boto_session): + """Test that get_aws_region caches the result from Boto3.""" + mock_session_instance = mock_boto_session.return_value + mock_session_instance.region_name = "us-east-1" + + supplier = snippets.CustomAwsSupplier() + + # First call should hit the session + region = supplier.get_aws_region(None, None) + assert region == "us-east-1" + + # Change the mock to ensure we aren't calling it again + mock_session_instance.region_name = "us-west-2" + + # Second call should return the cached value + region2 = supplier.get_aws_region(None, None) + assert region2 == "us-east-1" + + +@mock.patch("boto3.Session") +def test_get_aws_security_credentials_success(mock_boto_session): + """Test successful retrieval of AWS credentials.""" + mock_session_instance = mock_boto_session.return_value + + mock_creds = mock.MagicMock() + mock_creds.access_key = "test-key" + mock_creds.secret_key = "test-secret" + mock_creds.token = "test-token" + mock_session_instance.get_credentials.return_value = mock_creds + + supplier = snippets.CustomAwsSupplier() + creds = supplier.get_aws_security_credentials(None) + + assert creds.access_key_id == "test-key" + assert creds.secret_access_key == "test-secret" + assert creds.session_token == "test-token" + + +@mock.patch("snippets.auth_requests.AuthorizedSession") +@mock.patch("snippets.aws.Credentials") +@mock.patch("snippets.CustomAwsSupplier") +def test_authenticate_unit_success(MockSupplier, MockAwsCreds, MockSession): + """Unit test for the main flow using mocks.""" + mock_response = mock.MagicMock() + mock_response.status_code = 200 + mock_response.json.return_value = {"name": "my-bucket"} + + mock_session_instance = MockSession.return_value + mock_session_instance.get.return_value = mock_response + + result = snippets.authenticate_with_aws_credentials( + bucket_name="my-bucket", + audience="//iam.googleapis.com/...", + impersonation_url=None, + ) + + assert result == {"name": "my-bucket"} + MockSupplier.assert_called_once() + MockAwsCreds.assert_called_once() + + +# --- System Test (Integration) --- + + +def test_authenticate_system(): + """ + System test that runs against the real API. + Skips automatically if required environment variables are missing. + """ + required_env = ["GCP_WORKLOAD_AUDIENCE", "GCS_BUCKET_NAME", "AWS_ACCESS_KEY_ID"] + if not all(os.getenv(var) for var in required_env): + pytest.skip( + "Skipping system test: missing required env vars (GCP/AWS credentials)." 
) - # Assertions - MockSupplier.assert_called_once() - MockAwsCreds.assert_called_once() - # Verify bucket URL was constructed correctly - mock_session_instance.get.assert_called_with("https://storage.googleapis.com/storage/v1/b/my-bucket") - mock_response.raise_for_status.assert_called_once() + audience = os.getenv("GCP_WORKLOAD_AUDIENCE") + bucket_name = os.getenv("GCS_BUCKET_NAME") + impersonation_url = os.getenv("GCP_SERVICE_ACCOUNT_IMPERSONATION_URL") + + # This calls the real API + metadata = snippets.authenticate_with_aws_credentials( + bucket_name=bucket_name, audience=audience, impersonation_url=impersonation_url + ) -if __name__ == "__main__": - unittest.main() \ No newline at end of file + assert metadata is not None + assert metadata.get("name") == bucket_name diff --git a/auth/custom-credentials/okta/noxfile_config.py b/auth/custom-credentials/okta/noxfile_config.py new file mode 100644 index 00000000000..a4c09718253 --- /dev/null +++ b/auth/custom-credentials/okta/noxfile_config.py @@ -0,0 +1,26 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +TEST_CONFIG_OVERRIDE = { + # Ignore all versions except 3.9, which is the version available. + "ignored_versions": ["2.7", "3.6", "3.7", "3.8", "3.10", "3.11", "3.12", "3.13"], + "envs": { + "OKTA_DOMAIN": "", + "OKTA_CLIENT_ID": "", + "OKTA_CLIENT_SECRET": "", + "GCP_WORKLOAD_AUDIENCE": "", + "GCS_BUCKET_NAME": "", + "GCP_SERVICE_ACCOUNT_IMPERSONATION_URL": "", + }, +} diff --git a/auth/custom-credentials/okta/snippets.py b/auth/custom-credentials/okta/snippets.py index d393515a841..efd45796687 100644 --- a/auth/custom-credentials/okta/snippets.py +++ b/auth/custom-credentials/okta/snippets.py @@ -14,13 +14,13 @@ # [START auth_custom_credential_supplier_okta] import json import os -import sys import time import urllib.parse -import requests from google.auth import identity_pool from google.auth.transport import requests as auth_requests +import requests + class OktaClientCredentialsSupplier: """A custom SubjectTokenSupplier that authenticates with Okta. @@ -38,11 +38,9 @@ def __init__(self, domain, client_id, client_secret): def get_subject_token(self, context, request=None) -> str: """Fetches a new token if the current one is expired or missing.""" - # Check if the current token is still valid (with a 60-second buffer). if self.access_token and time.time() < self.expiry_time - 60: return self.access_token - print("[Supplier] Fetching new Okta Access token...") self._fetch_okta_access_token() return self.access_token @@ -54,7 +52,7 @@ def _fetch_okta_access_token(self): } data = { "grant_type": "client_credentials", - "scope": "gcp.test.read", # Set scope as per Okta app config. + "scope": "gcp.test.read", # Set scope as per Okta app config. 
} response = requests.post( @@ -64,22 +62,25 @@ def _fetch_okta_access_token(self): auth=(self.client_id, self.client_secret), ) response.raise_for_status() - + token_data = response.json() self.access_token = token_data["access_token"] self.expiry_time = time.time() + token_data["expires_in"] def authenticate_with_okta_credentials( - bucket_name, audience, domain, client_id, client_secret, impersonation_url + bucket_name, audience, domain, client_id, client_secret, impersonation_url=None ): - """Authenticates using the custom Okta supplier and lists bucket metadata.""" - + """Authenticates using the custom Okta supplier and gets bucket metadata. + + Returns: + dict: The bucket metadata response from the Google Cloud Storage API. + """ + # 1. Instantiate the custom supplier. okta_supplier = OktaClientCredentialsSupplier(domain, client_id, client_secret) # 2. Instantiate the IdentityPoolClient. - # This client exchanges the Okta token for a Google Federated Token. client = identity_pool.Credentials( audience=audience, subject_token_type="urn:ietf:params:oauth:token-type:jwt", @@ -94,37 +95,34 @@ def authenticate_with_okta_credentials( # 4. Make the API Request. bucket_url = f"https://storage.googleapis.com/storage/v1/b/{bucket_name}" - print(f"Request URL: {bucket_url}") - + response = authed_session.get(bucket_url) response.raise_for_status() - print("\n--- SUCCESS! ---") - print(json.dumps(response.json(), indent=2)) + return response.json() + # [END auth_custom_credential_supplier_okta] -from dotenv import load_dotenv def main(): - load_dotenv() - - # Configuration gcp_audience = os.getenv("GCP_WORKLOAD_AUDIENCE") gcs_bucket_name = os.getenv("GCS_BUCKET_NAME") - # Optional: Service Account Impersonation sa_impersonation_url = os.getenv("GCP_SERVICE_ACCOUNT_IMPERSONATION_URL") okta_domain = os.getenv("OKTA_DOMAIN") okta_client_id = os.getenv("OKTA_CLIENT_ID") okta_client_secret = os.getenv("OKTA_CLIENT_SECRET") - if not all([gcp_audience, gcs_bucket_name, okta_domain, okta_client_id, okta_client_secret]): - print("[ERROR] Missing required environment variables.", file=sys.stderr) - sys.exit(1) + if not all( + [gcp_audience, gcs_bucket_name, okta_domain, okta_client_id, okta_client_secret] + ): + print("Missing required environment variables.") + return try: - authenticate_with_okta_credentials( + print(f"Retrieving metadata for bucket: {gcs_bucket_name}...") + metadata = authenticate_with_okta_credentials( bucket_name=gcs_bucket_name, audience=gcp_audience, domain=okta_domain, @@ -132,9 +130,11 @@ def main(): client_secret=okta_client_secret, impersonation_url=sa_impersonation_url, ) + print("--- SUCCESS! ---") + print(json.dumps(metadata, indent=2)) except Exception as e: - print(f"[ERROR] Authentication or Request failed: {e}", file=sys.stderr) - sys.exit(1) + print(f"Authentication or Request failed: {e}") + if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/auth/custom-credentials/okta/snippets_test.py b/auth/custom-credentials/okta/snippets_test.py index 7a3d67dfa53..bf3218057ed 100644 --- a/auth/custom-credentials/okta/snippets_test.py +++ b/auth/custom-credentials/okta/snippets_test.py @@ -12,97 +12,115 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import unittest +import os +import time +from unittest import mock import urllib.parse -from unittest.mock import MagicMock, patch - -import snippets as app_module - - -class TestOktaClientCredentialsSupplier(unittest.TestCase): - - def setUp(self): - self.domain = "https://okta.example.com" - self.client_id = "test-id" - self.client_secret = "test-secret" - self.supplier = app_module.OktaClientCredentialsSupplier( - self.domain, self.client_id, self.client_secret - ) - - def test_init_url_cleaning(self): - """Test that the token URL strips trailing slashes.""" - # Case 1: Trailing slash - s1 = app_module.OktaClientCredentialsSupplier("https://okta.com/", "id", "sec") - self.assertEqual(s1.okta_token_url, "https://okta.com/oauth2/default/v1/token") - - # Case 2: No trailing slash - s2 = app_module.OktaClientCredentialsSupplier("https://okta.com", "id", "sec") - self.assertEqual(s2.okta_token_url, "https://okta.com/oauth2/default/v1/token") - - @patch("requests.post") - def test_get_subject_token_fetch(self, mock_post): - """Test fetching a new token from Okta.""" - # Mock the Okta response - mock_response = MagicMock() - mock_response.status_code = 200 - mock_response.json.return_value = { - "access_token": "new-token", - "expires_in": 3600 - } - mock_post.return_value = mock_response - - # Execute - token = self.supplier.get_subject_token(None, None) - - # Verify - self.assertEqual(token, "new-token") - self.assertEqual(self.supplier.access_token, "new-token") - - # Check that requests.post was called correctly - mock_post.assert_called_once() - args, kwargs = mock_post.call_args - self.assertEqual(kwargs["auth"], (self.client_id, self.client_secret)) - self.assertEqual(kwargs["headers"]["Content-Type"], "application/x-www-form-urlencoded") - - # The script encodes data using urllib, so we decode it to verify contents - sent_data = urllib.parse.parse_qs(kwargs["data"]) - self.assertEqual(sent_data["grant_type"][0], "client_credentials") - self.assertEqual(sent_data["scope"][0], "gcp.test.read") - - -class TestAuthenticationFlow(unittest.TestCase): - - @patch("snippets.auth_requests.AuthorizedSession") - @patch("snippets.identity_pool.Credentials") - @patch("snippets.OktaClientCredentialsSupplier") - def test_authenticate_success(self, MockSupplier, MockCreds, MockSession): - """Test the main logic flow for successful authentication.""" - # Setup Mocks - mock_response = MagicMock() - mock_response.status_code = 200 - mock_response.json.return_value = {"kind": "storage#bucket", "name": "test-bucket"} - - mock_session_instance = MockSession.return_value - mock_session_instance.get.return_value = mock_response - - # Execute - app_module.authenticate_with_okta_credentials( - bucket_name="test-bucket", - audience="test-aud", - domain="https://okta.com", - client_id="id", - client_secret="sec", - impersonation_url=None - ) - - # Verify - MockSupplier.assert_called_once() - MockCreds.assert_called_once() - mock_session_instance.get.assert_called_with( - "https://storage.googleapis.com/storage/v1/b/test-bucket" - ) - mock_response.raise_for_status.assert_called_once() - - -if __name__ == "__main__": - unittest.main() \ No newline at end of file + +import pytest + +import snippets + +# --- Unit Tests --- + + +def test_init_url_cleaning(): + """Test that the token URL strips trailing slashes.""" + s1 = snippets.OktaClientCredentialsSupplier("https://okta.com/", "id", "sec") + assert s1.okta_token_url == "https://okta.com/oauth2/default/v1/token" + + s2 = 
snippets.OktaClientCredentialsSupplier("https://okta.com", "id", "sec") + assert s2.okta_token_url == "https://okta.com/oauth2/default/v1/token" + + +@mock.patch("requests.post") +def test_get_subject_token_fetch(mock_post): + """Test fetching a new token from Okta.""" + supplier = snippets.OktaClientCredentialsSupplier("https://okta.com", "id", "sec") + + mock_response = mock.MagicMock() + mock_response.status_code = 200 + mock_response.json.return_value = {"access_token": "new-token", "expires_in": 3600} + mock_post.return_value = mock_response + + token = supplier.get_subject_token(None, None) + + assert token == "new-token" + mock_post.assert_called_once() + + # Verify args + _, kwargs = mock_post.call_args + assert kwargs["auth"] == ("id", "sec") + + sent_data = urllib.parse.parse_qs(kwargs["data"]) + assert sent_data["grant_type"][0] == "client_credentials" + + +@mock.patch("requests.post") +def test_get_subject_token_cached(mock_post): + """Test that cached token is returned if valid.""" + supplier = snippets.OktaClientCredentialsSupplier("https://okta.com", "id", "sec") + supplier.access_token = "cached-token" + supplier.expiry_time = time.time() + 3600 + + token = supplier.get_subject_token(None, None) + + assert token == "cached-token" + mock_post.assert_not_called() + + +@mock.patch("snippets.auth_requests.AuthorizedSession") +@mock.patch("snippets.identity_pool.Credentials") +@mock.patch("snippets.OktaClientCredentialsSupplier") +def test_authenticate_unit_success(MockSupplier, MockCreds, MockSession): + """Unit test for the main Okta auth flow.""" + mock_response = mock.MagicMock() + mock_response.status_code = 200 + mock_response.json.return_value = {"name": "test-bucket"} + + mock_session_instance = MockSession.return_value + mock_session_instance.get.return_value = mock_response + + metadata = snippets.authenticate_with_okta_credentials( + bucket_name="test-bucket", + audience="test-aud", + domain="https://okta.com", + client_id="id", + client_secret="sec", + impersonation_url=None, + ) + + assert metadata == {"name": "test-bucket"} + MockSupplier.assert_called_once() + MockCreds.assert_called_once() + + +# --- System Test --- + + +def test_authenticate_system(): + """ + System test that runs against the real API. + Skips automatically if required Okta/GCP env vars are missing. + """ + required_env = [ + "GCP_WORKLOAD_AUDIENCE", + "OKTA_CLIENT_ID", + "OKTA_CLIENT_SECRET", + "OKTA_DOMAIN", + "GCS_BUCKET_NAME", + ] + if not all(os.getenv(var) for var in required_env): + pytest.skip("Skipping system test: missing required env vars.") + + metadata = snippets.authenticate_with_okta_credentials( + bucket_name=os.getenv("GCS_BUCKET_NAME"), + audience=os.getenv("GCP_WORKLOAD_AUDIENCE"), + domain=os.getenv("OKTA_DOMAIN"), + client_id=os.getenv("OKTA_CLIENT_ID"), + client_secret=os.getenv("OKTA_CLIENT_SECRET"), + impersonation_url=os.getenv("GCP_SERVICE_ACCOUNT_IMPERSONATION_URL"), + ) + + assert metadata is not None + assert metadata.get("name") == os.getenv("GCS_BUCKET_NAME") From 6f9727c93e7cffff280fb73265d80cf73f055db8 Mon Sep 17 00:00:00 2001 From: Pranav Iyer Date: Fri, 21 Nov 2025 17:00:10 -0800 Subject: [PATCH 03/23] Scripts now read from a file instead of env variables. Changed readmes as well. 
--- .gitignore | 3 +- auth/custom-credentials/aws/Dockerfile | 20 ++++ auth/custom-credentials/aws/README.md | 97 +++++++------------ ...ustom-credentials-aws-secrets.json.example | 8 ++ auth/custom-credentials/aws/noxfile_config.py | 8 -- auth/custom-credentials/aws/pod.yaml | 20 ++++ auth/custom-credentials/aws/snippets.py | 33 ++++++- auth/custom-credentials/aws/snippets_test.py | 39 +++++--- auth/custom-credentials/okta/README.md | 31 +++--- ...stom-credentials-okta-secrets.json.example | 8 ++ .../custom-credentials/okta/noxfile_config.py | 8 -- auth/custom-credentials/okta/snippets.py | 24 +++-- auth/custom-credentials/okta/snippets_test.py | 51 +++++----- 13 files changed, 209 insertions(+), 141 deletions(-) create mode 100644 auth/custom-credentials/aws/Dockerfile create mode 100644 auth/custom-credentials/aws/custom-credentials-aws-secrets.json.example create mode 100644 auth/custom-credentials/aws/pod.yaml create mode 100644 auth/custom-credentials/okta/custom-credentials-okta-secrets.json.example diff --git a/.gitignore b/.gitignore index bcb6b89f6ff..9eff6289235 100644 --- a/.gitignore +++ b/.gitignore @@ -30,4 +30,5 @@ env/ .idea .env* **/venv -**/noxfile.py \ No newline at end of file +**/noxfile.py\n# Local secrets file\nauth/custom-credentials/okta/custom-credentials-okta-secrets.json +auth/custom-credentials/aws/custom-credentials-aws-secrets.json diff --git a/auth/custom-credentials/aws/Dockerfile b/auth/custom-credentials/aws/Dockerfile new file mode 100644 index 00000000000..1cf23d3ac50 --- /dev/null +++ b/auth/custom-credentials/aws/Dockerfile @@ -0,0 +1,20 @@ +FROM python:3.9-slim + +# Create a non-root user +RUN useradd -m appuser + +# Create a working directory +WORKDIR /app + +# Copy files and install dependencies +COPY --chown=appuser:appuser requirements.txt . +COPY --chown=appuser:appuser snippets.py . + +# Switch to the non-root user +USER appuser + +# Install dependencies for the user +RUN pip install --no-cache-dir --user -r requirements.txt + +# Set the entrypoint +CMD ["python3", "snippets.py"] diff --git a/auth/custom-credentials/aws/README.md b/auth/custom-credentials/aws/README.md index 1c2c9fa0ef1..767906682a2 100644 --- a/auth/custom-credentials/aws/README.md +++ b/auth/custom-credentials/aws/README.md @@ -1,12 +1,12 @@ # Running the Custom Credential Supplier Sample -If you want to use AWS security credentials that cannot be retrieved using methods supported natively by the [google-auth](https://github.com/googleapis/google-auth-library-python) library, a custom `AwsSecurityCredentialsSupplier` implementation may be specified. The supplier must return valid, unexpired AWS security credentials when called by the GCP credential. +If you want to use AWS security credentials that cannot be retrieved using methods supported natively by the [google-auth](https://github.com/googleapis/google-auth-library-python) library, a custom `AwsSecurityCredentialsSupplier` implementation may be specified. The supplier must return valid, unexpired AWS security credentials when called by the Google Cloud Auth library. This sample demonstrates how to use **Boto3** (the AWS SDK for Python) as a custom supplier to bridge AWS credentials—from sources like EKS IRSA, ECS, or Fargate—to Google Cloud Workload Identity. ## Running Locally -To run the sample on your local system, you need to install the dependencies and configure your AWS and GCP credentials as environment variables. +For local development, you can provide credentials and configuration in a JSON file. 
For containerized environments like EKS, the script can fall back to environment variables. ### 1. Install Dependencies @@ -16,18 +16,13 @@ Ensure you have Python installed, then install the required libraries: pip install -r requirements.txt ``` -### 2. Set Environment Variables +### 2. Configure Credentials for Local Development -```bash -export AWS_ACCESS_KEY_ID="YOUR_AWS_ACCESS_KEY_ID" -export AWS_SECRET_ACCESS_KEY="YOUR_AWS_SECRET_ACCESS_KEY" -export AWS_REGION="YOUR_AWS_REGION" # e.g., us-east-1 -export GCP_WORKLOAD_AUDIENCE="YOUR_GCP_WORKLOAD_AUDIENCE" -export GCS_BUCKET_NAME="YOUR_GCS_BUCKET_NAME" - -# Optional: If you want to use service account impersonation -export GCP_SERVICE_ACCOUNT_IMPERSONATION_URL="YOUR_GCP_SERVICE_ACCOUNT_IMPERSONATION_URL" -``` +1. Copy the example secrets file to a new file named `custom-credentials-aws-secrets.json`: + ```bash + cp custom-credentials-aws-secrets.json.example custom-credentials-aws-secrets.json + ``` +2. Open `custom-credentials-aws-secrets.json` and fill in the required values for your AWS and GCP configuration. The `custom-credentials-aws-secrets.json` file is ignored by Git, so your credentials will not be checked into version control. ### 3. Run the Script @@ -35,6 +30,8 @@ export GCP_SERVICE_ACCOUNT_IMPERSONATION_URL="YOUR_GCP_SERVICE_ACCOUNT_IMPERSONA python3 snippets.py ``` +When run locally, the script will detect the `custom-credentials-aws-secrets.json` file and use it to configure the necessary environment variables for the Boto3 client. + ## Running in a Containerized Environment (EKS) This section provides a brief overview of how to run the sample in an Amazon EKS cluster. @@ -47,40 +44,44 @@ First, you need an EKS cluster. You can create one using `eksctl` or the AWS Man IRSA allows you to associate an IAM role with a Kubernetes service account. This provides a secure way for your pods to access AWS services without hardcoding long-lived credentials. -- Create an IAM OIDC provider for your cluster. -- Create an IAM role and policy that grants the necessary AWS permissions. -- Associate the IAM role with a Kubernetes service account. +You can essentially complete the OIDC setup, IAM role creation, and Service Account association in one step using `eksctl`. + +Run the following command to create the IAM role and bind it to a Kubernetes Service Account: + +```bash +eksctl create iamserviceaccount \ + --name your-k8s-service-account \ + --namespace default \ + --cluster your-cluster-name \ + --region your-aws-region \ + --role-name your-role-name \ + --attach-policy-arn arn:aws:iam::aws:policy/AmazonS3ReadOnlyAccess \ + --approve +``` + +> **Note**: The `--attach-policy-arn` flag is used here to demonstrate attaching permissions. Update this with the specific AWS policy ARN your application requires (e.g., if your Boto3 client needs to read from S3 or DynamoDB). -For detailed steps, see the [IAM Roles for Service Accounts](https://docs.aws.amazon.com/eks/latest/userguide/iam-roles-for-service-accounts.html) documentation. +For a deep dive into how this works manually, refer to the [IAM Roles for Service Accounts](https://docs.aws.amazon.com/eks/latest/userguide/iam-roles-for-service-accounts.html) documentation. ### 3. Configure GCP to Trust the AWS Role -You need to configure your GCP project to trust the AWS IAM role you created. This is done by creating a Workload Identity Pool and Provider in GCP. +To allow your AWS role to authenticate as a Google Cloud service account, you need to configure Workload Identity Federation. 
This process involves these key steps: -- Create a Workload Identity Pool. -- Create a Workload Identity Provider that trusts the AWS role ARN. -- Grant the GCP service account the necessary permissions. +1. **Create a Workload Identity Pool and an AWS Provider:** The pool holds the configuration, and the provider is set up to trust your AWS account. -### 4. Containerize and Package the Application +2. **Create or select a GCP Service Account:** This service account will be impersonated by your AWS role. Grant this service account the necessary GCP permissions for your application (e.g., access to GCS or BigQuery). -Create a `Dockerfile` for the Python application and push the image to a container registry (e.g., Amazon ECR) that your EKS cluster can access. +3. **Bind the AWS Role to the GCP Service Account:** Create an IAM policy binding that gives your AWS role the `Workload Identity User` (`roles/iam.workloadIdentityUser`) role on the GCP service account. This allows the AWS role to impersonate the service account. -**Dockerfile** -```Dockerfile -FROM python:3.11-slim +**Alternative: Direct Access** -WORKDIR /app +> For supported resources, you can grant roles directly to the AWS identity, bypassing service account impersonation. To do this, grant a role (like `roles/storage.objectViewer`) to the workload identity principal (`principalSet://...`) directly on the resource's IAM policy. -# Copy requirements and install dependencies -COPY requirements.txt . -RUN pip install --no-cache-dir -r requirements.txt +For more detailed information, see the documentation on [Configuring Workload Identity Federation](https://cloud.google.com/iam/docs/workload-identity-federation-with-other-clouds). -# Copy the script -COPY snippets.py . +### 4. Containerize and Package the Application -# Run the script -CMD ["python3", "snippets.py"] -``` +Create a `Dockerfile` for the Python application and push the image to a container registry (e.g., Amazon ECR) that your EKS cluster can access. Refer to the [`Dockerfile`](Dockerfile) for the container image definition. Build and push the image: ```bash @@ -90,31 +91,7 @@ docker push your-container-image:latest ### 5. Deploy to EKS -Create a Kubernetes deployment manifest (`pod.yaml`) to deploy your application to the EKS cluster. - -**pod.yaml** -```yaml -apiVersion: v1 -kind: Pod -metadata: - name: custom-credential-pod -spec: - serviceAccountName: your-k8s-service-account # The service account associated with the AWS IAM role - containers: - - name: gcp-auth-sample - image: your-container-image:latest # Your image from ECR - env: - # AWS_REGION is often required for Boto3 to initialize correctly in containers - - name: AWS_REGION - value: "your-aws-region" - - name: GCP_WORKLOAD_AUDIENCE - value: "your-gcp-workload-audience" - # Optional: If you want to use service account impersonation - # - name: GCP_SERVICE_ACCOUNT_IMPERSONATION_URL - # value: "your-gcp-service-account-impersonation-url" - - name: GCS_BUCKET_NAME - value: "your-gcs-bucket-name" -``` +Create a Kubernetes deployment manifest to deploy your application to the EKS cluster. See the [`pod.yaml`](pod.yaml) file for an example. 
Deploy the pod: diff --git a/auth/custom-credentials/aws/custom-credentials-aws-secrets.json.example b/auth/custom-credentials/aws/custom-credentials-aws-secrets.json.example new file mode 100644 index 00000000000..300dc70c138 --- /dev/null +++ b/auth/custom-credentials/aws/custom-credentials-aws-secrets.json.example @@ -0,0 +1,8 @@ +{ + "aws_access_key_id": "YOUR_AWS_ACCESS_KEY_ID", + "aws_secret_access_key": "YOUR_AWS_SECRET_ACCESS_KEY", + "aws_region": "YOUR_AWS_REGION", + "gcp_workload_audience": "YOUR_GCP_WORKLOAD_AUDIENCE", + "gcs_bucket_name": "YOUR_GCS_BUCKET_NAME", + "gcp_service_account_impersonation_url": "YOUR_GCP_SERVICE_ACCOUNT_IMPERSONATION_URL" +} diff --git a/auth/custom-credentials/aws/noxfile_config.py b/auth/custom-credentials/aws/noxfile_config.py index 6b43f01e2ea..834faa2b2f6 100644 --- a/auth/custom-credentials/aws/noxfile_config.py +++ b/auth/custom-credentials/aws/noxfile_config.py @@ -15,12 +15,4 @@ TEST_CONFIG_OVERRIDE = { # Ignore all versions except 3.9, which is the version available. "ignored_versions": ["2.7", "3.6", "3.7", "3.8", "3.10", "3.11", "3.12", "3.13"], - "envs": { - "AWS_ACCESS_KEY_ID": "", - "AWS_SECRET_ACCESS_KEY": "", - "AWS_REGION": "", - "GCP_WORKLOAD_AUDIENCE": "", - "GCS_BUCKET_NAME": "", - "GCP_SERVICE_ACCOUNT_IMPERSONATION_URL": "", - }, } diff --git a/auth/custom-credentials/aws/pod.yaml b/auth/custom-credentials/aws/pod.yaml new file mode 100644 index 00000000000..9580569ced8 --- /dev/null +++ b/auth/custom-credentials/aws/pod.yaml @@ -0,0 +1,20 @@ +apiVersion: v1 +kind: Pod +metadata: + name: custom-credential-pod +spec: + serviceAccountName: your-k8s-service-account # The service account associated with the AWS IAM role + containers: + - name: gcp-auth-sample + image: your-container-image:latest # Your image from ECR + env: + # AWS_REGION is often required for Boto3 to initialize correctly in containers + - name: AWS_REGION + value: "your-aws-region" + - name: GCP_WORKLOAD_AUDIENCE + value: "your-gcp-workload-audience" + # Optional: If you want to use service account impersonation + # - name: GCP_SERVICE_ACCOUNT_IMPERSONATION_URL + # value: "your-gcp-service-account-impersonation-url" + - name: GCS_BUCKET_NAME + value: "your-gcs-bucket-name" diff --git a/auth/custom-credentials/aws/snippets.py b/auth/custom-credentials/aws/snippets.py index cfb87f9ad82..67625e1246d 100644 --- a/auth/custom-credentials/aws/snippets.py +++ b/auth/custom-credentials/aws/snippets.py @@ -98,15 +98,44 @@ def authenticate_with_aws_credentials(bucket_name, audience, impersonation_url=N # [END auth_custom_credential_supplier_aws] +def _load_config_from_file(): + """ + If a local secrets file is present, load it into the environment. + This is a "just-in-time" configuration for local development. These + variables are only set for the current process and are not exposed to the + shell. 
+ """ + if os.path.exists("custom-credentials-aws-secrets.json"): + with open("custom-credentials-aws-secrets.json", "r") as f: + secrets = json.load(f) + + os.environ["AWS_ACCESS_KEY_ID"] = secrets.get("aws_access_key_id", "") + os.environ["AWS_SECRET_ACCESS_KEY"] = secrets.get("aws_secret_access_key", "") + os.environ["AWS_REGION"] = secrets.get("aws_region", "") + os.environ["GCP_WORKLOAD_AUDIENCE"] = secrets.get("gcp_workload_audience", "") + os.environ["GCS_BUCKET_NAME"] = secrets.get("gcs_bucket_name", "") + os.environ["GCP_SERVICE_ACCOUNT_IMPERSONATION_URL"] = secrets.get( + "gcp_service_account_impersonation_url", "" + ) + + def main(): - """Main function to parse env vars and call the authenticator.""" + + # Reads the custom-credentials-aws-secrets.json if running locally. + _load_config_from_file() + + # Now, read the configuration from the environment. In a local run, these + # will be the values we just set. In a containerized run, they will be + # the values provided by the environment. gcp_audience = os.getenv("GCP_WORKLOAD_AUDIENCE") sa_impersonation_url = os.getenv("GCP_SERVICE_ACCOUNT_IMPERSONATION_URL") gcs_bucket_name = os.getenv("GCS_BUCKET_NAME") if not all([gcp_audience, gcs_bucket_name]): print( - "Required environment variables missing: GCP_WORKLOAD_AUDIENCE, GCS_BUCKET_NAME" + "Required configuration missing. Please provide it in a " + "custom-credentials-aws-secrets.json file or as environment variables: " + "GCP_WORKLOAD_AUDIENCE, GCS_BUCKET_NAME" ) return diff --git a/auth/custom-credentials/aws/snippets_test.py b/auth/custom-credentials/aws/snippets_test.py index af8928417ba..779a492c08f 100644 --- a/auth/custom-credentials/aws/snippets_test.py +++ b/auth/custom-credentials/aws/snippets_test.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import json import os from unittest import mock @@ -95,26 +96,36 @@ def test_authenticate_unit_success(MockSupplier, MockAwsCreds, MockSession): # --- System Test (Integration) --- - def test_authenticate_system(): """ System test that runs against the real API. - Skips automatically if required environment variables are missing. + Skips automatically if custom-credentials-aws-secrets.json is missing or incomplete. """ - required_env = ["GCP_WORKLOAD_AUDIENCE", "GCS_BUCKET_NAME", "AWS_ACCESS_KEY_ID"] - if not all(os.getenv(var) for var in required_env): + if not os.path.exists("custom-credentials-aws-secrets.json"): pytest.skip( - "Skipping system test: missing required env vars (GCP/AWS credentials)." + "Skipping system test: custom-credentials-aws-secrets.json not found." ) - audience = os.getenv("GCP_WORKLOAD_AUDIENCE") - bucket_name = os.getenv("GCS_BUCKET_NAME") - impersonation_url = os.getenv("GCP_SERVICE_ACCOUNT_IMPERSONATION_URL") + with open("custom-credentials-aws-secrets.json", "r") as f: + secrets = json.load(f) + + required_keys = [ + "gcp_workload_audience", + "gcs_bucket_name", + "aws_access_key_id", + "aws_secret_access_key", + "aws_region", + ] + if not all(key in secrets for key in required_keys): + pytest.skip( + "Skipping system test: custom-credentials-aws-secrets.json is missing required keys." + ) - # This calls the real API - metadata = snippets.authenticate_with_aws_credentials( - bucket_name=bucket_name, audience=audience, impersonation_url=impersonation_url - ) + # The main() function handles the auth flow and printing. + # We mock the print function to verify the output. 
+ with mock.patch("builtins.print") as mock_print: + snippets.main() - assert metadata is not None - assert metadata.get("name") == bucket_name + # Check for the success message in the print output. + output = "\n".join([call.args[0] for call in mock_print.call_args_list]) + assert "--- SUCCESS! ---" in output diff --git a/auth/custom-credentials/okta/README.md b/auth/custom-credentials/okta/README.md index 147a7fb8ef4..dd40f20df11 100644 --- a/auth/custom-credentials/okta/README.md +++ b/auth/custom-credentials/okta/README.md @@ -47,7 +47,7 @@ Ensure you have a GCS bucket that the authenticated user will have access to. Yo ## 3. Running the Script -To run the sample on your local system, you need to install the dependencies and configure the environment variables. +To run the sample on your local system, you need to install the dependencies and configure your credentials. ### Install Dependencies @@ -55,25 +55,20 @@ To run the sample on your local system, you need to install the dependencies and pip install -r requirements.txt ``` -### Set Environment Variables +### Configure Credentials -The script relies on the `GCP_WORKLOAD_AUDIENCE` variable, which typically follows this format: -`//iam.googleapis.com/projects/YOUR_PROJECT_NUMBER/locations/global/workloadIdentityPools/YOUR_POOL_ID/providers/YOUR_PROVIDER_ID` +1. Copy the example secrets file to a new file named `custom-credentials-okta-secrets.json`: + ```bash + cp custom-credentials-okta-secrets.json.example custom-credentials-okta-secrets.json + ``` +2. Open `custom-credentials-okta-secrets.json` and fill in the following values: -```bash -# Okta Configuration -export OKTA_DOMAIN="https://your-okta-domain.okta.com" -export OKTA_CLIENT_ID="your-okta-client-id" -export OKTA_CLIENT_SECRET="your-okta-client-secret" - -# GCP Configuration -export GCP_WORKLOAD_AUDIENCE="//iam.googleapis.com/projects/123456789/locations/global/workloadIdentityPools/my-pool/providers/my-provider" -export GCS_BUCKET_NAME="your-gcs-bucket-name" - -# Optional: Service Account Impersonation -# If set, the script will exchange the federated token for a Google Service Account token. -export GCP_SERVICE_ACCOUNT_IMPERSONATION_URL="https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/my-service-account@my-project.iam.gserviceaccount.com:generateAccessToken" -``` + * `okta_domain`: Your Okta developer domain (e.g., `https://dev-123456.okta.com`). + * `okta_client_id`: The client ID for your application. + * `okta_client_secret`: The client secret for your application. + * `gcp_workload_audience`: The audience for the GCP Workload Identity Pool. This typically follows the format: `//iam.googleapis.com/projects/YOUR_PROJECT_NUMBER/locations/global/workloadIdentityPools/YOUR_POOL_ID/providers/YOUR_PROVIDER_ID`. + * `gcs_bucket_name`: The name of the GCS bucket to access. + * `gcp_service_account_impersonation_url`: (Optional) The URL for service account impersonation. If set, the script will exchange the federated token for a Google Service Account token. Example: `https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/my-service-account@my-project.iam.gserviceaccount.com:generateAccessToken`. 
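The `gcp_workload_audience` value is simply the full resource name of the Workload Identity Pool provider. For illustration only (the helper below is hypothetical; the sample reads the finished string from the secrets file), it can be assembled from its parts like this:

```python
def build_workload_audience(project_number: str, pool_id: str, provider_id: str) -> str:
    """Compose the audience string for a Workload Identity Pool provider."""
    return (
        f"//iam.googleapis.com/projects/{project_number}"
        f"/locations/global/workloadIdentityPools/{pool_id}"
        f"/providers/{provider_id}"
    )


# Example:
# build_workload_audience("123456789", "my-pool", "my-provider")
# -> "//iam.googleapis.com/projects/123456789/locations/global/workloadIdentityPools/my-pool/providers/my-provider"
```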
### Run the Application diff --git a/auth/custom-credentials/okta/custom-credentials-okta-secrets.json.example b/auth/custom-credentials/okta/custom-credentials-okta-secrets.json.example new file mode 100644 index 00000000000..fa04fda7cb2 --- /dev/null +++ b/auth/custom-credentials/okta/custom-credentials-okta-secrets.json.example @@ -0,0 +1,8 @@ +{ + "okta_domain": "https://your-okta-domain.okta.com", + "okta_client_id": "your-okta-client-id", + "okta_client_secret": "your-okta-client-secret", + "gcp_workload_audience": "//iam.googleapis.com/projects/123456789/locations/global/workloadIdentityPools/my-pool/providers/my-provider", + "gcs_bucket_name": "your-gcs-bucket-name", + "gcp_service_account_impersonation_url": "https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/my-service-account@my-project.iam.gserviceaccount.com:generateAccessToken" +} diff --git a/auth/custom-credentials/okta/noxfile_config.py b/auth/custom-credentials/okta/noxfile_config.py index a4c09718253..834faa2b2f6 100644 --- a/auth/custom-credentials/okta/noxfile_config.py +++ b/auth/custom-credentials/okta/noxfile_config.py @@ -15,12 +15,4 @@ TEST_CONFIG_OVERRIDE = { # Ignore all versions except 3.9, which is the version available. "ignored_versions": ["2.7", "3.6", "3.7", "3.8", "3.10", "3.11", "3.12", "3.13"], - "envs": { - "OKTA_DOMAIN": "", - "OKTA_CLIENT_ID": "", - "OKTA_CLIENT_SECRET": "", - "GCP_WORKLOAD_AUDIENCE": "", - "GCS_BUCKET_NAME": "", - "GCP_SERVICE_ACCOUNT_IMPERSONATION_URL": "", - }, } diff --git a/auth/custom-credentials/okta/snippets.py b/auth/custom-credentials/okta/snippets.py index efd45796687..c1b0718d779 100644 --- a/auth/custom-credentials/okta/snippets.py +++ b/auth/custom-credentials/okta/snippets.py @@ -13,7 +13,6 @@ # [START auth_custom_credential_supplier_okta] import json -import os import time import urllib.parse @@ -106,18 +105,27 @@ def authenticate_with_okta_credentials( def main(): - gcp_audience = os.getenv("GCP_WORKLOAD_AUDIENCE") - gcs_bucket_name = os.getenv("GCS_BUCKET_NAME") - sa_impersonation_url = os.getenv("GCP_SERVICE_ACCOUNT_IMPERSONATION_URL") + try: + with open("custom-credentials-okta-secrets.json") as f: + secrets = json.load(f) + except FileNotFoundError: + print( + "Could not find custom-credentials-okta-secrets.json." + ) + return + + gcp_audience = secrets.get("gcp_workload_audience") + gcs_bucket_name = secrets.get("gcs_bucket_name") + sa_impersonation_url = secrets.get("gcp_service_account_impersonation_url") - okta_domain = os.getenv("OKTA_DOMAIN") - okta_client_id = os.getenv("OKTA_CLIENT_ID") - okta_client_secret = os.getenv("OKTA_CLIENT_SECRET") + okta_domain = secrets.get("okta_domain") + okta_client_id = secrets.get("okta_client_id") + okta_client_secret = secrets.get("okta_client_secret") if not all( [gcp_audience, gcs_bucket_name, okta_domain, okta_client_id, okta_client_secret] ): - print("Missing required environment variables.") + print("Missing required values in secrets.json.") return try: diff --git a/auth/custom-credentials/okta/snippets_test.py b/auth/custom-credentials/okta/snippets_test.py index bf3218057ed..539f33150cc 100644 --- a/auth/custom-credentials/okta/snippets_test.py +++ b/auth/custom-credentials/okta/snippets_test.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import json import os import time from unittest import mock @@ -97,30 +98,36 @@ def test_authenticate_unit_success(MockSupplier, MockCreds, MockSession): # --- System Test --- - def test_authenticate_system(): """ System test that runs against the real API. - Skips automatically if required Okta/GCP env vars are missing. + Skips automatically if custom-credentials-okta-secrets.json is missing or incomplete. """ - required_env = [ - "GCP_WORKLOAD_AUDIENCE", - "OKTA_CLIENT_ID", - "OKTA_CLIENT_SECRET", - "OKTA_DOMAIN", - "GCS_BUCKET_NAME", + if not os.path.exists("custom-credentials-okta-secrets.json"): + pytest.skip( + "Skipping system test: custom-credentials-okta-secrets.json not found." + ) + + with open("custom-credentials-okta-secrets.json", "r") as f: + secrets = json.load(f) + + required_keys = [ + "gcp_workload_audience", + "gcs_bucket_name", + "okta_domain", + "okta_client_id", + "okta_client_secret", ] - if not all(os.getenv(var) for var in required_env): - pytest.skip("Skipping system test: missing required env vars.") - - metadata = snippets.authenticate_with_okta_credentials( - bucket_name=os.getenv("GCS_BUCKET_NAME"), - audience=os.getenv("GCP_WORKLOAD_AUDIENCE"), - domain=os.getenv("OKTA_DOMAIN"), - client_id=os.getenv("OKTA_CLIENT_ID"), - client_secret=os.getenv("OKTA_CLIENT_SECRET"), - impersonation_url=os.getenv("GCP_SERVICE_ACCOUNT_IMPERSONATION_URL"), - ) - - assert metadata is not None - assert metadata.get("name") == os.getenv("GCS_BUCKET_NAME") + if not all(key in secrets for key in required_keys): + pytest.skip( + "Skipping system test: custom-credentials-okta-secrets.json is missing required keys." + ) + + # The main() function handles the auth flow and printing. + # We mock the print function to verify the output. + with mock.patch("builtins.print") as mock_print: + snippets.main() + + # Check for the success message in the print output. + output = "\n".join([call.args[0] for call in mock_print.call_args_list]) + assert "--- SUCCESS! ---" in output From 4fc575e3f2f8d33df5bad3d1bac0f81fef475c2c Mon Sep 17 00:00:00 2001 From: Pranav Iyer Date: Fri, 21 Nov 2025 17:02:22 -0800 Subject: [PATCH 04/23] Added license header to pod.yaml. --- auth/custom-credentials/aws/pod.yaml | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/auth/custom-credentials/aws/pod.yaml b/auth/custom-credentials/aws/pod.yaml index 9580569ced8..4ab6d7997e3 100644 --- a/auth/custom-credentials/aws/pod.yaml +++ b/auth/custom-credentials/aws/pod.yaml @@ -1,3 +1,16 @@ +# Copyright 2025 Google LLC +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + apiVersion: v1 kind: Pod metadata: From 1956519dc1aca4f2805881d82ed91487eab5cc3a Mon Sep 17 00:00:00 2001 From: Jennifer Davis Date: Thu, 11 Dec 2025 17:40:20 -0800 Subject: [PATCH 05/23] fix: Update Dockerfile this updates to a more modern version of python and leverages the benefits of Docker's Layer caching. requirements.txt will rarely change so this way the depedency layer is cached and only rebuilt if you explictly change requirements.txt. 
--- auth/custom-credentials/aws/Dockerfile | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/auth/custom-credentials/aws/Dockerfile b/auth/custom-credentials/aws/Dockerfile index 1cf23d3ac50..d90d88aa0a8 100644 --- a/auth/custom-credentials/aws/Dockerfile +++ b/auth/custom-credentials/aws/Dockerfile @@ -1,20 +1,15 @@ -FROM python:3.9-slim +FROM python:3.13-slim -# Create a non-root user RUN useradd -m appuser -# Create a working directory WORKDIR /app -# Copy files and install dependencies COPY --chown=appuser:appuser requirements.txt . -COPY --chown=appuser:appuser snippets.py . -# Switch to the non-root user USER appuser +RUN pip install --no-cache-dir -r requirements.txt + +COPY --chown=appuser:appuser snippets.py . -# Install dependencies for the user -RUN pip install --no-cache-dir --user -r requirements.txt -# Set the entrypoint CMD ["python3", "snippets.py"] From 8e727541d3e7f121281bf8ed4343500f09c56587 Mon Sep 17 00:00:00 2001 From: Jennifer Davis Date: Thu, 11 Dec 2025 17:52:40 -0800 Subject: [PATCH 06/23] fix: clarify comments --- auth/custom-credentials/aws/pod.yaml | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/auth/custom-credentials/aws/pod.yaml b/auth/custom-credentials/aws/pod.yaml index 4ab6d7997e3..70b94bf25e2 100644 --- a/auth/custom-credentials/aws/pod.yaml +++ b/auth/custom-credentials/aws/pod.yaml @@ -16,17 +16,24 @@ kind: Pod metadata: name: custom-credential-pod spec: - serviceAccountName: your-k8s-service-account # The service account associated with the AWS IAM role + # The Kubernetes Service Account that is annotated with the corresponding + # AWS IAM role ARN. See the README for instructions on setting up IAM + # Roles for Service Accounts (IRSA). + serviceAccountName: your-k8s-service-account containers: - name: gcp-auth-sample - image: your-container-image:latest # Your image from ECR + # The container image pushed to the container registry + # For example, Amazon Elastic Container Registry + image: your-container-image:latest env: - # AWS_REGION is often required for Boto3 to initialize correctly in containers + # REQUIRED: The AWS region. Boto3 requires this to be set explicitly + # in containers. - name: AWS_REGION value: "your-aws-region" + # REQUIRED: The full identifier of the Workload Identity Pool provider - name: GCP_WORKLOAD_AUDIENCE value: "your-gcp-workload-audience" - # Optional: If you want to use service account impersonation + # OPTIONAL: Enable Google Cloud service account impersonation # - name: GCP_SERVICE_ACCOUNT_IMPERSONATION_URL # value: "your-gcp-service-account-impersonation-url" - name: GCS_BUCKET_NAME From 8529f82978fef1a5d36a4307891b3e866d45c3a8 Mon Sep 17 00:00:00 2001 From: Jennifer Davis Date: Thu, 11 Dec 2025 18:25:38 -0800 Subject: [PATCH 07/23] fix: refactor main to seprate concerns simplify testing need to refactor testing based on this separation. 
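A sketch of the kind of unit test this separation enables, assuming `main()` returns the metadata dict as in the refactor below (the test name and mocks here are illustrative, not part of the sample's test suite): once the success-path result is returned rather than printed, a test can assert on the return value directly instead of capturing stdout.

```python
from unittest import mock

import snippets


def test_main_returns_bucket_metadata():
    fake_metadata = {"name": "test-bucket"}
    # Stub out file loading and the real auth call; provide config via env vars.
    with mock.patch.object(
        snippets, "authenticate_with_aws_credentials", return_value=fake_metadata
    ), mock.patch.object(snippets, "_load_config_from_file"), mock.patch.dict(
        "os.environ",
        {"GCP_WORKLOAD_AUDIENCE": "test-aud", "GCS_BUCKET_NAME": "test-bucket"},
    ):
        assert snippets.main() == fake_metadata
```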
--- auth/custom-credentials/aws/snippets.py | 64 ++++++++++++++----------- 1 file changed, 35 insertions(+), 29 deletions(-) diff --git a/auth/custom-credentials/aws/snippets.py b/auth/custom-credentials/aws/snippets.py index 67625e1246d..adc8e516066 100644 --- a/auth/custom-credentials/aws/snippets.py +++ b/auth/custom-credentials/aws/snippets.py @@ -14,6 +14,7 @@ # [START auth_custom_credential_supplier_aws] import json import os +import sys import boto3 from google.auth import aws @@ -43,6 +44,7 @@ def get_aws_region(self, context, request) -> str: if not self._cached_region: raise exceptions.GoogleAuthError( "Boto3 was unable to resolve an AWS region." + "Please set the AWS_REGION environment variable." ) return self._cached_region @@ -71,10 +73,8 @@ def authenticate_with_aws_credentials(bucket_name, audience, impersonation_url=N dict: The bucket metadata response from the Google Cloud Storage API. """ - # 1. Instantiate the custom supplier. custom_supplier = CustomAwsSupplier() - # 2. Instantiate the AWS Credentials object. credentials = aws.Credentials( audience=audience, subject_token_type="urn:ietf:params:aws:token-type:aws4_request", @@ -83,10 +83,7 @@ def authenticate_with_aws_credentials(bucket_name, audience, impersonation_url=N scopes=["https://www.googleapis.com/auth/devstorage.read_write"], ) - # 3. Create an authenticated session. authed_session = auth_requests.AuthorizedSession(credentials) - - # 4. Make the API Request. bucket_url = f"https://storage.googleapis.com/storage/v1/b/{bucket_name}" response = authed_session.get(bucket_url) @@ -101,13 +98,16 @@ def authenticate_with_aws_credentials(bucket_name, audience, impersonation_url=N def _load_config_from_file(): """ If a local secrets file is present, load it into the environment. - This is a "just-in-time" configuration for local development. These - variables are only set for the current process and are not exposed to the - shell. + This is a "just-in-time" configuration for local development. """ - if os.path.exists("custom-credentials-aws-secrets.json"): - with open("custom-credentials-aws-secrets.json", "r") as f: - secrets = json.load(f) + secrets_file = "custom-credentials-aws-secrets.json" + if os.path.exists(secrets_file): + with open(secrets_file, "r") as f: + try: + secrets = json.load(f) + except json.JSONDecodeError: + print(f"Error: '{secrets_file}' is not valid JSON.", file=sys.stderr) + return os.environ["AWS_ACCESS_KEY_ID"] = secrets.get("aws_access_key_id", "") os.environ["AWS_SECRET_ACCESS_KEY"] = secrets.get("aws_secret_access_key", "") @@ -119,36 +119,42 @@ def _load_config_from_file(): ) -def main(): +def main() -> dict | None: + """ + Loads configuration, authenticates, and retrieves GCS bucket metadata. + + Raises: + ValueError: If required configuration is missing. + + Returns: + A dictionary containing the bucket metadata on success. + """ - # Reads the custom-credentials-aws-secrets.json if running locally. _load_config_from_file() - # Now, read the configuration from the environment. In a local run, these - # will be the values we just set. In a containerized run, they will be - # the values provided by the environment. gcp_audience = os.getenv("GCP_WORKLOAD_AUDIENCE") sa_impersonation_url = os.getenv("GCP_SERVICE_ACCOUNT_IMPERSONATION_URL") gcs_bucket_name = os.getenv("GCS_BUCKET_NAME") if not all([gcp_audience, gcs_bucket_name]): - print( + raise ValueError( "Required configuration missing. 
Please provide it in a " "custom-credentials-aws-secrets.json file or as environment variables: " "GCP_WORKLOAD_AUDIENCE, GCS_BUCKET_NAME" ) - return - - try: - print(f"Retrieving metadata for bucket: {gcs_bucket_name}...") - metadata = authenticate_with_aws_credentials( - gcs_bucket_name, gcp_audience, sa_impersonation_url - ) - print("--- SUCCESS! ---") - print(json.dumps(metadata, indent=2)) - except Exception as e: - print(f"Authentication or Request failed: {e}") + print(f"Retrieving metadata for bucket: {gcs_bucket_name}...") + metadata = authenticate_with_aws_credentials( + gcs_bucket_name, gcp_audience, sa_impersonation_url + ) + return metadata if __name__ == "__main__": - main() + try: + metadata = main() + if metadata: + print("--- SUCCESS! ---") + print(json.dumps(metadata, indent=2)) + except Exception as e: + print(f"An error occurred: {e}", file=sys.stderr) + sys.exit(1) From f78858bc3a76babbb8312255119fb8a7fe1b9a23 Mon Sep 17 00:00:00 2001 From: Jennifer Davis Date: Thu, 11 Dec 2025 18:32:21 -0800 Subject: [PATCH 08/23] fix: update testing to match refactored main. --- auth/custom-credentials/aws/snippets_test.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/auth/custom-credentials/aws/snippets_test.py b/auth/custom-credentials/aws/snippets_test.py index 779a492c08f..c137684b418 100644 --- a/auth/custom-credentials/aws/snippets_test.py +++ b/auth/custom-credentials/aws/snippets_test.py @@ -116,16 +116,14 @@ def test_authenticate_system(): "aws_secret_access_key", "aws_region", ] - if not all(key in secrets for key in required_keys): + if not all(key in secrets and secrets[key] for key in required_keys): pytest.skip( - "Skipping system test: custom-credentials-aws-secrets.json is missing required keys." + "Skipping system test: custom-credentials-aws-secrets.json is missing or has empty required keys." ) - # The main() function handles the auth flow and printing. - # We mock the print function to verify the output. - with mock.patch("builtins.print") as mock_print: - snippets.main() + metadata = snippets.main() - # Check for the success message in the print output. - output = "\n".join([call.args[0] for call in mock_print.call_args_list]) - assert "--- SUCCESS! ---" in output + # Verify that the returned metadata is a dictionary with expected keys. + assert isinstance(metadata, dict) + assert "name" in metadata + assert metadata["name"] == secrets["gcs_bucket_name"] From 2953b7573873535c66f246d1debeac6f7bb3a68c Mon Sep 17 00:00:00 2001 From: Jennifer Davis Date: Thu, 11 Dec 2025 18:38:38 -0800 Subject: [PATCH 09/23] fix: update version to test --- auth/custom-credentials/aws/noxfile_config.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/auth/custom-credentials/aws/noxfile_config.py b/auth/custom-credentials/aws/noxfile_config.py index 834faa2b2f6..0ed973689f7 100644 --- a/auth/custom-credentials/aws/noxfile_config.py +++ b/auth/custom-credentials/aws/noxfile_config.py @@ -13,6 +13,5 @@ # limitations under the License. TEST_CONFIG_OVERRIDE = { - # Ignore all versions except 3.9, which is the version available. 
- "ignored_versions": ["2.7", "3.6", "3.7", "3.8", "3.10", "3.11", "3.12", "3.13"], + "ignored_versions": ["2.7", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11", "3.12"], } From ea809bc4726bb2a729206f12d8c1d3fd73427e3f Mon Sep 17 00:00:00 2001 From: Jennifer Davis Date: Thu, 11 Dec 2025 19:14:06 -0800 Subject: [PATCH 10/23] fix: use latest python --- auth/custom-credentials/okta/noxfile_config.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/auth/custom-credentials/okta/noxfile_config.py b/auth/custom-credentials/okta/noxfile_config.py index 834faa2b2f6..5edd0d00d59 100644 --- a/auth/custom-credentials/okta/noxfile_config.py +++ b/auth/custom-credentials/okta/noxfile_config.py @@ -13,6 +13,5 @@ # limitations under the License. TEST_CONFIG_OVERRIDE = { - # Ignore all versions except 3.9, which is the version available. - "ignored_versions": ["2.7", "3.6", "3.7", "3.8", "3.10", "3.11", "3.12", "3.13"], + "ignored_versions": ["2.7", "3.6", "3.7", "3.8","3.9","3.10", "3.11", "3.12"], } From 0bd179b8e2127ced6610403d89cd44b6355dbbab Mon Sep 17 00:00:00 2001 From: Jennifer Davis Date: Thu, 11 Dec 2025 19:15:07 -0800 Subject: [PATCH 11/23] fix: last line --- auth/custom-credentials/okta/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/auth/custom-credentials/okta/requirements.txt b/auth/custom-credentials/okta/requirements.txt index ca2a46a5a41..c40de273b71 100644 --- a/auth/custom-credentials/okta/requirements.txt +++ b/auth/custom-credentials/okta/requirements.txt @@ -1,3 +1,3 @@ requests==2.32.3 google-auth==2.43.0 -python-dotenv==1.1.1 \ No newline at end of file +python-dotenv==1.1.1 From 633fd3aea697c9be4390fef440795e91e8783c0f Mon Sep 17 00:00:00 2001 From: Jennifer Davis Date: Thu, 11 Dec 2025 19:16:39 -0800 Subject: [PATCH 12/23] fix: address issues introduced in gitignore file --- .gitignore | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 9eff6289235..80cf8846a58 100644 --- a/.gitignore +++ b/.gitignore @@ -30,5 +30,8 @@ env/ .idea .env* **/venv -**/noxfile.py\n# Local secrets file\nauth/custom-credentials/okta/custom-credentials-okta-secrets.json +**/noxfile.py + +# Auth Local secrets file +auth/custom-credentials/okta/custom-credentials-okta-secrets.json auth/custom-credentials/aws/custom-credentials-aws-secrets.json From d265bbbbfb0adf569da283a9e3d63037424ee692 Mon Sep 17 00:00:00 2001 From: Jennifer Davis Date: Thu, 11 Dec 2025 19:35:16 -0800 Subject: [PATCH 13/23] fix: cleanup README documentation. --- auth/custom-credentials/aws/README.md | 65 +++++++++++++++++---------- 1 file changed, 42 insertions(+), 23 deletions(-) diff --git a/auth/custom-credentials/aws/README.md b/auth/custom-credentials/aws/README.md index 767906682a2..ecfc990dcb1 100644 --- a/auth/custom-credentials/aws/README.md +++ b/auth/custom-credentials/aws/README.md @@ -1,14 +1,22 @@ -# Running the Custom Credential Supplier Sample +# Running the Custom AWS Credential Supplier Sample +This sample demonstrates how to use a custom AWS security credential supplier to authenticate with Google Cloud using AWS as an external identity provider. It uses Boto3 (the AWS SDK for Python) to fetch credentials from sources like Amazon Elastic Kubernetes Service (EKS) with IAM Roles for Service Accounts(IRSA), Elastic Container Service (ECS), or Fargate. + +## Prerequisites + +* An AWS account. +* A Google Cloud project with the IAM API enabled. +* A GCS bucket. +* Python 3.10 or later installed. 
+* If you want to use AWS security credentials that cannot be retrieved using methods supported natively by the [google-auth](https://github.com/googleapis/google-auth-library-python) library, a custom `AwsSecurityCredentialsSupplier` implementation may be specified. The supplier must return valid, unexpired AWS security credentials when called by the Google Cloud Auth library. -This sample demonstrates how to use **Boto3** (the AWS SDK for Python) as a custom supplier to bridge AWS credentials—from sources like EKS IRSA, ECS, or Fargate—to Google Cloud Workload Identity. ## Running Locally -For local development, you can provide credentials and configuration in a JSON file. For containerized environments like EKS, the script can fall back to environment variables. +For local development, you can provide credentials and configuration in a JSON file. -### 1. Install Dependencies +### Install Dependencies Ensure you have Python installed, then install the required libraries: @@ -16,15 +24,18 @@ Ensure you have Python installed, then install the required libraries: pip install -r requirements.txt ``` -### 2. Configure Credentials for Local Development +### Configure Credentials for Local Development 1. Copy the example secrets file to a new file named `custom-credentials-aws-secrets.json`: ```bash cp custom-credentials-aws-secrets.json.example custom-credentials-aws-secrets.json ``` -2. Open `custom-credentials-aws-secrets.json` and fill in the required values for your AWS and GCP configuration. The `custom-credentials-aws-secrets.json` file is ignored by Git, so your credentials will not be checked into version control. +2. Open `custom-credentials-aws-secrets.json` and fill in the required values for your AWS and Google Cloud configuration. Do not check your `custom-credentials-aws-secrets.json` file into version control. + +**Note:** This file is only used for local development and is not needed when running in a containerized environment like EKS with IRSA. -### 3. Run the Script + +### Run the Script ```bash python3 snippets.py @@ -36,15 +47,13 @@ When run locally, the script will detect the `custom-credentials-aws-secrets.jso This section provides a brief overview of how to run the sample in an Amazon EKS cluster. -### 1. EKS Cluster Setup +### EKS Cluster Setup First, you need an EKS cluster. You can create one using `eksctl` or the AWS Management Console. For detailed instructions, refer to the [Amazon EKS documentation](https://docs.aws.amazon.com/eks/latest/userguide/create-cluster.html). -### 2. Configure IAM Roles for Service Accounts (IRSA) - -IRSA allows you to associate an IAM role with a Kubernetes service account. This provides a secure way for your pods to access AWS services without hardcoding long-lived credentials. +### Configure IAM Roles for Service Accounts (IRSA) -You can essentially complete the OIDC setup, IAM role creation, and Service Account association in one step using `eksctl`. +IRSA enables you to associate an IAM role with a Kubernetes service account. This provides a secure way for your pods to access AWS services without hardcoding long-lived credentials. Run the following command to create the IAM role and bind it to a Kubernetes Service Account: @@ -59,19 +68,21 @@ eksctl create iamserviceaccount \ --approve ``` -> **Note**: The `--attach-policy-arn` flag is used here to demonstrate attaching permissions. Update this with the specific AWS policy ARN your application requires (e.g., if your Boto3 client needs to read from S3 or DynamoDB). 
+> **Note**: The `--attach-policy-arn` flag is used here to demonstrate attaching permissions. Update this with the specific AWS policy ARN your application requires. -For a deep dive into how this works manually, refer to the [IAM Roles for Service Accounts](https://docs.aws.amazon.com/eks/latest/userguide/iam-roles-for-service-accounts.html) documentation. +For a deep dive into how this works without using `eksctl`, refer to the [IAM Roles for Service Accounts](https://docs.aws.amazon.com/eks/latest/userguide/iam-roles-for-service-accounts.html) documentation. -### 3. Configure GCP to Trust the AWS Role +### Configure Google Cloud to Trust the AWS Role To allow your AWS role to authenticate as a Google Cloud service account, you need to configure Workload Identity Federation. This process involves these key steps: 1. **Create a Workload Identity Pool and an AWS Provider:** The pool holds the configuration, and the provider is set up to trust your AWS account. -2. **Create or select a GCP Service Account:** This service account will be impersonated by your AWS role. Grant this service account the necessary GCP permissions for your application (e.g., access to GCS or BigQuery). +2. **Create or select a Google Cloud Service Account:** This service account will be impersonated by your AWS role. + +3. **Bind the AWS Role to the Google Cloud Service Account:** Create an IAM policy binding that gives your AWS role the `Workload Identity User` (`roles/iam.workloadIdentityUser`) role on the Google Cloud service account. -3. **Bind the AWS Role to the GCP Service Account:** Create an IAM policy binding that gives your AWS role the `Workload Identity User` (`roles/iam.workloadIdentityUser`) role on the GCP service account. This allows the AWS role to impersonate the service account. +For more detailed information, see the documentation on [Configuring Workload Identity Federation](https://cloud.google.com/iam/docs/workload-identity-federation-with-other-clouds). **Alternative: Direct Access** @@ -79,9 +90,11 @@ To allow your AWS role to authenticate as a Google Cloud service account, you ne For more detailed information, see the documentation on [Configuring Workload Identity Federation](https://cloud.google.com/iam/docs/workload-identity-federation-with-other-clouds). -### 4. Containerize and Package the Application +### Containerize and Package the Application + +Create a `Dockerfile` for the Python application and push the image to a container registry (for example Amazon ECR) that your EKS cluster can access. -Create a `Dockerfile` for the Python application and push the image to a container registry (e.g., Amazon ECR) that your EKS cluster can access. Refer to the [`Dockerfile`](Dockerfile) for the container image definition. +**Note:** The provided [`Dockerfile`](Dockerfile) is an example and may need to be modified for your specific needs. Build and push the image: ```bash @@ -89,20 +102,26 @@ docker build -t your-container-image:latest . docker push your-container-image:latest ``` -### 5. Deploy to EKS +### Deploy to EKS Create a Kubernetes deployment manifest to deploy your application to the EKS cluster. See the [`pod.yaml`](pod.yaml) file for an example. +**Note:** The provided [`pod.yaml`](pod.yaml) is an example and may need to be modified for your specific needs. + Deploy the pod: ```bash kubectl apply -f pod.yaml ``` -### 6. Clean Up +### Clean Up -To clean up the resources, delete the EKS cluster and any other AWS and GCP resources you created. 
+To clean up the resources, delete the EKS cluster and any other AWS and Google Cloud resources you created. ```bash eksctl delete cluster --name your-cluster-name -``` \ No newline at end of file +``` + +## Testing + +This sample is not continuously tested. It is provided for instructional purposes and may require modifications to work in your environment. From 24e9b29a3cf53254ab678f2abad805f11de1777e Mon Sep 17 00:00:00 2001 From: Jennifer Davis Date: Thu, 11 Dec 2025 19:44:35 -0800 Subject: [PATCH 14/23] fix: refine the README instructions. --- auth/custom-credentials/okta/README.md | 38 ++++++++++++++------------ 1 file changed, 21 insertions(+), 17 deletions(-) diff --git a/auth/custom-credentials/okta/README.md b/auth/custom-credentials/okta/README.md index dd40f20df11..96d444e85a4 100644 --- a/auth/custom-credentials/okta/README.md +++ b/auth/custom-credentials/okta/README.md @@ -1,12 +1,15 @@ -Here is the adapted `README.md` for the Python version of the Okta Custom Credential Supplier sample. +# Running the Custom Okta Credential Supplier Sample -# Running the Custom Okta Credential Supplier Sample (Python) +This sample demonstrates how to use a custom subject token supplier to authenticate with Google Cloud using Okta as an external identity provider. It uses the Client Credentials flow for machine-to-machine (M2M) authentication. -If you want to use OIDC or SAML 2.0 tokens that cannot be retrieved using methods supported natively by the [google-auth](https://github.com/googleapis/google-auth-library-python) library, a custom `SubjectTokenSupplier` implementation may be specified when creating an identity pool client. The supplier must return a valid, unexpired subject token when called by the GCP credential. +## Prerequisites -This document provides instructions on how to run the custom Okta credential supplier sample using Python. - -## 1. Okta Configuration +* An Okta developer account. +* A Google Cloud project with the IAM API enabled. +* A Google Cloud Storage bucket. Ensure that the authenticated user has access to this bucket. +* Python 3.10 or later installed. +* +## Okta Configuration Before running the sample, you need to configure an Okta application for Machine-to-Machine (M2M) communication. @@ -27,9 +30,9 @@ Once the application is created, you will find the following information in the You will need these values to configure the sample. -## 2. GCP Configuration +## Google Cloud Configuration -You need to configure a Workload Identity Pool in GCP to trust the Okta application. +You need to configure a Workload Identity Pool in Google Cloud to trust the Okta application. ### Set up Workload Identity Federation @@ -41,10 +44,6 @@ You need to configure a Workload Identity Pool in GCP to trust the Okta applicat For detailed instructions, refer to the [Workload Identity Federation documentation](https://cloud.google.com/iam/docs/workload-identity-federation). -### GCS Bucket - -Ensure you have a GCS bucket that the authenticated user will have access to. You will need the name of this bucket to run the sample. - ## 3. Running the Script To run the sample on your local system, you need to install the dependencies and configure your credentials. @@ -63,12 +62,13 @@ pip install -r requirements.txt ``` 2. Open `custom-credentials-okta-secrets.json` and fill in the following values: - * `okta_domain`: Your Okta developer domain (e.g., `https://dev-123456.okta.com`). + * `okta_domain`: Your Okta developer domain (for example `https://dev-123456.okta.com`). 
* `okta_client_id`: The client ID for your application. * `okta_client_secret`: The client secret for your application. - * `gcp_workload_audience`: The audience for the GCP Workload Identity Pool. This typically follows the format: `//iam.googleapis.com/projects/YOUR_PROJECT_NUMBER/locations/global/workloadIdentityPools/YOUR_POOL_ID/providers/YOUR_PROVIDER_ID`. - * `gcs_bucket_name`: The name of the GCS bucket to access. - * `gcp_service_account_impersonation_url`: (Optional) The URL for service account impersonation. If set, the script will exchange the federated token for a Google Service Account token. Example: `https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/my-service-account@my-project.iam.gserviceaccount.com:generateAccessToken`. + * `gcp_workload_audience`: The audience for the Google Cloud Workload Identity Pool. This is the full identifier of the Workload Identity Pool provider. + * `gcs_bucket_name`: The name of the Google Cloud Storage bucket to access. + * `gcp_service_account_impersonation_url`: (Optional) The URL for service account impersonation. + ### Run the Application @@ -76,4 +76,8 @@ pip install -r requirements.txt python3 snippets.py ``` -The script will then authenticate with Okta to get an OIDC token, exchange that token for a GCP federated token (and optionally a Service Account token), and use it to list metadata for the specified GCS bucket. \ No newline at end of file +The script authenticates with Okta to get an OIDC token, exchanges that token for a Google Cloud federated token, and uses it to list metadata for the specified Google Cloud Storage bucket. + +## Testing + +This sample is not continuously tested. It is provided for instructional purposes and may require modifications to work in your environment. From 6d361d441a33b9cb9cca50f4cd5fd64d48a6926b Mon Sep 17 00:00:00 2001 From: Jennifer Davis Date: Thu, 11 Dec 2025 19:45:33 -0800 Subject: [PATCH 15/23] fix: Apply suggestion from @gemini-code-assist[bot] Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- auth/custom-credentials/okta/snippets.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/auth/custom-credentials/okta/snippets.py b/auth/custom-credentials/okta/snippets.py index c1b0718d779..bcff53e7910 100644 --- a/auth/custom-credentials/okta/snippets.py +++ b/auth/custom-credentials/okta/snippets.py @@ -11,12 +11,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -# [START auth_custom_credential_supplier_okta] import json +import os +import sys import time import urllib.parse -from google.auth import identity_pool +from google.auth import exceptions, identity_pool from google.auth.transport import requests as auth_requests import requests From b6fb71876da21a6af43a80cc3d369bcf18f58ec2 Mon Sep 17 00:00:00 2001 From: Jennifer Davis Date: Thu, 11 Dec 2025 19:48:31 -0800 Subject: [PATCH 16/23] fix: starting region tag --- auth/custom-credentials/okta/snippets.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/auth/custom-credentials/okta/snippets.py b/auth/custom-credentials/okta/snippets.py index bcff53e7910..1901b8aeec2 100644 --- a/auth/custom-credentials/okta/snippets.py +++ b/auth/custom-credentials/okta/snippets.py @@ -11,6 +11,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+# [START auth_custom_credential_supplier_okta] + import json import os import sys From d0ed98deb22f926ec857747c5f71c8ac44e6066d Mon Sep 17 00:00:00 2001 From: Jennifer Davis Date: Thu, 11 Dec 2025 19:59:00 -0800 Subject: [PATCH 17/23] fix: address whitespace linting issue --- auth/custom-credentials/okta/noxfile_config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/auth/custom-credentials/okta/noxfile_config.py b/auth/custom-credentials/okta/noxfile_config.py index 5edd0d00d59..0ed973689f7 100644 --- a/auth/custom-credentials/okta/noxfile_config.py +++ b/auth/custom-credentials/okta/noxfile_config.py @@ -13,5 +13,5 @@ # limitations under the License. TEST_CONFIG_OVERRIDE = { - "ignored_versions": ["2.7", "3.6", "3.7", "3.8","3.9","3.10", "3.11", "3.12"], + "ignored_versions": ["2.7", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11", "3.12"], } From e4b105375c1ac1a6d554e18b150cdad0b6162a6f Mon Sep 17 00:00:00 2001 From: Jennifer Davis Date: Thu, 11 Dec 2025 20:29:53 -0800 Subject: [PATCH 18/23] fix: address linting --- auth/custom-credentials/okta/snippets.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/auth/custom-credentials/okta/snippets.py b/auth/custom-credentials/okta/snippets.py index 1901b8aeec2..39c98c6e8b6 100644 --- a/auth/custom-credentials/okta/snippets.py +++ b/auth/custom-credentials/okta/snippets.py @@ -15,11 +15,10 @@ import json import os -import sys import time import urllib.parse -from google.auth import exceptions, identity_pool +from google.auth import identity_pool from google.auth.transport import requests as auth_requests import requests From e89f6a0b22124ad00cd58612fe683b550a5c1f7f Mon Sep 17 00:00:00 2001 From: Pranav Iyer Date: Fri, 12 Dec 2025 15:28:56 -0800 Subject: [PATCH 19/23] Now using the storage library instead of calling the storage endpoint. --- auth/custom-credentials/aws/snippets.py | 62 +++++++++++------------- auth/custom-credentials/okta/snippets.py | 20 ++------ 2 files changed, 34 insertions(+), 48 deletions(-) diff --git a/auth/custom-credentials/aws/snippets.py b/auth/custom-credentials/aws/snippets.py index adc8e516066..f6cf8967102 100644 --- a/auth/custom-credentials/aws/snippets.py +++ b/auth/custom-credentials/aws/snippets.py @@ -14,12 +14,11 @@ # [START auth_custom_credential_supplier_aws] import json import os -import sys import boto3 from google.auth import aws from google.auth import exceptions -from google.auth.transport import requests as auth_requests +from google.cloud import storage class CustomAwsSupplier(aws.AwsSecurityCredentialsSupplier): @@ -44,7 +43,6 @@ def get_aws_region(self, context, request) -> str: if not self._cached_region: raise exceptions.GoogleAuthError( "Boto3 was unable to resolve an AWS region." - "Please set the AWS_REGION environment variable." ) return self._cached_region @@ -73,8 +71,10 @@ def authenticate_with_aws_credentials(bucket_name, audience, impersonation_url=N dict: The bucket metadata response from the Google Cloud Storage API. """ + # 1. Instantiate the custom supplier. custom_supplier = CustomAwsSupplier() + # 2. Instantiate the AWS Credentials object. 
credentials = aws.Credentials( audience=audience, subject_token_type="urn:ietf:params:aws:token-type:aws4_request", @@ -83,13 +83,13 @@ def authenticate_with_aws_credentials(bucket_name, audience, impersonation_url=N scopes=["https://www.googleapis.com/auth/devstorage.read_write"], ) - authed_session = auth_requests.AuthorizedSession(credentials) - bucket_url = f"https://storage.googleapis.com/storage/v1/b/{bucket_name}" + # 3. Create a storage client. + storage_client = storage.Client(credentials=credentials) - response = authed_session.get(bucket_url) - response.raise_for_status() + # 4. Get bucket metadata. + bucket = storage_client.get_bucket(bucket_name) - return response.json() + return bucket._properties # [END auth_custom_credential_supplier_aws] @@ -98,7 +98,9 @@ def authenticate_with_aws_credentials(bucket_name, audience, impersonation_url=N def _load_config_from_file(): """ If a local secrets file is present, load it into the environment. - This is a "just-in-time" configuration for local development. + This is a "just-in-time" configuration for local development. These + variables are only set for the current process and are not exposed to the + shell. """ secrets_file = "custom-credentials-aws-secrets.json" if os.path.exists(secrets_file): @@ -119,42 +121,36 @@ def _load_config_from_file(): ) -def main() -> dict | None: - """ - Loads configuration, authenticates, and retrieves GCS bucket metadata. - - Raises: - ValueError: If required configuration is missing. - - Returns: - A dictionary containing the bucket metadata on success. - """ +def main(): + # Reads the custom-credentials-aws-secrets.json if running locally. _load_config_from_file() + # Now, read the configuration from the environment. In a local run, these + # will be the values we just set. In a containerized run, they will be + # the values provided by the environment. gcp_audience = os.getenv("GCP_WORKLOAD_AUDIENCE") sa_impersonation_url = os.getenv("GCP_SERVICE_ACCOUNT_IMPERSONATION_URL") gcs_bucket_name = os.getenv("GCS_BUCKET_NAME") if not all([gcp_audience, gcs_bucket_name]): - raise ValueError( + print( "Required configuration missing. Please provide it in a " "custom-credentials-aws-secrets.json file or as environment variables: " "GCP_WORKLOAD_AUDIENCE, GCS_BUCKET_NAME" ) - print(f"Retrieving metadata for bucket: {gcs_bucket_name}...") - metadata = authenticate_with_aws_credentials( - gcs_bucket_name, gcp_audience, sa_impersonation_url - ) - return metadata + return - -if __name__ == "__main__": try: - metadata = main() - if metadata: - print("--- SUCCESS! ---") - print(json.dumps(metadata, indent=2)) + print(f"Retrieving metadata for bucket: {gcs_bucket_name}...") + metadata = authenticate_with_aws_credentials( + gcs_bucket_name, gcp_audience, sa_impersonation_url + ) + print("--- SUCCESS! ---") + print(json.dumps(metadata, indent=2)) except Exception as e: - print(f"An error occurred: {e}", file=sys.stderr) - sys.exit(1) + print(f"Authentication or Request failed: {e}") + + +if __name__ == "__main__": + main() diff --git a/auth/custom-credentials/okta/snippets.py b/auth/custom-credentials/okta/snippets.py index 39c98c6e8b6..82166f5cd96 100644 --- a/auth/custom-credentials/okta/snippets.py +++ b/auth/custom-credentials/okta/snippets.py @@ -12,14 +12,12 @@ # limitations under the License. 
# [START auth_custom_credential_supplier_okta] - import json -import os import time import urllib.parse from google.auth import identity_pool -from google.auth.transport import requests as auth_requests +from google.cloud import storage import requests @@ -41,7 +39,6 @@ def get_subject_token(self, context, request=None) -> str: """Fetches a new token if the current one is expired or missing.""" if self.access_token and time.time() < self.expiry_time - 60: return self.access_token - self._fetch_okta_access_token() return self.access_token @@ -78,11 +75,9 @@ def authenticate_with_okta_credentials( dict: The bucket metadata response from the Google Cloud Storage API. """ - # 1. Instantiate the custom supplier. okta_supplier = OktaClientCredentialsSupplier(domain, client_id, client_secret) - # 2. Instantiate the IdentityPoolClient. - client = identity_pool.Credentials( + credentials = identity_pool.Credentials( audience=audience, subject_token_type="urn:ietf:params:oauth:token-type:jwt", token_url="https://sts.googleapis.com/v1/token", @@ -91,16 +86,11 @@ def authenticate_with_okta_credentials( service_account_impersonation_url=impersonation_url, ) - # 3. Create an authenticated session. - authed_session = auth_requests.AuthorizedSession(client) - - # 4. Make the API Request. - bucket_url = f"https://storage.googleapis.com/storage/v1/b/{bucket_name}" + storage_client = storage.Client(credentials=credentials) - response = authed_session.get(bucket_url) - response.raise_for_status() + bucket = storage_client.get_bucket(bucket_name) - return response.json() + return bucket._properties # [END auth_custom_credential_supplier_okta] From 7f61120054f1f09036c59dd500d2f38f6f657373 Mon Sep 17 00:00:00 2001 From: Pranav Iyer Date: Fri, 12 Dec 2025 15:57:23 -0800 Subject: [PATCH 20/23] Removed unnecessary comments. --- auth/custom-credentials/aws/README.md | 2 +- auth/custom-credentials/aws/snippets.py | 6 +----- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/auth/custom-credentials/aws/README.md b/auth/custom-credentials/aws/README.md index ecfc990dcb1..551c95ef691 100644 --- a/auth/custom-credentials/aws/README.md +++ b/auth/custom-credentials/aws/README.md @@ -8,7 +8,7 @@ This sample demonstrates how to use a custom AWS security credential supplier to * A Google Cloud project with the IAM API enabled. * A GCS bucket. * Python 3.10 or later installed. -* + If you want to use AWS security credentials that cannot be retrieved using methods supported natively by the [google-auth](https://github.com/googleapis/google-auth-library-python) library, a custom `AwsSecurityCredentialsSupplier` implementation may be specified. The supplier must return valid, unexpired AWS security credentials when called by the Google Cloud Auth library. diff --git a/auth/custom-credentials/aws/snippets.py b/auth/custom-credentials/aws/snippets.py index f6cf8967102..3a59ea26644 100644 --- a/auth/custom-credentials/aws/snippets.py +++ b/auth/custom-credentials/aws/snippets.py @@ -71,10 +71,8 @@ def authenticate_with_aws_credentials(bucket_name, audience, impersonation_url=N dict: The bucket metadata response from the Google Cloud Storage API. """ - # 1. Instantiate the custom supplier. custom_supplier = CustomAwsSupplier() - - # 2. Instantiate the AWS Credentials object. 
+
     credentials = aws.Credentials(
         audience=audience,
         subject_token_type="urn:ietf:params:aws:token-type:aws4_request",
@@ -83,10 +81,8 @@ def authenticate_with_aws_credentials(bucket_name, audience, impersonation_url=N
         scopes=["https://www.googleapis.com/auth/devstorage.read_write"],
     )
 
-    # 3. Create a storage client.
     storage_client = storage.Client(credentials=credentials)
 
-    # 4. Get bucket metadata.
     bucket = storage_client.get_bucket(bucket_name)
 
     return bucket._properties

From 05d775c75d0fe3829cc1ac804fdd2f143ace57e9 Mon Sep 17 00:00:00 2001
From: Pranav Iyer
Date: Fri, 12 Dec 2025 16:11:16 -0800
Subject: [PATCH 21/23] Formatting changes.

---
 auth/custom-credentials/aws/snippets.py       | 2 +-
 auth/custom-credentials/aws/snippets_test.py  | 1 +
 auth/custom-credentials/okta/snippets.py      | 4 +---
 auth/custom-credentials/okta/snippets_test.py | 1 +
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/auth/custom-credentials/aws/snippets.py b/auth/custom-credentials/aws/snippets.py
index 3a59ea26644..f96daf9176b 100644
--- a/auth/custom-credentials/aws/snippets.py
+++ b/auth/custom-credentials/aws/snippets.py
@@ -72,7 +72,7 @@ def authenticate_with_aws_credentials(bucket_name, audience, impersonation_url=N
     """
 
     custom_supplier = CustomAwsSupplier()
-
+
     credentials = aws.Credentials(
         audience=audience,
         subject_token_type="urn:ietf:params:aws:token-type:aws4_request",
diff --git a/auth/custom-credentials/aws/snippets_test.py b/auth/custom-credentials/aws/snippets_test.py
index c137684b418..e0382cfc6f5 100644
--- a/auth/custom-credentials/aws/snippets_test.py
+++ b/auth/custom-credentials/aws/snippets_test.py
@@ -96,6 +96,7 @@ def test_authenticate_unit_success(MockSupplier, MockAwsCreds, MockSession):
 
 # --- System Test (Integration) ---
 
+
 def test_authenticate_system():
     """
     System test that runs against the real API.
diff --git a/auth/custom-credentials/okta/snippets.py b/auth/custom-credentials/okta/snippets.py
index 82166f5cd96..d85e319eb4d 100644
--- a/auth/custom-credentials/okta/snippets.py
+++ b/auth/custom-credentials/okta/snippets.py
@@ -101,9 +101,7 @@ def main():
         with open("custom-credentials-okta-secrets.json") as f:
             secrets = json.load(f)
     except FileNotFoundError:
-        print(
-            "Could not find custom-credentials-okta-secrets.json."
-        )
+        print("Could not find custom-credentials-okta-secrets.json.")
         return
 
     gcp_audience = secrets.get("gcp_workload_audience")
diff --git a/auth/custom-credentials/okta/snippets_test.py b/auth/custom-credentials/okta/snippets_test.py
index 539f33150cc..1f05c4ad7bf 100644
--- a/auth/custom-credentials/okta/snippets_test.py
+++ b/auth/custom-credentials/okta/snippets_test.py
@@ -98,6 +98,7 @@ def test_authenticate_unit_success(MockSupplier, MockCreds, MockSession):
 
 # --- System Test ---
 
+
 def test_authenticate_system():
     """
     System test that runs against the real API.

From 613bf33473ac57a8c65c9cc2bac570e800649029 Mon Sep 17 00:00:00 2001
From: Pranav Iyer
Date: Fri, 12 Dec 2025 16:37:44 -0800
Subject: [PATCH 22/23] Changed default scopes.

---
 auth/custom-credentials/aws/snippets.py  | 2 +-
 auth/custom-credentials/okta/snippets.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/auth/custom-credentials/aws/snippets.py b/auth/custom-credentials/aws/snippets.py
index f96daf9176b..49617a00029 100644
--- a/auth/custom-credentials/aws/snippets.py
+++ b/auth/custom-credentials/aws/snippets.py
@@ -78,7 +78,7 @@ def authenticate_with_aws_credentials(bucket_name, audience, impersonation_url=N
         subject_token_type="urn:ietf:params:aws:token-type:aws4_request",
         service_account_impersonation_url=impersonation_url,
         aws_security_credentials_supplier=custom_supplier,
-        scopes=["https://www.googleapis.com/auth/devstorage.read_write"],
+        scopes=["https://www.googleapis.com/auth/devstorage.read_only"],
     )
 
     storage_client = storage.Client(credentials=credentials)
diff --git a/auth/custom-credentials/okta/snippets.py b/auth/custom-credentials/okta/snippets.py
index d85e319eb4d..02af2dadc93 100644
--- a/auth/custom-credentials/okta/snippets.py
+++ b/auth/custom-credentials/okta/snippets.py
@@ -82,7 +82,7 @@ def authenticate_with_okta_credentials(
         subject_token_type="urn:ietf:params:oauth:token-type:jwt",
         token_url="https://sts.googleapis.com/v1/token",
         subject_token_supplier=okta_supplier,
-        default_scopes=["https://www.googleapis.com/auth/devstorage.read_write"],
+        default_scopes=["https://www.googleapis.com/auth/devstorage.read_only"],
         service_account_impersonation_url=impersonation_url,
     )
 

From 79ee7513bf45d3f4c5ce33b31aa888d492209422 Mon Sep 17 00:00:00 2001
From: Pranav Iyer
Date: Fri, 12 Dec 2025 16:58:12 -0800
Subject: [PATCH 23/23] Fixed PR Build run fixes.

---
 auth/custom-credentials/aws/requirements.txt  | 1 +
 auth/custom-credentials/aws/snippets.py       | 1 +
 auth/custom-credentials/okta/requirements.txt | 1 +
 3 files changed, 3 insertions(+)

diff --git a/auth/custom-credentials/aws/requirements.txt b/auth/custom-credentials/aws/requirements.txt
index 4a091fb39fb..2c302888ed7 100644
--- a/auth/custom-credentials/aws/requirements.txt
+++ b/auth/custom-credentials/aws/requirements.txt
@@ -1,4 +1,5 @@
 boto3==1.40.53
 google-auth==2.43.0
+google-cloud-storage==2.19.0
 python-dotenv==1.1.1
 requests==2.32.3
diff --git a/auth/custom-credentials/aws/snippets.py b/auth/custom-credentials/aws/snippets.py
index 49617a00029..2d77a123015 100644
--- a/auth/custom-credentials/aws/snippets.py
+++ b/auth/custom-credentials/aws/snippets.py
@@ -14,6 +14,7 @@
 # [START auth_custom_credential_supplier_aws]
 import json
 import os
+import sys
 
 import boto3
 from google.auth import aws
diff --git a/auth/custom-credentials/okta/requirements.txt b/auth/custom-credentials/okta/requirements.txt
index c40de273b71..d9669ebee9f 100644
--- a/auth/custom-credentials/okta/requirements.txt
+++ b/auth/custom-credentials/okta/requirements.txt
@@ -1,3 +1,4 @@
 requests==2.32.3
+google-cloud-storage==2.19.0
 google-auth==2.43.0
 python-dotenv==1.1.1
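
Taken together, patches 20 through 23 settle the AWS sample into one shape: a custom supplier feeds `google.auth.aws.Credentials`, which is handed to a `google-cloud-storage` client with the `devstorage.read_only` scope. The following is a minimal sketch of that pattern under stated assumptions; `EnvAwsSupplier`, the environment variable names, and the explicit `project` argument are illustrative placeholders and are not the `CustomAwsSupplier` defined in `snippets.py`.

```python
# Minimal sketch of the pattern these patches converge on: a custom
# AwsSecurityCredentialsSupplier feeding google.auth.aws.Credentials, consumed
# by a google-cloud-storage client with a read-only scope. EnvAwsSupplier and
# the environment variable names are illustrative, not code from this PR.
import os

import boto3
from google.auth import aws
from google.cloud import storage


class EnvAwsSupplier(aws.AwsSecurityCredentialsSupplier):
    """Resolves AWS credentials through the default boto3 provider chain."""

    def get_aws_security_credentials(self, context, request):
        # boto3 walks env vars, shared config, IRSA, ECS/IMDS, etc.
        frozen = boto3.Session().get_credentials().get_frozen_credentials()
        return aws.AwsSecurityCredentials(
            access_key_id=frozen.access_key,
            secret_access_key=frozen.secret_key,
            session_token=frozen.token,
        )

    def get_aws_region(self, context, request):
        return os.environ.get("AWS_REGION", "us-east-1")


def bucket_metadata(bucket_name: str, audience: str) -> dict:
    """Exchanges AWS credentials for GCP access and reads bucket metadata."""
    credentials = aws.Credentials(
        audience=audience,
        subject_token_type="urn:ietf:params:aws:token-type:aws4_request",
        aws_security_credentials_supplier=EnvAwsSupplier(),
        scopes=["https://www.googleapis.com/auth/devstorage.read_only"],
    )
    # The project is passed explicitly here because external-account
    # credentials may not carry a quota project of their own.
    client = storage.Client(
        project=os.environ["GOOGLE_CLOUD_PROJECT"], credentials=credentials
    )
    return client.get_bucket(bucket_name)._properties
```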
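For the Okta sample, the analogous hook is `identity_pool.SubjectTokenSupplier`. The sketch below swaps in a trivial supplier that returns a JWT read from an environment variable instead of performing the client-credentials exchange done by `OktaClientCredentialsSupplier`; the `OKTA_ID_TOKEN`, `GCP_WORKLOAD_AUDIENCE`, and `GOOGLE_CLOUD_PROJECT` variable names are placeholders, while the constructor arguments mirror the ones used in `okta/snippets.py`.

```python
# Minimal sketch of a custom SubjectTokenSupplier for identity_pool.Credentials.
# StaticJwtSupplier stands in for the Okta client-credentials flow implemented
# in okta/snippets.py; the environment variable names below are placeholders.
import os

from google.auth import identity_pool
from google.cloud import storage


class StaticJwtSupplier(identity_pool.SubjectTokenSupplier):
    """Returns a JWT that was obtained out of band (e.g. by another process)."""

    def get_subject_token(self, context, request):
        return os.environ["OKTA_ID_TOKEN"]


def main():
    credentials = identity_pool.Credentials(
        audience=os.environ["GCP_WORKLOAD_AUDIENCE"],
        subject_token_type="urn:ietf:params:oauth:token-type:jwt",
        token_url="https://sts.googleapis.com/v1/token",
        subject_token_supplier=StaticJwtSupplier(),
        default_scopes=["https://www.googleapis.com/auth/devstorage.read_only"],
    )
    client = storage.Client(
        project=os.environ["GOOGLE_CLOUD_PROJECT"], credentials=credentials
    )
    # Same read path as the sample: fetch and print the bucket metadata.
    print(client.get_bucket(os.environ["GCS_BUCKET_NAME"])._properties)


if __name__ == "__main__":
    main()
```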