From 37ac682406ad27383b6829ecab40ecd6ed0ee46c Mon Sep 17 00:00:00 2001 From: Erik Schultink Date: Tue, 20 Jan 2026 21:38:42 -0800 Subject: [PATCH 1/8] inc webhook collectors in gcp test-all.sh --- infra/modules/gcp-host/main.tf | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/infra/modules/gcp-host/main.tf b/infra/modules/gcp-host/main.tf index 4c8f1c846..5d8f60d53 100644 --- a/infra/modules/gcp-host/main.tf +++ b/infra/modules/gcp-host/main.tf @@ -509,6 +509,12 @@ echo "Testing Bulk Connectors ..." %{for test_script in values(module.bulk_connector)[*].test_script~} ./${test_script} %{endfor} + +echo "Testing Webhook Collectors ..." + +%{for test_script in values(module.webhook_collector)[*].test_script~} +./${test_script} +%{endfor} EOF } From 3bd0d862a1cd8c77179e9e32c4b50669236969a8 Mon Sep 17 00:00:00 2001 From: Erik Schultink Date: Wed, 21 Jan 2026 10:41:14 -0800 Subject: [PATCH 2/8] improve gcp testing of webhooks, to actually do an end-to-end --- infra/modules/gcp-webhook-collector/main.tf | 6 + .../gcp-webhook-collector/test_script.tftpl | 2 + tools/psoxy-test/cli-call.js | 30 +++ tools/psoxy-test/lib/gcp.js | 228 ++++++++++++++++-- tools/psoxy-test/lib/utils.js | 42 ++-- tools/psoxy-test/package-lock.json | 13 + tools/psoxy-test/package.json | 1 + 7 files changed, 291 insertions(+), 31 deletions(-) diff --git a/infra/modules/gcp-webhook-collector/main.tf b/infra/modules/gcp-webhook-collector/main.tf index ee9570257..2428e32cc 100644 --- a/infra/modules/gcp-webhook-collector/main.tf +++ b/infra/modules/gcp-webhook-collector/main.tf @@ -235,6 +235,9 @@ locals { secrets_to_grant_access_to = { AUTH_ISSUER = { secret_id = module.auth_issuer_secret.secret_ids_within_project["AUTH_ISSUER"] + }, + SERVICE_URL = { + secret_id = module.auth_issuer_secret.secret_ids_within_project["SERVICE_URL"] } } } @@ -501,6 +504,9 @@ resource "local_file" "test_script" { example_payload = coalesce(var.example_payload, "{\"test\": \"data\"}") example_identity = 
var.example_identity collection_path = "/" + scheduler_job_name = google_cloud_scheduler_job.trigger_batch_processing.id + bucket_name = module.sanitized_webhook_output.bucket_name + output_path_prefix = var.output_path_prefix }) } diff --git a/infra/modules/gcp-webhook-collector/test_script.tftpl b/infra/modules/gcp-webhook-collector/test_script.tftpl index d41e15362..a1417a2a2 100644 --- a/infra/modules/gcp-webhook-collector/test_script.tftpl +++ b/infra/modules/gcp-webhook-collector/test_script.tftpl @@ -15,6 +15,8 @@ ${command_cli_call} -u "${collector_endpoint_url}${collection_path}" --method PO %{ if example_identity != null ~} --identity-subject '${example_identity}' \ %{ endif ~} +--verify-collection "${bucket_name}" \ +--scheduler-job "${scheduler_job_name}" \ --body '${example_payload}' COLLECTION_RC=$? diff --git a/tools/psoxy-test/cli-call.js b/tools/psoxy-test/cli-call.js index 8f335d4ae..a4b210d83 100644 --- a/tools/psoxy-test/cli-call.js +++ b/tools/psoxy-test/cli-call.js @@ -4,6 +4,7 @@ import { Command, Option } from 'commander'; import _ from 'lodash'; import { createRequire } from 'module'; import { callDataSourceEndpoints } from './data-sources/runner.js'; +import gcp from './lib/gcp.js'; import getLogger from './lib/logger.js'; import psoxyTestCall from './psoxy-test-call.js'; @@ -39,6 +40,8 @@ const AWS_ACCESS_DENIED_EXCEPTION_REGEXP = new RegExp(/(?arn:aws:iam::\d+:\ .option('--request-no-response', "Request 'No response body' back from proxy (tests side-output case)", false) .option('--async', 'Process request asynchronously (adds X-Psoxy-Process-Async header)', false) .option('-b, --body ', 'Body to send in request (it expects a JSON string)') + .option('--verify-collection ', 'Verify that the posted data appears in the specified bucket (GCS/S3)') + .option('--scheduler-job ', 'GCP: Cloud Scheduler job name to trigger batch processing') .addOption(new Option('-d, --data-source ', 'Data source to test all available endpoints').choices([ 
//TODO: pull this list from terraform console or something?? @@ -82,11 +85,38 @@ const AWS_ACCESS_DENIED_EXCEPTION_REGEXP = new RegExp(/(?arn:aws:iam::\d+:\ let result; try { + const startTime = Date.now(); if (options.dataSource) { result = await callDataSourceEndpoints(options); } else { result = await psoxyTestCall(options); } + + if (options.verifyCollection && result.status === 200) { + // TODO: delegate based on cloud provider + // For now, assuming GCP if gcp.isValidURL(options.url) or force=gcp + // We import gcp dynamically or just use the one we have if we refactor psoxy-test-call to return provider + // But psoxyTestCall is a default export. + + // Re-evaluating provider logic mostly duplicates psoxy-test-call logic + // Let's use the helper from psoxy-test-call.js imports, but we need to import them here if we want to use them directly. + // They are imported: `import gcp from './lib/gcp.js';` + + const url = new URL(options.url); + // Simple check + if (options.force === 'gcp' || gcp.isValidURL(url)) { + await gcp.verifyCollection({ + verifyCollection: options.verifyCollection, + schedulerJob: options.schedulerJob, + url: options.url, + body: options.body, + startTime: startTime + }, logger); + } else { + logger.warn('Verification only implemented for GCP currently.'); + } + } + } catch (error) { if (error?.name === 'AccessDenied' && error.message && AWS_ACCESS_DENIED_EXCEPTION_REGEXP.test(error.message)) { diff --git a/tools/psoxy-test/lib/gcp.js b/tools/psoxy-test/lib/gcp.js index d3350378b..9c6264e2c 100644 --- a/tools/psoxy-test/lib/gcp.js +++ b/tools/psoxy-test/lib/gcp.js @@ -1,15 +1,18 @@ + import { Logging } from '@google-cloud/logging'; +import { CloudSchedulerClient } from '@google-cloud/scheduler'; import { Storage } from '@google-cloud/storage'; import _ from 'lodash'; import getLogger from './logger.js'; import { - executeCommand, - executeWithRetry, - getCommonHTTPHeaders, - isGzipped, - request, - resolveHTTPMethod, - signJwtWithGCPKMS, + 
executeCommand, + executeWithRetry, + getCommonHTTPHeaders, + isGzipped, + request, + resolveHTTPMethod, + signJwtWithGCPKMS, + sleep, } from './utils.js'; @@ -112,14 +115,25 @@ async function call(options = {}) { * @return {Array} - array of serialized log entries */ async function getLogs(options = {}) { - const logging = new Logging(); - const log = logging.log('cloudfunctions.googleapis.com%2Fcloud-functions'); - const [entries] = await log.getEntries({ - filter: `resource.labels.function_name=${options.functionName}`, + const logging = new Logging({ projectId: options.projectId }); + + // Support both Gen 1 (cloud_function) and Gen 2 (cloud_run_revision) + // Gen 2 logs usually appear under run.googleapis.com/stdout or stderr, but resource.labels.service_name identify the function + const filter = ` + (resource.type="cloud_function" AND resource.labels.function_name="${options.functionName}") + OR + (resource.type="cloud_run_revision" AND resource.labels.service_name="${options.functionName}") + `; + + const [entries] = await logging.getEntries({ + filter: filter, resourceNames: [`projects/${options.projectId}`], - orderBy: 'timestamp asc', + orderBy: 'timestamp desc', // Get newest first + pageSize: 50, }); - return entries.map(entry => entry.toStructuredJSON()); + + // Return in chronological order + return entries.reverse().map(entry => entry.toStructuredJSON()); } /** @@ -195,7 +209,7 @@ function parseLogEntries(entries) { return []; } - // https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#LogSeverity + // https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list#LogEntry const LOG_LEVELS = ['WARNING', 'ERROR', 'CRITICAL', 'ALERT', 'EMERGENCY']; const dateRegex = /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3}/ return entries.map(entry => { @@ -335,6 +349,190 @@ async function download(bucketName, fileName, destination, client, logger) { return downloadResponse; } +/** + * Triggers a Cloud Scheduler job to run immediately. 
+ * + * @param {string} jobName + * @param {Object} logger + */ +async function triggerScheduler(jobName, logger) { + logger.info(`Triggering Cloud Scheduler job: ${jobName}`); + const client = new CloudSchedulerClient(); + const [response] = await client.runJob({ name: jobName }); + logger.success(`Cloud Scheduler job triggered: ${response.name}`); +} + +/** + * Verifies that a file containing the expected content appears in the bucket after startTime. + * + * @param {string} bucketName + * @param {string} expectedContent + * @param {number} startTime - timestamp in ms + * @param {Object} logger + */ +async function verifyBucket(bucketName, expectedContent, startTime, logger) { + const timeout = 60000; // 60 seconds + const pollInterval = 5000; // 5 seconds + const endTime = Date.now() + timeout; + + logger.info(`Verifying content in bucket: ${bucketName}. Will wait up to ${timeout / 1000}s`); + + const client = createStorageClient(); + const bucket = client.bucket(bucketName); + + while (Date.now() < endTime) { + const elapsed = Math.round((Date.now() - startTime) / 1000); // approx + logger.info(`Waiting for content to appear in bucket... 
[${Math.max(0, elapsed)}s elapsed]`); + + // Check for new files + const [files] = await bucket.getFiles(); + + // Sort files by creation time, newest first + const newFiles = files.filter(f => new Date(f.metadata.timeCreated).getTime() > startTime) + .sort((a, b) => new Date(b.metadata.timeCreated).getTime() - new Date(a.metadata.timeCreated).getTime()); + + if (newFiles.length > 0) { + // Stop polling as soon as we find a file (expecting only one) + const file = newFiles[0]; + logger.info(`New file found: ${file.name} (Created: ${new Date(file.metadata.timeCreated).toISOString()})`); + + const [content] = await file.download(); + const contentStr = content.toString(); + logger.info(`Found Content: ${contentStr}`); + + // Parse found content + let items = []; + try { + const jsonContent = JSON.parse(contentStr); + if (Array.isArray(jsonContent)) { + items = jsonContent; + } else if (_.isPlainObject(jsonContent)) { + items = [jsonContent]; + } + } catch (e) { + logger.error(`Failed to parse file content: ${e.message}`); + throw new Error(`Verification failed: Invalid JSON in file ${file.name}`); + } + + if (items.length > 0) { + let expectedJson; + if (typeof expectedContent === 'string') { + expectedJson = JSON.parse(expectedContent); + } else { + expectedJson = expectedContent; + } + + logger.info(`Expected Content: ${JSON.stringify(expectedJson)}`); + + const found = items.some(item => { + // 1. Try strict equality first + if (_.isEqual(item, expectedJson)) return true; + + // 2. 
Try relaxed equality (ignoring actor.id for pseudonymization) + const itemNoId = _.cloneDeep(item); + if (itemNoId.actor) delete itemNoId.actor.id; + + const expectedNoId = _.cloneDeep(expectedJson); + if (expectedNoId.actor) delete expectedNoId.actor.id; + + if (_.isEqual(itemNoId, expectedNoId)) { + logger.info('Match found with differing actor.id (likely pseudonymized)'); + return true; + } + + return false; + }); + + if (found) { + logger.success(`Verification Successful: Content matches.`); + return; + } else { + logger.error(`Verification Failed: Content does not match.`); + throw new Error(`Verification failed: Content mismatch in file ${file.name}`); + } + } else { + logger.warn(`File is empty or contains no items.`); + throw new Error(`Verification failed: Empty file ${file.name}`); + } + } + + await sleep(pollInterval); + } + + logger.error('No new files found in bucket within timeout.'); + throw new Error('Verification failed: Expected content not found in bucket.'); +} + +/** + * End-to-end verification of webhook collection in GCP. + * + * @param {Object} options + * @param {string} options.verifyCollection - bucket name + * @param {string} options.schedulerJob - optional, job name + * @param {string} options.url - function URL + * @param {string} options.body - original POST body + * @param {number} options.startTime - timestamp when test started + * @param {Object} logger + */ +async function verifyCollection(options, logger) { + const bucketName = options.verifyCollection; + + // 1. Trigger Scheduler + let jobName = options.schedulerJob; + if (!jobName) { + // Attempt to derive job name from function URL + // URL: https://REGION-PROJECT.cloudfunctions.net/FUNCTION_NAME or https://FUNCTION_NAME-HASH-REGION.a.run.app + // Job convention: ENVIRONMENT-INSTANCE-batch-processing + // This is tricky to derive perfectly without more info. 
+ // However, if we follow the naming in terraform: + // function name: ${env_prefix}${instance_id} + // job name: ${env_prefix}${instance_id}-batch-processing + + // So we just need to extract function name from URL and append "-batch-processing" + + try { + const url = new URL(options.url); + let functionName = ''; + let region = ''; + let projectId = ''; + + if (isCloudFunctionGen2(url)) { + // https://psoxy-dev-erik-llm-portal-bovv3fr26q-uc.a.run.app + const re = /^(.+)-([a-z0-9]+)-([a-z]{2})\.a\.run\.app$/; + const parts = url.hostname.match(re); + if (parts) { + functionName = parts[1]; + // Map shortCode to region? Or we need region for job name construction? + // Job name is full resource name: projects/PROJECT/locations/REGION/jobs/JOB_NAME + // We don't have region or project easily available unless passed or inferred. + + // wait, TriggerScheduler takes a full job name. + // If we don't have it, we might fail. + // Let's rely on it being passed for now, or assume we can find it. + } + } + + if (functionName) { + // We need project and region to construct the full name. + // If we are running in a context where we can get project ID (e.g. gcloud config), maybe. + // But relying on user/terraform to pass it is safer. + } + } catch (e) { + logger.warn('Failed to derive scheduler job name.'); + } + } + + if (jobName) { + await triggerScheduler(jobName, logger); + } else { + logger.warn('Skipping Cloud Scheduler trigger (no job name provided). Waiting for scheduled run...'); + } + + // 2. 
Verify Bucket + await verifyBucket(bucketName, options.body, options.startTime, logger); +} + + export default { call, createStorageClient, @@ -347,4 +545,6 @@ export default { parseLogEntries, listFilesMetadata, upload, + verifyCollection, }; + diff --git a/tools/psoxy-test/lib/utils.js b/tools/psoxy-test/lib/utils.js index 11f6904e0..c606d554f 100644 --- a/tools/psoxy-test/lib/utils.js +++ b/tools/psoxy-test/lib/utils.js @@ -1,8 +1,8 @@ import { KMSClient, SignCommand } from '@aws-sdk/client-kms'; import { GetObjectCommand, S3Client } from '@aws-sdk/client-s3'; import { - fromNodeProviderChain, - fromTemporaryCredentials + fromNodeProviderChain, + fromTemporaryCredentials } from "@aws-sdk/credential-providers"; import { KeyManagementServiceClient } from '@google-cloud/kms'; import { Storage } from '@google-cloud/storage'; @@ -808,21 +808,29 @@ async function pollAsyncResponse(locationUrl, options = {}) { throw new Error(`Timeout: File not available after ${maxAttempts * 10} seconds of polling`); } +/** + * Sleep for a given number of milliseconds + * @param {number} ms + * @returns {Promise} + */ +function sleep(ms) { + return new Promise(resolve => setTimeout(resolve, ms)); +} + export { - addFilenameSuffix, environmentCheck, - executeCommand, - executeWithRetry, - getAWSCredentials, - getCommonHTTPHeaders, - getFileNameFromURL, - isGzipped, - parseBucketOption, pollAsyncResponse, requestWrapper as request, - resolveAWSRegion, - resolveHTTPMethod, - saveToFile, - signAWSRequestURL, - signJwtWithAWSKMS, - signJwtWithGCPKMS, - transformSpecWithResponse, unzip + environmentCheck, executeCommand, + executeWithRetry, + getAWSCredentials, + getCommonHTTPHeaders, + getFileNameFromURL, + isGzipped, + pollAsyncResponse, + requestWrapper as request, + resolveAWSRegion, + resolveHTTPMethod, + saveToFile, + signAWSRequestURL, + signJwtWithAWSKMS, + signJwtWithGCPKMS, sleep, transformSpecWithResponse }; diff --git a/tools/psoxy-test/package-lock.json 
b/tools/psoxy-test/package-lock.json index aa43e31de..816382faa 100644 --- a/tools/psoxy-test/package-lock.json +++ b/tools/psoxy-test/package-lock.json @@ -15,6 +15,7 @@ "@aws-sdk/credential-providers": "^3.911.0", "@google-cloud/kms": "^5.2.1", "@google-cloud/logging": "^11.2.1", + "@google-cloud/scheduler": "^5.3.1", "@google-cloud/storage": "^7.17.2", "@stdlib/assert-is-gzip-buffer": "^0.2.2", "aws4": "^1.13.2", @@ -1258,6 +1259,18 @@ "node": ">=14" } }, + "node_modules/@google-cloud/scheduler": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/@google-cloud/scheduler/-/scheduler-5.3.1.tgz", + "integrity": "sha512-EGPTRRjMO4F/52HPVPVmcP4EUaONnISXo6VJP75QOQw9cMhg+yiruj8zb0BM4iUIY8+vOwlFv/VCHqrZ8gj6bw==", + "license": "Apache-2.0", + "dependencies": { + "google-gax": "^5.0.0" + }, + "engines": { + "node": ">=18" + } + }, "node_modules/@google-cloud/storage": { "version": "7.17.2", "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.17.2.tgz", diff --git a/tools/psoxy-test/package.json b/tools/psoxy-test/package.json index 46ea9b04e..f4fd9c12f 100644 --- a/tools/psoxy-test/package.json +++ b/tools/psoxy-test/package.json @@ -25,6 +25,7 @@ "@aws-sdk/credential-providers": "^3.911.0", "@google-cloud/kms": "^5.2.1", "@google-cloud/logging": "^11.2.1", + "@google-cloud/scheduler": "^5.3.1", "@google-cloud/storage": "^7.17.2", "@stdlib/assert-is-gzip-buffer": "^0.2.2", "aws4": "^1.13.2", From f2c59b4c73772abafc1048037a033482cc0076fe Mon Sep 17 00:00:00 2001 From: Erik Schultink Date: Wed, 21 Jan 2026 10:53:42 -0800 Subject: [PATCH 3/8] document webhook collectiont testing --- docs/guides/psoxy-test-tool.md | 18 +++++++++++++ tools/psoxy-test/lib/gcp.js | 47 +++------------------------------- 2 files changed, 21 insertions(+), 44 deletions(-) diff --git a/docs/guides/psoxy-test-tool.md b/docs/guides/psoxy-test-tool.md index dd73d801b..8a4ce5aed 100644 --- a/docs/guides/psoxy-test-tool.md +++ b/docs/guides/psoxy-test-tool.md @@ -47,6 
+47,24 @@ node cli-call.js -u https://us-central1-acme.cloudfunctions.net/outlook-cal/v1.0 (*) You can obtain it by running `gcloud auth print-identity-token` (using [Google Cloud SDK]) +### End-to-End Verification (Webhook Collection) + +For Webhook Collection testing, you can use the tool to verify that the data was successfully collected and written to the expected bucket. + +```shell +node cli-call.js -u https://us-central1-acme.cloudfunctions.net/webhook-collector --method POST --body '{...}' --verify-collection my-output-bucket +``` + +This will: +1. Make the POST request to the webhook collector. +2. In the GCP case, trigger the associated Cloud Scheduler job that batch-processes the collected webhooks. +3. Poll the specified bucket until the output file appears (up to 60s). +4. Verify that the content of the output file matches the uploaded data. + +**Options:** +* `--verify-collection `: Enables verification mode and specifies the target bucket. +* `--scheduler-job `: (only for GCP case) Specify the Cloud Scheduler job that batch-processes pending webhooks from the pubsub topic. + ### Psoxy Test Call: Health Check option Use the `--health-check` option to check if your deploy is correctly configured: diff --git a/tools/psoxy-test/lib/gcp.js b/tools/psoxy-test/lib/gcp.js index 9c6264e2c..9a47c6e06 100644 --- a/tools/psoxy-test/lib/gcp.js +++ b/tools/psoxy-test/lib/gcp.js @@ -474,58 +474,17 @@ async function verifyBucket(bucketName, expectedContent, startTime, logger) { * @param {number} options.startTime - timestamp when test started * @param {Object} logger */ + async function verifyCollection(options, logger) { const bucketName = options.verifyCollection; // 1. 
Trigger Scheduler let jobName = options.schedulerJob; - if (!jobName) { - // Attempt to derive job name from function URL - // URL: https://REGION-PROJECT.cloudfunctions.net/FUNCTION_NAME or https://FUNCTION_NAME-HASH-REGION.a.run.app - // Job convention: ENVIRONMENT-INSTANCE-batch-processing - // This is tricky to derive perfectly without more info. - // However, if we follow the naming in terraform: - // function name: ${env_prefix}${instance_id} - // job name: ${env_prefix}${instance_id}-batch-processing - - // So we just need to extract function name from URL and append "-batch-processing" - - try { - const url = new URL(options.url); - let functionName = ''; - let region = ''; - let projectId = ''; - - if (isCloudFunctionGen2(url)) { - // https://psoxy-dev-erik-llm-portal-bovv3fr26q-uc.a.run.app - const re = /^(.+)-([a-z0-9]+)-([a-z]{2})\.a\.run\.app$/; - const parts = url.hostname.match(re); - if (parts) { - functionName = parts[1]; - // Map shortCode to region? Or we need region for job name construction? - // Job name is full resource name: projects/PROJECT/locations/REGION/jobs/JOB_NAME - // We don't have region or project easily available unless passed or inferred. - - // wait, TriggerScheduler takes a full job name. - // If we don't have it, we might fail. - // Let's rely on it being passed for now, or assume we can find it. - } - } - - if (functionName) { - // We need project and region to construct the full name. - // If we are running in a context where we can get project ID (e.g. gcloud config), maybe. - // But relying on user/terraform to pass it is safer. - } - } catch (e) { - logger.warn('Failed to derive scheduler job name.'); - } - } - + if (jobName) { await triggerScheduler(jobName, logger); } else { - logger.warn('Skipping Cloud Scheduler trigger (no job name provided). Waiting for scheduled run...'); + logger.info('Skipping Cloud Scheduler trigger (no job name provided). Waiting for scheduled run...'); } // 2. 
Verify Bucket From 6a117f110f9bcc8de0943d6ebb88bcf0d5287e3e Mon Sep 17 00:00:00 2001 From: Erik Schultink Date: Wed, 21 Jan 2026 13:58:09 -0800 Subject: [PATCH 4/8] aws-case of testing webhooks --- infra/modules/aws-host/main.tf | 11 ++ infra/modules/aws-webhook-collector/main.tf | 1 + .../aws-webhook-collector/test_script.tftpl | 1 + tools/psoxy-test/cli-call.js | 48 +++--- tools/psoxy-test/lib/aws.js | 148 +++++++++++++++--- tools/psoxy-test/lib/gcp.js | 46 ++---- tools/psoxy-test/lib/utils.js | 58 ++++++- 7 files changed, 235 insertions(+), 78 deletions(-) diff --git a/infra/modules/aws-host/main.tf b/infra/modules/aws-host/main.tf index 54969b165..a37f3630b 100644 --- a/infra/modules/aws-host/main.tf +++ b/infra/modules/aws-host/main.tf @@ -388,6 +388,17 @@ resource "aws_iam_policy" "invoke_webhook_collector_urls" { ], "Effect" : "Allow", "Resource" : flatten([for k, v in module.webhook_collectors : v.provisioned_auth_key_pairs]) + }, + { # allow test caller to read from sanitized output buckets to verify collection + "Action" : [ + "s3:ListBucket", + "s3:GetObject" + ], + "Effect" : "Allow", + "Resource" : flatten([for k, v in module.webhook_collectors : [ + "arn:aws:s3:::${v.output_sanitized_bucket_id}", + "arn:aws:s3:::${v.output_sanitized_bucket_id}/*" + ]]) } ] } diff --git a/infra/modules/aws-webhook-collector/main.tf b/infra/modules/aws-webhook-collector/main.tf index 1768302bf..b7e6b9bcc 100644 --- a/infra/modules/aws-webhook-collector/main.tf +++ b/infra/modules/aws-webhook-collector/main.tf @@ -383,6 +383,7 @@ locals { example_payload = coalesce(var.example_payload, "{\"test\": \"data\"}") example_identity = var.example_identity collection_path = local.collection_path + sanitized_bucket_name = module.sanitized_output.bucket_id }) } diff --git a/infra/modules/aws-webhook-collector/test_script.tftpl b/infra/modules/aws-webhook-collector/test_script.tftpl index 8715e93ac..d647283a2 100644 --- a/infra/modules/aws-webhook-collector/test_script.tftpl +++ 
b/infra/modules/aws-webhook-collector/test_script.tftpl @@ -15,6 +15,7 @@ ${command_cli_call} -u "${collector_endpoint_url}${collection_path}" --method PO %{ if example_identity != null ~} --identity-subject '${example_identity}' \ %{ endif ~} +--verify-collection "${sanitized_bucket_name}" \ --body '${example_payload}' COLLECTION_RC=$? diff --git a/tools/psoxy-test/cli-call.js b/tools/psoxy-test/cli-call.js index a4b210d83..b393eee67 100644 --- a/tools/psoxy-test/cli-call.js +++ b/tools/psoxy-test/cli-call.js @@ -93,27 +93,34 @@ const AWS_ACCESS_DENIED_EXCEPTION_REGEXP = new RegExp(/(?arn:aws:iam::\d+:\ } if (options.verifyCollection && result.status === 200) { - // TODO: delegate based on cloud provider - // For now, assuming GCP if gcp.isValidURL(options.url) or force=gcp - // We import gcp dynamically or just use the one we have if we refactor psoxy-test-call to return provider - // But psoxyTestCall is a default export. + // Delegate based on cloud provider logic + const url = new URL(options.url); - // Re-evaluating provider logic mostly duplicates psoxy-test-call logic - // Let's use the helper from psoxy-test-call.js imports, but we need to import them here if we want to use them directly. - // They are imported: `import gcp from './lib/gcp.js';` + // Dynamically import aws.js only when AWS verification is actually needed, so the + // GCP path does not load the AWS SDK and cli-call.js needs no top-level aws.js import. 
- const url = new URL(options.url); - // Simple check - if (options.force === 'gcp' || gcp.isValidURL(url)) { - await gcp.verifyCollection({ - verifyCollection: options.verifyCollection, - schedulerJob: options.schedulerJob, - url: options.url, - body: options.body, - startTime: startTime - }, logger); + const isGcp = options.force === 'gcp' || gcp.isValidURL(url); + const isAws = options.force === 'aws' || (!isGcp && (url.hostname.endsWith('amazonaws.com') || url.hostname.endsWith('on.aws'))); // rough check or rely on fallback + + if (isGcp) { + await gcp.verifyCollection({ + verifyCollection: options.verifyCollection, + schedulerJob: options.schedulerJob, + url: options.url, + body: options.body, + startTime: startTime + }, logger); } else { - logger.warn('Verification only implemented for GCP currently.'); + // Assume AWS or fallback + const aws = (await import('./lib/aws.js')).default; + await aws.verifyCollection({ + verifyCollection: options.verifyCollection, + url: options.url, + body: options.body, + startTime: startTime, + role: options.role, + region: options.region, + }, logger); } } diff --git a/tools/psoxy-test/lib/aws.js b/tools/psoxy-test/lib/aws.js index 6b4b150f0..30a74c575 100644 --- a/tools/psoxy-test/lib/aws.js +++ b/tools/psoxy-test/lib/aws.js @@ -1,26 +1,27 @@ import { - CloudWatchLogsClient, - DescribeLogStreamsCommand, - GetLogEventsCommand, + CloudWatchLogsClient, + DescribeLogStreamsCommand, + GetLogEventsCommand, } from '@aws-sdk/client-cloudwatch-logs'; import { - DeleteObjectCommand, - GetObjectCommand, - ListBucketsCommand, - ListObjectsV2Command, - PutObjectCommand, - S3Client + GetObjectCommand, + ListBucketsCommand, + ListObjectsV2Command, + PutObjectCommand, + S3Client } from '@aws-sdk/client-s3'; import { - executeWithRetry, - getAWSCredentials, - getCommonHTTPHeaders, - isGzipped, - request, - resolveAWSRegion, - resolveHTTPMethod, - signAWSRequestURL, - signJwtWithAWSKMS + compareContent, + executeWithRetry, + 
getAWSCredentials, + getCommonHTTPHeaders, + isGzipped, + request, + resolveAWSRegion, + resolveHTTPMethod, + signAWSRequestURL, + signJwtWithAWSKMS, + sleep, } from './utils.js'; import fs from 'fs'; @@ -374,6 +375,116 @@ async function deleteObject(bucket, key, options, client) { // BypassGovernanceRetention: true, })); } +/** + * Verifies that a file containing the expected content appears in the bucket after startTime. + * + * @param {Object} options + * @param {string} options.verifyCollection - bucket name + * @param {string} options.body - expected content + * @param {number} options.startTime - timestamp in ms + * @param {string} options.role + * @param {string} options.region + * @param {Object} logger + */ +async function verifyCollection(options, logger) { + const bucketName = options.verifyCollection; + const expectedContent = options.body; + const startTime = options.startTime; + const timeout = 90000; // 90 seconds + const pollInterval = 5000; // 5 seconds + const endTime = Date.now() + timeout; + + logger.info(`Verifying content in bucket: ${bucketName}. Will wait up to ${timeout / 1000}s`); + + const client = await createS3Client(options.role, options.region); + + while (Date.now() < endTime) { + const elapsed = Math.round((Date.now() - startTime) / 1000); + logger.info(`Waiting for content to appear in bucket... [${Math.max(0, elapsed)}s elapsed]`); + + // List objects + // We might want to list only recent objects or just list all and filter. + // AWS S3 ListObjectsV2 returns up to 1000 keys. 
+ const command = new ListObjectsV2Command({ + Bucket: bucketName + }); + const response = await client.send(command); + + const files = response.Contents || []; + + // Filter by LastModified > startTime + const newFiles = files.filter(f => f.LastModified && new Date(f.LastModified).getTime() > startTime) + .sort((a, b) => new Date(b.LastModified).getTime() - new Date(a.LastModified).getTime()); + + if (newFiles.length > 0) { + const file = newFiles[0]; + logger.info(`New file found: ${file.Key} (Created: ${new Date(file.LastModified).toISOString()})`); + + // Download content + const getObjCmd = new GetObjectCommand({ + Bucket: bucketName, + Key: file.Key + }); + const getResponse = await client.send(getObjCmd); + + let contentStr = ''; + if (getResponse.Body) { + const chunks = []; + for await (const chunk of getResponse.Body) { + chunks.push(chunk); + } + const buffer = Buffer.concat(chunks); + + // Check for gzip + const isGzippedContent = isGzipped(buffer) || getResponse.ContentEncoding === 'gzip'; + if (isGzippedContent) { + contentStr = (await new Promise((resolve, reject) => { + zlib.gunzip(buffer, (err, res) => { + if (err) reject(err); + else resolve(res); + }); + })).toString(); + } else { + contentStr = buffer.toString(); + } + } + + logger.info(`Found Content: ${contentStr}`); + + let items = []; + try { + const jsonContent = JSON.parse(contentStr); + if (Array.isArray(jsonContent)) { + items = jsonContent; + } else if (_.isPlainObject(jsonContent)) { + items = [jsonContent]; + } + } catch (e) { + logger.error(`Failed to parse file content: ${e.message}`); + throw new Error(`Verification failed: Invalid JSON in file ${file.Key}`); + } + + if (items.length > 0) { + const matchResult = compareContent(items, expectedContent, logger); + if (matchResult.found) { + logger.success(`Verification Successful: Content matches.`); + return; + } else { + logger.error(`Verification Failed: Content does not match.`); + throw new Error(`Verification failed: Content 
mismatch in file ${file.Key}`); + } + } else { + logger.warn(`File is empty or contains no items.`); + throw new Error(`Verification failed: Empty file ${file.Key}`); + } + } + + await sleep(pollInterval); + } + + logger.error('No new files found in bucket within timeout.'); + throw new Error('Verification failed: Expected content not found in bucket.'); +} export default { call, @@ -389,4 +500,5 @@ export default { listObjects, parseLogEvents, upload, + verifyCollection, } diff --git a/tools/psoxy-test/lib/gcp.js b/tools/psoxy-test/lib/gcp.js index 9a47c6e06..6cc32ee7d 100644 --- a/tools/psoxy-test/lib/gcp.js +++ b/tools/psoxy-test/lib/gcp.js @@ -5,6 +5,7 @@ import { Storage } from '@google-cloud/storage'; import _ from 'lodash'; import getLogger from './logger.js'; import { + compareContent, executeCommand, executeWithRetry, getCommonHTTPHeaders, @@ -12,7 +13,7 @@ import { request, resolveHTTPMethod, signJwtWithGCPKMS, - sleep, + sleep } from './utils.js'; @@ -415,41 +416,14 @@ async function verifyBucket(bucketName, expectedContent, startTime, logger) { } if (items.length > 0) { - let expectedJson; - if (typeof expectedContent === 'string') { - expectedJson = JSON.parse(expectedContent); - } else { - expectedJson = expectedContent; - } - - logger.info(`Expected Content: ${JSON.stringify(expectedJson)}`); - - const found = items.some(item => { - // 1. Try strict equality first - if (_.isEqual(item, expectedJson)) return true; - - // 2. 
Try relaxed equality (ignoring actor.id for pseudonymization) - const itemNoId = _.cloneDeep(item); - if (itemNoId.actor) delete itemNoId.actor.id; - - const expectedNoId = _.cloneDeep(expectedJson); - if (expectedNoId.actor) delete expectedNoId.actor.id; - - if (_.isEqual(itemNoId, expectedNoId)) { - logger.info('Match found with differing actor.id (likely pseudonymized)'); - return true; - } - - return false; - }); - - if (found) { - logger.success(`Verification Successful: Content matches.`); - return; - } else { - logger.error(`Verification Failed: Content does not match.`); - throw new Error(`Verification failed: Content mismatch in file ${file.name}`); - } + const matchResult = compareContent(items, expectedContent, logger); + if (matchResult.found) { + logger.success(`Verification Successful: Content matches.`); + return; + } else { + logger.error(`Verification Failed: Content does not match.`); + throw new Error(`Verification failed: Content mismatch in file ${file.name}`); + } } else { logger.warn(`File is empty or contains no items.`); throw new Error(`Verification failed: Empty file ${file.name}`); diff --git a/tools/psoxy-test/lib/utils.js b/tools/psoxy-test/lib/utils.js index c606d554f..f95aaf21d 100644 --- a/tools/psoxy-test/lib/utils.js +++ b/tools/psoxy-test/lib/utils.js @@ -592,12 +592,15 @@ async function unzip(filePath) { } /** - * Check if file is gzipped - * @param {string} filePath + * Check if file or buffer is gzipped + * @param {string|Buffer} file - filePath or Buffer * @returns {boolean} */ -async function isGzipped(filePath) { - return isgzipBuffer(await fs.readFile(filePath)); +async function isGzipped(file) { + if (Buffer.isBuffer(file)) { + return isgzipBuffer(file); + } + return isgzipBuffer(await fs.readFile(file)); } /** @@ -808,6 +811,45 @@ async function pollAsyncResponse(locationUrl, options = {}) { throw new Error(`Timeout: File not available after ${maxAttempts * 10} seconds of polling`); } +/** + * Compare actual content 
items against expected content. + * + * @param {Array} items - Array of actual items found in the file + * @param {string} expectedContent - Expected JSON string + * @param {Object} logger - Logger instance + * @returns {Object} - { found: boolean } + */ +function compareContent(items, expectedContent, logger) { + let expectedJson; + if (typeof expectedContent === 'string') { + expectedJson = JSON.parse(expectedContent); + } else { + expectedJson = expectedContent; + } + + const found = items.some(item => { + // 1. Try strict equality first + if (_.isEqual(item, expectedJson)) return true; + + // 2. Try relaxed equality (ignoring actor.id for pseudonymization) + const itemNoId = _.cloneDeep(item); + if (itemNoId.actor) delete itemNoId.actor.id; + + const expectedNoId = _.cloneDeep(expectedJson); + if (expectedNoId.actor) delete expectedNoId.actor.id; + + if (_.isEqual(itemNoId, expectedNoId)) { + logger.info('Match found with differing actor.id (likely pseudonymized)'); + return true; + } + + logger.info(`Comparison failed.\nActual (no ID): ${JSON.stringify(itemNoId)}\nExpected (no ID): ${JSON.stringify(expectedNoId)}`); + return false; + }); + + return { found }; +} + /** * Sleep for a given number of milliseconds * @param {number} ms @@ -818,7 +860,9 @@ function sleep(ms) { } export { - environmentCheck, executeCommand, + compareContent, + environmentCheck, + executeCommand, executeWithRetry, getAWSCredentials, getCommonHTTPHeaders, @@ -831,6 +875,8 @@ export { saveToFile, signAWSRequestURL, signJwtWithAWSKMS, - signJwtWithGCPKMS, sleep, transformSpecWithResponse + signJwtWithGCPKMS, + sleep, + transformSpecWithResponse }; From eba33fcf4c933607f3374ec843d6d907f098b707 Mon Sep 17 00:00:00 2001 From: Erik Schultink Date: Fri, 23 Jan 2026 13:45:28 -0800 Subject: [PATCH 5/8] Update docs/guides/psoxy-test-tool.md Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- docs/guides/psoxy-test-tool.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/docs/guides/psoxy-test-tool.md b/docs/guides/psoxy-test-tool.md index 8a4ce5aed..56f95a7d7 100644 --- a/docs/guides/psoxy-test-tool.md +++ b/docs/guides/psoxy-test-tool.md @@ -49,7 +49,7 @@ node cli-call.js -u https://us-central1-acme.cloudfunctions.net/outlook-cal/v1.0 ### End-to-End Verification (Webhook Collection) -For Webhook Collection testing, you can use the tool toto verify that the data was successfully collected and written to the expected bucket. +For Webhook Collection testing, you can use the tool to verify that the data was successfully collected and written to the expected bucket. ```shell node cli-call.js -u https://us-central1-acme.cloudfunctions.net/webhook-collector --method POST --body '{...}' --verify-collection my-output-bucket From 61cef1cb8c0f14fc398b8eb820c2f1a7cc96e0d2 Mon Sep 17 00:00:00 2001 From: Erik Schultink Date: Fri, 23 Jan 2026 13:45:57 -0800 Subject: [PATCH 6/8] Update tools/psoxy-test/lib/gcp.js Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- tools/psoxy-test/lib/gcp.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/psoxy-test/lib/gcp.js b/tools/psoxy-test/lib/gcp.js index 6cc32ee7d..bfe88aeeb 100644 --- a/tools/psoxy-test/lib/gcp.js +++ b/tools/psoxy-test/lib/gcp.js @@ -210,7 +210,7 @@ function parseLogEntries(entries) { return []; } - // https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list#LogEntry + // https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry const LOG_LEVELS = ['WARNING', 'ERROR', 'CRITICAL', 'ALERT', 'EMERGENCY']; const dateRegex = /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3}/ return entries.map(entry => { From 05f5bbc55c110e33d9631475c4b80ee71a3fac59 Mon Sep 17 00:00:00 2001 From: Erik Schultink Date: Fri, 23 Jan 2026 14:07:41 -0800 Subject: [PATCH 7/8] CR feedback --- tools/psoxy-test/cli-call.js | 19 +++----- tools/psoxy-test/lib/aws.js | 6 +-- tools/psoxy-test/lib/gcp.js | 4 +- tools/psoxy-test/lib/utils.js | 6 
++- tools/psoxy-test/package-lock.json | 1 - tools/psoxy-test/test/compare-content.test.js | 43 +++++++++++++++++++ 6 files changed, 57 insertions(+), 22 deletions(-) create mode 100644 tools/psoxy-test/test/compare-content.test.js diff --git a/tools/psoxy-test/cli-call.js b/tools/psoxy-test/cli-call.js index b393eee67..e4478c747 100644 --- a/tools/psoxy-test/cli-call.js +++ b/tools/psoxy-test/cli-call.js @@ -93,26 +93,17 @@ const AWS_ACCESS_DENIED_EXCEPTION_REGEXP = new RegExp(/(?arn:aws:iam::\d+:\ } if (options.verifyCollection && result.status === 200) { - // Delegate based on cloud provider logic - const url = new URL(options.url); - - // Dynamically import aws.js for AWS verification to avoid circular deps or if beneficial - // But we didn't import aws at the top yet, let's look at imports. - // We only have `import gcp from './lib/gcp.js';` at top. - // We should probably import aws dynamically or if it's not imported at all. - // `cli-call.js` does NOT import aws.js. I should add `import aws from './lib/aws.js'` or dynamic import. - // I will use dynamic import here as a quick fix or updated it properly. - // But since replace_file_content targets specific lines, I am inside the block. 
+ // Delegate based on cloud provider logic + const url = new URL(options.url); + const isGcp = options.force === 'gcp' || gcp.isValidURL(url); const isAws = options.force === 'aws' || (!isGcp && (url.hostname.endsWith('amazonaws.com') || url.hostname.endsWith('on.aws'))); // rough check or rely on fallback if (isGcp) { await gcp.verifyCollection({ - verifyCollection: options.verifyCollection, - schedulerJob: options.schedulerJob, - url: options.url, - body: options.body, + ...options, + bucketName: options.verifyCollection, startTime: startTime }, logger); } else { diff --git a/tools/psoxy-test/lib/aws.js b/tools/psoxy-test/lib/aws.js index 30a74c575..c780b09da 100644 --- a/tools/psoxy-test/lib/aws.js +++ b/tools/psoxy-test/lib/aws.js @@ -436,7 +436,7 @@ async function verifyCollection(options, logger) { const buffer = Buffer.concat(chunks); // Check for gzip - const isGzippedContent = isGzipped(buffer) || getResponse.ContentEncoding === 'gzip'; + const isGzippedContent = (await isGzipped(buffer)) || getResponse.ContentEncoding === 'gzip'; if (isGzippedContent) { contentStr = (await new Promise((resolve, reject) => { zlib.gunzip(buffer, (err, res) => { @@ -465,8 +465,8 @@ async function verifyCollection(options, logger) { } if (items.length > 0) { - const matchResult = compareContent(items, expectedContent, logger); - if (matchResult.found) { + const matchFound = compareContent(items, expectedContent, logger); + if (matchFound) { logger.success(`Verification Successful: Content matches.`); return; } else { diff --git a/tools/psoxy-test/lib/gcp.js b/tools/psoxy-test/lib/gcp.js index bfe88aeeb..45121bf3c 100644 --- a/tools/psoxy-test/lib/gcp.js +++ b/tools/psoxy-test/lib/gcp.js @@ -416,8 +416,8 @@ async function verifyBucket(bucketName, expectedContent, startTime, logger) { } if (items.length > 0) { - const matchResult = compareContent(items, expectedContent, logger); - if (matchResult.found) { + const matchFound = compareContent(items, expectedContent, logger); + 
if (matchFound) { logger.success(`Verification Successful: Content matches.`); return; } else { diff --git a/tools/psoxy-test/lib/utils.js b/tools/psoxy-test/lib/utils.js index f95aaf21d..642277013 100644 --- a/tools/psoxy-test/lib/utils.js +++ b/tools/psoxy-test/lib/utils.js @@ -817,7 +817,7 @@ async function pollAsyncResponse(locationUrl, options = {}) { * @param {Array} items - Array of actual items found in the file * @param {string} expectedContent - Expected JSON string * @param {Object} logger - Logger instance - * @returns {Object} - { found: boolean } + * @returns {boolean} */ function compareContent(items, expectedContent, logger) { let expectedJson; @@ -847,7 +847,7 @@ function compareContent(items, expectedContent, logger) { return false; }); - return { found }; + return found; } /** @@ -860,6 +860,7 @@ function sleep(ms) { } export { + addFilenameSuffix, compareContent, environmentCheck, executeCommand, @@ -868,6 +869,7 @@ export { getCommonHTTPHeaders, getFileNameFromURL, isGzipped, + parseBucketOption, pollAsyncResponse, requestWrapper as request, resolveAWSRegion, diff --git a/tools/psoxy-test/package-lock.json b/tools/psoxy-test/package-lock.json index 816382faa..e48162fa6 100644 --- a/tools/psoxy-test/package-lock.json +++ b/tools/psoxy-test/package-lock.json @@ -2936,7 +2936,6 @@ "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", "dev": true, "license": "MIT", - "peer": true, "bin": { "acorn": "bin/acorn" }, diff --git a/tools/psoxy-test/test/compare-content.test.js b/tools/psoxy-test/test/compare-content.test.js new file mode 100644 index 000000000..61b5efbdb --- /dev/null +++ b/tools/psoxy-test/test/compare-content.test.js @@ -0,0 +1,43 @@ +import test from 'ava'; +import * as td from 'testdouble'; +import { compareContent } from '../lib/utils.js'; + +test('compareContent: strict equality', (t) => { + const logger = td.object({ info: () => {}, error: () => {}, success: () => {} }); + 
const items = [{ id: 1, name: 'foo' }]; + const expected = JSON.stringify({ id: 1, name: 'foo' }); + + t.true(compareContent(items, expected, logger)); +}); + +test('compareContent: pseudonymized equality (ignore actor.id)', (t) => { + const logger = td.object({ info: () => {}, error: () => {}, success: () => {} }); + const items = [{ actor: { id: 'generated-id', name: 'user' }, action: 'login' }]; + const expected = JSON.stringify({ actor: { id: 'original-id', name: 'user' }, action: 'login' }); + + t.true(compareContent(items, expected, logger)); +}); + +test('compareContent: mismatch', (t) => { + const logger = td.object({ info: () => {}, error: () => {}, success: () => {} }); + const items = [{ id: 1, name: 'foo' }]; + const expected = JSON.stringify({ id: 1, name: 'bar' }); + + t.false(compareContent(items, expected, logger)); +}); + +test('compareContent: empty items', (t) => { + const logger = td.object({ info: () => {}, error: () => {}, success: () => {} }); + const items = []; + const expected = JSON.stringify({ id: 1 }); + + t.false(compareContent(items, expected, logger)); +}); + +test('compareContent: expectedContent as object', (t) => { + const logger = td.object({ info: () => {}, error: () => {}, success: () => {} }); + const items = [{ id: 1, name: 'foo' }]; + const expected = { id: 1, name: 'foo' }; + + t.true(compareContent(items, expected, logger)); +}); From 205bd2b85df8ff4f071de03d036c96e3a33310b1 Mon Sep 17 00:00:00 2001 From: Erik Schultink Date: Fri, 23 Jan 2026 14:20:55 -0800 Subject: [PATCH 8/8] more CR fixes --- tools/psoxy-test/cli-call.js | 4 ++-- tools/psoxy-test/lib/aws.js | 1 + tools/psoxy-test/lib/utils.js | 3 ++- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/tools/psoxy-test/cli-call.js b/tools/psoxy-test/cli-call.js index e4478c747..d8a20da4e 100644 --- a/tools/psoxy-test/cli-call.js +++ b/tools/psoxy-test/cli-call.js @@ -97,8 +97,8 @@ const AWS_ACCESS_DENIED_EXCEPTION_REGEXP = new RegExp(/(?arn:aws:iam::\d+:\ const 
url = new URL(options.url); - const isGcp = options.force === 'gcp' || gcp.isValidURL(url); - const isAws = options.force === 'aws' || (!isGcp && (url.hostname.endsWith('amazonaws.com') || url.hostname.endsWith('on.aws'))); // rough check or rely on fallback + const isGcp = options.force?.toLowerCase() === 'gcp' || gcp.isValidURL(url); + const isAws = options.force?.toLowerCase() === 'aws' || (!isGcp && (url.hostname.endsWith('amazonaws.com') || url.hostname.endsWith('on.aws'))); // rough check or rely on fallback if (isGcp) { await gcp.verifyCollection({ diff --git a/tools/psoxy-test/lib/aws.js b/tools/psoxy-test/lib/aws.js index c780b09da..c352523b7 100644 --- a/tools/psoxy-test/lib/aws.js +++ b/tools/psoxy-test/lib/aws.js @@ -4,6 +4,7 @@ import { GetLogEventsCommand, } from '@aws-sdk/client-cloudwatch-logs'; import { + DeleteObjectCommand, GetObjectCommand, ListBucketsCommand, ListObjectsV2Command, diff --git a/tools/psoxy-test/lib/utils.js b/tools/psoxy-test/lib/utils.js index 642277013..35df8e647 100644 --- a/tools/psoxy-test/lib/utils.js +++ b/tools/psoxy-test/lib/utils.js @@ -879,6 +879,7 @@ export { signJwtWithAWSKMS, signJwtWithGCPKMS, sleep, - transformSpecWithResponse + transformSpecWithResponse, + unzip };