diff --git a/docs/guides/psoxy-test-tool.md b/docs/guides/psoxy-test-tool.md
index dd73d801b..56f95a7d7 100644
--- a/docs/guides/psoxy-test-tool.md
+++ b/docs/guides/psoxy-test-tool.md
@@ -47,6 +47,24 @@ node cli-call.js -u https://us-central1-acme.cloudfunctions.net/outlook-cal/v1.0
 (*) You can obtain it by running `gcloud auth print-identity-token` (using [Google Cloud SDK])

+### End-to-End Verification (Webhook Collection)
+
+When testing a webhook collector, you can use the tool to verify that the posted data was successfully collected, sanitized, and written to the expected output bucket.
+
+```shell
+node cli-call.js -u https://us-central1-acme.cloudfunctions.net/webhook-collector --method POST --body '{...}' --verify-collection my-output-bucket
+```
+
+This will:
+1. Make the POST request to the webhook collector.
+2. On GCP, trigger the associated Cloud Scheduler job that batch-processes pending webhooks.
+3. Poll the specified bucket until the output file appears (up to 60s on GCP, 90s on AWS).
+4. Verify that the content of the output file matches the uploaded data.
+
+**Options:**
+* `--verify-collection <bucket>`: Enables verification mode and specifies the target bucket.
+* `--scheduler-job <name>`: (GCP only) Specifies the Cloud Scheduler job that batch-processes pending webhooks from the Pub/Sub topic.
+
 ### Psoxy Test Call: Health Check option

 Use the `--health-check` option to check if your deploy is correctly configured:
diff --git a/infra/modules/aws-host/main.tf b/infra/modules/aws-host/main.tf
index 54969b165..a37f3630b 100644
--- a/infra/modules/aws-host/main.tf
+++ b/infra/modules/aws-host/main.tf
@@ -388,6 +388,17 @@ resource "aws_iam_policy" "invoke_webhook_collector_urls" {
         ],
         "Effect" : "Allow",
         "Resource" : flatten([for k, v in module.webhook_collectors : v.provisioned_auth_key_pairs])
+      },
+      { # allow test caller to read from sanitized output buckets to verify collection
+        "Action" : [
+          "s3:ListBucket",
+          "s3:GetObject"
+        ],
+        "Effect" : "Allow",
+        "Resource" : flatten([for k, v in module.webhook_collectors : [
+          "arn:aws:s3:::${v.output_sanitized_bucket_id}",
+          "arn:aws:s3:::${v.output_sanitized_bucket_id}/*"
+        ]])
       }
     ]
   }
 }
diff --git a/infra/modules/aws-webhook-collector/main.tf b/infra/modules/aws-webhook-collector/main.tf
index 1768302bf..b7e6b9bcc 100644
--- a/infra/modules/aws-webhook-collector/main.tf
+++ b/infra/modules/aws-webhook-collector/main.tf
@@ -383,6 +383,7 @@ locals {
     example_payload       = coalesce(var.example_payload, "{\"test\": \"data\"}")
     example_identity      = var.example_identity
     collection_path       = local.collection_path
+    sanitized_bucket_name = module.sanitized_output.bucket_id
   })
 }
diff --git a/infra/modules/aws-webhook-collector/test_script.tftpl b/infra/modules/aws-webhook-collector/test_script.tftpl
index 8715e93ac..d647283a2 100644
--- a/infra/modules/aws-webhook-collector/test_script.tftpl
+++ b/infra/modules/aws-webhook-collector/test_script.tftpl
@@ -15,6 +15,7 @@ ${command_cli_call} -u "${collector_endpoint_url}${collection_path}" --method PO
 %{ if example_identity != null ~}
 --identity-subject '${example_identity}' \
 %{ endif ~}
+--verify-collection "${sanitized_bucket_name}" \
 --body '${example_payload}'

 COLLECTION_RC=$?
diff --git a/infra/modules/gcp-host/main.tf b/infra/modules/gcp-host/main.tf
index 4c8f1c846..5d8f60d53 100644
--- a/infra/modules/gcp-host/main.tf
+++ b/infra/modules/gcp-host/main.tf
@@ -509,6 +509,12 @@ echo "Testing Bulk Connectors ..."
 %{for test_script in values(module.bulk_connector)[*].test_script~}
 ./${test_script}
 %{endfor}
+
+echo "Testing Webhook Collectors ..."
+
+%{for test_script in values(module.webhook_collector)[*].test_script~}
+./${test_script}
+%{endfor}
 EOF
 }
diff --git a/infra/modules/gcp-webhook-collector/main.tf b/infra/modules/gcp-webhook-collector/main.tf
index ee9570257..2428e32cc 100644
--- a/infra/modules/gcp-webhook-collector/main.tf
+++ b/infra/modules/gcp-webhook-collector/main.tf
@@ -235,6 +235,9 @@ locals {
   secrets_to_grant_access_to = {
     AUTH_ISSUER = {
       secret_id = module.auth_issuer_secret.secret_ids_within_project["AUTH_ISSUER"]
+    },
+    SERVICE_URL = {
+      secret_id = module.auth_issuer_secret.secret_ids_within_project["SERVICE_URL"]
     }
   }
 }
@@ -501,6 +504,9 @@ resource "local_file" "test_script" {
     example_payload    = coalesce(var.example_payload, "{\"test\": \"data\"}")
     example_identity   = var.example_identity
     collection_path    = "/"
+    scheduler_job_name = google_cloud_scheduler_job.trigger_batch_processing.id
+    bucket_name        = module.sanitized_webhook_output.bucket_name
+    output_path_prefix = var.output_path_prefix
   })
 }
diff --git a/infra/modules/gcp-webhook-collector/test_script.tftpl b/infra/modules/gcp-webhook-collector/test_script.tftpl
index d41e15362..a1417a2a2 100644
--- a/infra/modules/gcp-webhook-collector/test_script.tftpl
+++ b/infra/modules/gcp-webhook-collector/test_script.tftpl
@@ -15,6 +15,8 @@ ${command_cli_call} -u "${collector_endpoint_url}${collection_path}" --method PO
 %{ if example_identity != null ~}
 --identity-subject '${example_identity}' \
 %{ endif ~}
+--verify-collection "${bucket_name}" \
+--scheduler-job "${scheduler_job_name}" \
 --body '${example_payload}'

 COLLECTION_RC=$?
diff --git a/tools/psoxy-test/cli-call.js b/tools/psoxy-test/cli-call.js
index 8f335d4ae..d8a20da4e 100644
--- a/tools/psoxy-test/cli-call.js
+++ b/tools/psoxy-test/cli-call.js
@@ -4,6 +4,7 @@ import { Command, Option } from 'commander';
 import _ from 'lodash';
 import { createRequire } from 'module';
 import { callDataSourceEndpoints } from './data-sources/runner.js';
+import gcp from './lib/gcp.js';
 import getLogger from './lib/logger.js';
 import psoxyTestCall from './psoxy-test-call.js';

@@ -39,6 +40,8 @@ const AWS_ACCESS_DENIED_EXCEPTION_REGEXP = new RegExp(/(?arn:aws:iam::\d+:\
   .option('--request-no-response', "Request 'No response body' back from proxy (tests side-output case)", false)
   .option('--async', 'Process request asynchronously (adds X-Psoxy-Process-Async header)', false)
   .option('-b, --body ', 'Body to send in request (it expects a JSON string)')
+  .option('--verify-collection <bucket>', 'Verify that the posted data appears in the specified bucket (GCS/S3)')
+  .option('--scheduler-job <name>', 'GCP: Cloud Scheduler job name to trigger batch processing')
   .addOption(new Option('-d, --data-source ', 'Data source to test all available endpoints').choices([
     //TODO: pull this list from terraform console or something??
@@ -82,11 +85,37 @@ const AWS_ACCESS_DENIED_EXCEPTION_REGEXP = new RegExp(/(?arn:aws:iam::\d+:\

   let result;
   try {
+    const startTime = Date.now();
     if (options.dataSource) {
       result = await callDataSourceEndpoints(options);
     } else {
       result = await psoxyTestCall(options);
     }
+
+    if (options.verifyCollection && result.status === 200) {
+      // determine the cloud provider, so verification can be delegated accordingly
+      const url = new URL(options.url);
+      const isGcp = options.force?.toLowerCase() === 'gcp' || gcp.isValidURL(url);
+
+      if (isGcp) {
+        await gcp.verifyCollection({
+          ...options,
+          startTime: startTime
+        }, logger);
+      } else {
+        // no GCP match --> assume an AWS deployment
+        const aws = (await import('./lib/aws.js')).default;
+        await aws.verifyCollection({
+          verifyCollection: options.verifyCollection,
+          url: options.url,
+          body: options.body,
+          startTime: startTime,
+          role: options.role,
+          region: options.region,
+        }, logger);
+      }
+    }
+
   } catch (error) {
     if (error?.name === 'AccessDenied' && error.message && AWS_ACCESS_DENIED_EXCEPTION_REGEXP.test(error.message)) {
diff --git a/tools/psoxy-test/lib/aws.js b/tools/psoxy-test/lib/aws.js
index 6b4b150f0..c352523b7 100644
--- a/tools/psoxy-test/lib/aws.js
+++ b/tools/psoxy-test/lib/aws.js
@@ -1,26 +1,30 @@
 import {
-    CloudWatchLogsClient,
-    DescribeLogStreamsCommand,
-    GetLogEventsCommand,
+  CloudWatchLogsClient,
+  DescribeLogStreamsCommand,
+  GetLogEventsCommand,
 } from '@aws-sdk/client-cloudwatch-logs';
 import {
-    DeleteObjectCommand,
-    GetObjectCommand,
-    ListBucketsCommand,
-    ListObjectsV2Command,
-    PutObjectCommand,
-    S3Client
+  DeleteObjectCommand,
+  GetObjectCommand,
+  ListBucketsCommand,
+  ListObjectsV2Command,
+  PutObjectCommand,
+  S3Client
 } from '@aws-sdk/client-s3';
 import {
-    executeWithRetry,
-    getAWSCredentials,
-    getCommonHTTPHeaders,
-    isGzipped,
-    request,
-    resolveAWSRegion,
-    resolveHTTPMethod,
-    signAWSRequestURL,
-    signJwtWithAWSKMS
+  compareContent,
+  executeWithRetry,
+  getAWSCredentials,
+  getCommonHTTPHeaders,
+  isGzipped,
+  request,
+  resolveAWSRegion,
+  resolveHTTPMethod,
+  signAWSRequestURL,
+  signJwtWithAWSKMS,
+  sleep,
 } from './utils.js';
+import _ from 'lodash';
+import zlib from 'zlib';

 import fs from 'fs';
@@ -374,6 +378,116 @@ async function deleteObject(bucket, key, options, client) {
     // BypassGovernanceRetention: true,
   }));
 }
+/**
+ * Verifies that a file containing the expected content appears in the bucket after startTime.
+ *
+ * @param {Object} options
+ * @param {string} options.verifyCollection - bucket name
+ * @param {string} options.body - expected content
+ * @param {number} options.startTime - timestamp in ms
+ * @param {string} options.role
+ * @param {string} options.region
+ * @param {Object} logger
+ */
+async function verifyCollection(options, logger) {
+  const bucketName = options.verifyCollection;
+  const expectedContent = options.body;
+  const startTime = options.startTime;
+  const timeout = 90000; // 90 seconds
+  const pollInterval = 5000; // 5 seconds
+  const endTime = Date.now() + timeout;
+
+  logger.info(`Verifying content in bucket: ${bucketName}. Will wait up to ${timeout / 1000}s`);
+
+  const client = await createS3Client(options.role, options.region);
+
+  while (Date.now() < endTime) {
+    const elapsed = Math.round((Date.now() - startTime) / 1000);
+    logger.info(`Waiting for content to appear in bucket... [${Math.max(0, elapsed)}s elapsed]`);
+
+    // List objects and filter by LastModified below.
+    // ListObjectsV2 returns up to 1000 keys per call, which is enough for a
+    // test bucket; a key prefix could narrow the listing if needed.
+    const command = new ListObjectsV2Command({
+      Bucket: bucketName
+    });
+    const response = await client.send(command);
+
+    const files = response.Contents || [];
+
+    // Filter by LastModified > startTime, newest first
+    const newFiles = files.filter(f => f.LastModified && new Date(f.LastModified).getTime() > startTime)
+      .sort((a, b) => new Date(b.LastModified).getTime() - new Date(a.LastModified).getTime());
+
+    if (newFiles.length > 0) {
+      const file = newFiles[0];
+      logger.info(`New file found: ${file.Key} (Created: ${new Date(file.LastModified).toISOString()})`);
+
+      // Download content
+      const getObjCmd = new GetObjectCommand({
+        Bucket: bucketName,
+        Key: file.Key
+      });
+      const getResponse = await client.send(getObjCmd);
+
+      let contentStr = '';
+      if (getResponse.Body) {
+        const chunks = [];
+        for await (const chunk of getResponse.Body) {
+          chunks.push(chunk);
+        }
+        const buffer = Buffer.concat(chunks);
+
+        // Check for gzip
+        const isGzippedContent = (await isGzipped(buffer)) || getResponse.ContentEncoding === 'gzip';
+        if (isGzippedContent) {
+          contentStr = (await new Promise((resolve, reject) => {
+            zlib.gunzip(buffer, (err, res) => {
+              if (err) reject(err);
+              else resolve(res);
+            });
+          })).toString();
+        } else {
+          contentStr = buffer.toString();
+        }
+      }
+
+      logger.info(`Found Content: ${contentStr}`);
+
+      let items = [];
+      try {
+        const jsonContent = JSON.parse(contentStr);
+        if (Array.isArray(jsonContent)) {
+          items = jsonContent;
+        } else if (_.isPlainObject(jsonContent)) {
+          items = [jsonContent];
+        }
+      } catch (e) {
+        logger.error(`Failed to parse file content: ${e.message}`);
+        throw new Error(`Verification failed: Invalid JSON in file ${file.Key}`);
+      }
+
+      if (items.length > 0) {
+        const matchFound = compareContent(items, expectedContent, logger);
+        if (matchFound) {
+          logger.success(`Verification Successful: Content matches.`);
+          return;
+        } else {
+          logger.error(`Verification Failed: Content does not match.`);
+          throw new Error(`Verification failed: Content mismatch in file ${file.Key}`);
+        }
+      } else {
+        logger.warn(`File is empty or contains no items.`);
+        throw new Error(`Verification failed: Empty file ${file.Key}`);
+      }
+    }
+
+    await sleep(pollInterval);
+  }
+
+  logger.error('No new files found in bucket within timeout.');
+  throw new Error('Verification failed: Expected content not found in bucket.');
+}

 export default {
   call,
@@ -389,4 +503,5 @@
   listObjects,
   parseLogEvents,
   upload,
+  verifyCollection,
 }
diff --git a/tools/psoxy-test/lib/gcp.js b/tools/psoxy-test/lib/gcp.js
index d3350378b..45121bf3c 100644
--- a/tools/psoxy-test/lib/gcp.js
+++ b/tools/psoxy-test/lib/gcp.js
@@ -1,15 +1,18 @@
 import { Logging } from '@google-cloud/logging';
+import { CloudSchedulerClient } from '@google-cloud/scheduler';
 import { Storage } from '@google-cloud/storage';
 import _ from 'lodash';
 import getLogger from './logger.js';
 import {
-    executeCommand,
-    executeWithRetry,
-    getCommonHTTPHeaders,
-    isGzipped,
-    request,
-    resolveHTTPMethod,
-    signJwtWithGCPKMS,
+  compareContent,
+  executeCommand,
+  executeWithRetry,
+  getCommonHTTPHeaders,
+  isGzipped,
+  request,
+  resolveHTTPMethod,
+  signJwtWithGCPKMS,
+  sleep
 } from './utils.js';

@@ -112,14 +115,25 @@
  * @return {Array} - array of serialized log entries
  */
 async function getLogs(options = {}) {
-  const logging = new Logging();
-  const log = logging.log('cloudfunctions.googleapis.com%2Fcloud-functions');
-  const [entries] = await log.getEntries({
-    filter: `resource.labels.function_name=${options.functionName}`,
+  const logging = new Logging({ projectId: options.projectId });
+
+  // Support both Gen 1 (cloud_function) and Gen 2 (cloud_run_revision) deployments;
+  // Gen 2 logs appear under run.googleapis.com/stdout or stderr, but resource.labels.service_name identifies the function
+  const filter = `
+    (resource.type="cloud_function" AND resource.labels.function_name="${options.functionName}")
+    OR
+    (resource.type="cloud_run_revision" AND resource.labels.service_name="${options.functionName}")
+  `;
+
+  const [entries] = await logging.getEntries({
+    filter: filter,
     resourceNames: [`projects/${options.projectId}`],
-    orderBy: 'timestamp asc',
+    orderBy: 'timestamp desc', // Get newest first
+    pageSize: 50,
   });
-  return entries.map(entry => entry.toStructuredJSON());
+
+  // Return in chronological order
+  return entries.reverse().map(entry => entry.toStructuredJSON());
 }

 /**
@@ -195,7 +209,7 @@ function parseLogEntries(entries) {
     return [];
   }

-  // https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#LogSeverity
+  // https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry
   const LOG_LEVELS = ['WARNING', 'ERROR', 'CRITICAL', 'ALERT', 'EMERGENCY'];
   const dateRegex = /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3}/
   return entries.map(entry => {
@@ -335,6 +349,120 @@ async function download(bucketName, fileName, destination, client, logger) {
   return downloadResponse;
 }

+/**
+ * Triggers a Cloud Scheduler job to run immediately.
+ *
+ * @param {string} jobName
+ * @param {Object} logger
+ */
+async function triggerScheduler(jobName, logger) {
+  logger.info(`Triggering Cloud Scheduler job: ${jobName}`);
+  const client = new CloudSchedulerClient();
+  const [response] = await client.runJob({ name: jobName });
+  logger.success(`Cloud Scheduler job triggered: ${response.name}`);
+}
+
+/**
+ * Verifies that a file containing the expected content appears in the bucket after startTime.
+ *
+ * @param {string} bucketName
+ * @param {string} expectedContent
+ * @param {number} startTime - timestamp in ms
+ * @param {Object} logger
+ */
+async function verifyBucket(bucketName, expectedContent, startTime, logger) {
+  const timeout = 60000; // 60 seconds
+  const pollInterval = 5000; // 5 seconds
+  const endTime = Date.now() + timeout;
+
+  logger.info(`Verifying content in bucket: ${bucketName}. Will wait up to ${timeout / 1000}s`);
+
+  const client = createStorageClient();
+  const bucket = client.bucket(bucketName);
+
+  while (Date.now() < endTime) {
+    const elapsed = Math.round((Date.now() - startTime) / 1000); // approximate, measured from test start
+    logger.info(`Waiting for content to appear in bucket... [${Math.max(0, elapsed)}s elapsed]`);
+
+    // Check for new files
+    const [files] = await bucket.getFiles();
+
+    // Keep only files created after the test started, newest first
+    const newFiles = files.filter(f => new Date(f.metadata.timeCreated).getTime() > startTime)
+      .sort((a, b) => new Date(b.metadata.timeCreated).getTime() - new Date(a.metadata.timeCreated).getTime());
+
+    if (newFiles.length > 0) {
+      // Stop polling as soon as we find a file (expecting only one)
+      const file = newFiles[0];
+      logger.info(`New file found: ${file.name} (Created: ${new Date(file.metadata.timeCreated).toISOString()})`);
+
+      const [content] = await file.download();
+      const contentStr = content.toString();
+      logger.info(`Found Content: ${contentStr}`);
+
+      // Parse found content
+      let items = [];
+      try {
+        const jsonContent = JSON.parse(contentStr);
+        if (Array.isArray(jsonContent)) {
+          items = jsonContent;
+        } else if (_.isPlainObject(jsonContent)) {
+          items = [jsonContent];
+        }
+      } catch (e) {
+        logger.error(`Failed to parse file content: ${e.message}`);
+        throw new Error(`Verification failed: Invalid JSON in file ${file.name}`);
+      }
+
+      if (items.length > 0) {
+        const matchFound = compareContent(items, expectedContent, logger);
+        if (matchFound) {
+          logger.success(`Verification Successful: Content matches.`);
+          return;
+        } else {
+          logger.error(`Verification Failed: Content does not match.`);
+          throw new Error(`Verification failed: Content mismatch in file ${file.name}`);
+        }
+      } else {
+        logger.warn(`File is empty or contains no items.`);
+        throw new Error(`Verification failed: Empty file ${file.name}`);
+      }
+    }
+
+    await sleep(pollInterval);
+  }
+
+  logger.error('No new files found in bucket within timeout.');
+  throw new Error('Verification failed: Expected content not found in bucket.');
+}
+
+/**
+ * End-to-end verification of webhook collection in GCP.
+ *
+ * @param {Object} options
+ * @param {string} options.verifyCollection - bucket name
+ * @param {string} options.schedulerJob - optional, job name
+ * @param {string} options.url - function URL
+ * @param {string} options.body - original POST body
+ * @param {number} options.startTime - timestamp when test started
+ * @param {Object} logger
+ */
+async function verifyCollection(options, logger) {
+  const bucketName = options.verifyCollection;
+
+  // 1. Trigger the Cloud Scheduler job that batch-processes pending webhooks
+  const jobName = options.schedulerJob;
+
+  if (jobName) {
+    await triggerScheduler(jobName, logger);
+  } else {
+    logger.info('Skipping Cloud Scheduler trigger (no job name provided). Waiting for scheduled run...');
+  }
+
+  // 2. Verify the bucket contents
+  await verifyBucket(bucketName, options.body, options.startTime, logger);
+}
+
 export default {
   call,
   createStorageClient,
@@ -347,4 +475,5 @@
   parseLogEntries,
   listFilesMetadata,
   upload,
+  verifyCollection,
 };
diff --git a/tools/psoxy-test/lib/utils.js b/tools/psoxy-test/lib/utils.js
index 11f6904e0..35df8e647 100644
--- a/tools/psoxy-test/lib/utils.js
+++ b/tools/psoxy-test/lib/utils.js
@@ -1,8 +1,8 @@
 import { KMSClient, SignCommand } from '@aws-sdk/client-kms';
 import { GetObjectCommand, S3Client } from '@aws-sdk/client-s3';
 import {
-    fromNodeProviderChain,
-    fromTemporaryCredentials
+  fromNodeProviderChain,
+  fromTemporaryCredentials
 } from "@aws-sdk/credential-providers";
 import { KeyManagementServiceClient } from '@google-cloud/kms';
 import { Storage } from '@google-cloud/storage';
@@ -592,12 +592,15 @@ async function unzip(filePath) {
 }

 /**
- * Check if file is gzipped
- * @param {string} filePath
+ * Check if file or buffer is gzipped
+ * @param {string|Buffer} file - filePath or Buffer
  * @returns {boolean}
  */
-async function isGzipped(filePath) {
-  return isgzipBuffer(await fs.readFile(filePath));
+async function isGzipped(file) {
+  if (Buffer.isBuffer(file)) {
+    return isgzipBuffer(file);
+  }
+  return isgzipBuffer(await fs.readFile(file));
 }

 /**
@@ -808,21 +811,75 @@ async function pollAsyncResponse(locationUrl, options = {}) {
   throw new Error(`Timeout: File not available after ${maxAttempts * 10} seconds of polling`);
 }

+/**
+ * Compare actual content items against expected content.
+ *
+ * @param {Array} items - Array of actual items found in the file
+ * @param {string|Object} expectedContent - Expected content, as a JSON string or already-parsed object
+ * @param {Object} logger - Logger instance
+ * @returns {boolean}
+ */
+function compareContent(items, expectedContent, logger) {
+  let expectedJson;
+  if (typeof expectedContent === 'string') {
+    expectedJson = JSON.parse(expectedContent);
+  } else {
+    expectedJson = expectedContent;
+  }
+
+  const found = items.some(item => {
+    // 1. Try strict equality first
+    if (_.isEqual(item, expectedJson)) return true;
+
+    // 2. Try relaxed equality, ignoring actor.id, which the proxy is expected to pseudonymize
+    const itemNoId = _.cloneDeep(item);
+    if (itemNoId.actor) delete itemNoId.actor.id;
+
+    const expectedNoId = _.cloneDeep(expectedJson);
+    if (expectedNoId.actor) delete expectedNoId.actor.id;
+
+    if (_.isEqual(itemNoId, expectedNoId)) {
+      logger.info('Match found with differing actor.id (likely pseudonymized)');
+      return true;
+    }
+
+    logger.info(`Comparison failed.\nActual (no ID): ${JSON.stringify(itemNoId)}\nExpected (no ID): ${JSON.stringify(expectedNoId)}`);
+    return false;
+  });
+
+  return found;
+}
+
+/**
+ * Sleep for a given number of milliseconds
+ * @param {number} ms
+ * @returns {Promise}
+ */
+function sleep(ms) {
+  return new Promise(resolve => setTimeout(resolve, ms));
+}
+
 export {
-  addFilenameSuffix, environmentCheck,
-  executeCommand,
-  executeWithRetry,
-  getAWSCredentials,
-  getCommonHTTPHeaders,
-  getFileNameFromURL,
-  isGzipped,
-  parseBucketOption, pollAsyncResponse, requestWrapper as request,
-  resolveAWSRegion,
-  resolveHTTPMethod,
-  saveToFile,
-  signAWSRequestURL,
-  signJwtWithAWSKMS,
-  signJwtWithGCPKMS,
-  transformSpecWithResponse, unzip
+  addFilenameSuffix,
+  compareContent,
+  environmentCheck,
+  executeCommand,
+  executeWithRetry,
+  getAWSCredentials,
+  getCommonHTTPHeaders,
+  getFileNameFromURL,
+  isGzipped,
+  parseBucketOption,
+  pollAsyncResponse,
+  requestWrapper as request,
+  resolveAWSRegion,
+  resolveHTTPMethod,
+  saveToFile,
+  signAWSRequestURL,
+  signJwtWithAWSKMS,
+  signJwtWithGCPKMS,
+  sleep,
+  transformSpecWithResponse,
+  unzip
 };
diff --git a/tools/psoxy-test/package-lock.json b/tools/psoxy-test/package-lock.json
index aa43e31de..e48162fa6 100644
--- a/tools/psoxy-test/package-lock.json
+++ b/tools/psoxy-test/package-lock.json
@@ -15,6 +15,7 @@
         "@aws-sdk/credential-providers": "^3.911.0",
         "@google-cloud/kms": "^5.2.1",
         "@google-cloud/logging": "^11.2.1",
+        "@google-cloud/scheduler": "^5.3.1",
         "@google-cloud/storage": "^7.17.2",
         "@stdlib/assert-is-gzip-buffer": "^0.2.2",
         "aws4": "^1.13.2",
@@ -1258,6 +1259,18 @@
         "node": ">=14"
       }
     },
+    "node_modules/@google-cloud/scheduler": {
+      "version": "5.3.1",
+      "resolved": "https://registry.npmjs.org/@google-cloud/scheduler/-/scheduler-5.3.1.tgz",
+      "integrity": "sha512-EGPTRRjMO4F/52HPVPVmcP4EUaONnISXo6VJP75QOQw9cMhg+yiruj8zb0BM4iUIY8+vOwlFv/VCHqrZ8gj6bw==",
+      "license": "Apache-2.0",
+      "dependencies": {
+        "google-gax": "^5.0.0"
+      },
+      "engines": {
+        "node": ">=18"
+      }
+    },
     "node_modules/@google-cloud/storage": {
       "version": "7.17.2",
       "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.17.2.tgz",
@@ -2923,7 +2936,6 @@
       "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "bin": {
         "acorn": "bin/acorn"
       },
diff --git a/tools/psoxy-test/package.json b/tools/psoxy-test/package.json
index 46ea9b04e..f4fd9c12f 100644
--- a/tools/psoxy-test/package.json
+++ b/tools/psoxy-test/package.json
@@ -25,6 +25,7 @@
     "@aws-sdk/credential-providers": "^3.911.0",
     "@google-cloud/kms": "^5.2.1",
     "@google-cloud/logging": "^11.2.1",
+    "@google-cloud/scheduler": "^5.3.1",
     "@google-cloud/storage": "^7.17.2",
     "@stdlib/assert-is-gzip-buffer": "^0.2.2",
     "aws4": "^1.13.2",
diff --git a/tools/psoxy-test/test/compare-content.test.js b/tools/psoxy-test/test/compare-content.test.js
new file mode 100644
index 000000000..61b5efbdb
--- /dev/null
+++ b/tools/psoxy-test/test/compare-content.test.js
@@ -0,0 +1,43 @@
+import test from 'ava';
+import * as td from 'testdouble';
+import { compareContent } from '../lib/utils.js';
+
+test('compareContent: strict equality', (t) => {
+  const logger = td.object({ info: () => {}, error: () => {}, success: () => {} });
+  const items = [{ id: 1, name: 'foo' }];
+  const expected = JSON.stringify({ id: 1, name: 'foo' });
+
+  t.true(compareContent(items, expected, logger));
+});
+
+test('compareContent: pseudonymized equality (ignore actor.id)', (t) => {
+  const logger = td.object({ info: () => {}, error: () => {}, success: () => {} });
+  const items = [{ actor: { id: 'generated-id', name: 'user' }, action: 'login' }];
+  const expected = JSON.stringify({ actor: { id: 'original-id', name: 'user' }, action: 'login' });
+
+  t.true(compareContent(items, expected, logger));
+});
+
+test('compareContent: mismatch', (t) => {
+  const logger = td.object({ info: () => {}, error: () => {}, success: () => {} });
+  const items = [{ id: 1, name: 'foo' }];
+  const expected = JSON.stringify({ id: 1, name: 'bar' });
+
+  t.false(compareContent(items, expected, logger));
+});
+
+test('compareContent: empty items', (t) => {
+  const logger = td.object({ info: () => {}, error: () => {}, success: () => {} });
+  const items = [];
+  const expected = JSON.stringify({ id: 1 });
+
+  t.false(compareContent(items, expected, logger));
+});
+
+test('compareContent: expectedContent as object', (t) => {
+  const logger = td.object({ info: () => {}, error: () => {}, success: () => {} });
+  const items = [{ id: 1, name: 'foo' }];
+  const expected = { id: 1, name: 'foo' };
+
+  t.true(compareContent(items, expected, logger));
+});
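
For anyone trying the new flag by hand against an AWS deployment, a minimal end-to-end invocation might look like the sketch below. The function URL, role ARN, region, and bucket name are illustrative placeholders, and it assumes the tool's pre-existing `--role` and `--region` options, which this diff forwards to `aws.verifyCollection`:

```shell
# POST a test payload, then poll the sanitized output bucket until a file newer
# than the request appears and its content matches the body (up to 90s on AWS)
node cli-call.js -u https://abcdefgh1234.lambda-url.us-east-1.on.aws/ \
  --method POST \
  --body '{"test": "data"}' \
  --role arn:aws:iam::123456789012:role/PsoxyTestCaller \
  --region us-east-1 \
  --verify-collection my-sanitized-output-bucket
```

`--scheduler-job` is omitted here; per the provider dispatch in `cli-call.js`, that option is only consumed by the GCP path, where it triggers the Cloud Scheduler job that batch-processes pending webhooks before polling begins.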