diff --git a/conductor/archive/batch_tokenization_20260309/index.md b/conductor/archive/batch_tokenization_20260309/index.md new file mode 100644 index 0000000..e0fc731 --- /dev/null +++ b/conductor/archive/batch_tokenization_20260309/index.md @@ -0,0 +1,5 @@ +# Track batch_tokenization_20260309 Context + +- [Specification](./spec.md) +- [Implementation Plan](./plan.md) +- [Metadata](./metadata.json) diff --git a/conductor/archive/batch_tokenization_20260309/metadata.json b/conductor/archive/batch_tokenization_20260309/metadata.json new file mode 100644 index 0000000..214a75b --- /dev/null +++ b/conductor/archive/batch_tokenization_20260309/metadata.json @@ -0,0 +1,8 @@ +{ + "track_id": "batch_tokenization_20260309", + "type": "feature", + "status": "new", + "created_at": "2026-03-09T14:30:00Z", + "updated_at": "2026-03-09T14:30:00Z", + "description": "Add Batch Tokenize/Detokenize Endpoints (wrap existing single-item logic in a loop with transaction)" +} diff --git a/conductor/archive/batch_tokenization_20260309/plan.md b/conductor/archive/batch_tokenization_20260309/plan.md new file mode 100644 index 0000000..4c19828 --- /dev/null +++ b/conductor/archive/batch_tokenization_20260309/plan.md @@ -0,0 +1,49 @@ +# Implementation Plan: Batch Tokenize/Detokenize Endpoints + +This plan outlines the steps to implement batch tokenization and detokenization endpoints in the Secrets manager. + +## Phase 1: Domain and Repository Layer + +- [x] Task: Define Batch Tokenization Interfaces [e8a8cee] + - [ ] Add `CreateBatch` to `TokenRepository` interface in `internal/tokenization/domain/token.go`. + - [ ] Add `GetBatchByTokens` to `TokenRepository` interface in `internal/tokenization/domain/token.go`. +- [x] Task: Implement Batch Repository Methods (PostgreSQL) [517777c] + - [ ] Implement `CreateBatch` in `internal/tokenization/repository/postgresql/token_repository.go`. + - [ ] Implement `GetBatchByTokens` in `internal/tokenization/repository/postgresql/token_repository.go`. 
+ - [ ] Write integration tests for these methods (tagged with `//go:build integration`). +- [x] Task: Implement Batch Repository Methods (MySQL) [cc03816] + - [ ] Implement `CreateBatch` in `internal/tokenization/repository/mysql/token_repository.go`. + - [ ] Implement `GetBatchByTokens` in `internal/tokenization/repository/mysql/token_repository.go`. + - [ ] Write integration tests for these methods (tagged with `//go:build integration`). +- [x] Task: Implement Batch Usecase Logic [191cb29] + - [ ] Add `TokenizeBatch` to `TokenizationUsecase` in `internal/tokenization/usecase/tokenization_usecase.go`. + - [ ] Add `DetokenizeBatch` to `TokenizationUsecase` in `internal/tokenization/usecase/tokenization_usecase.go`. + - [ ] Ensure both methods use `TxManager` for atomicity. + - [ ] Implement the loop over existing single-item logic. + - [ ] Write unit tests for the new usecase methods. +- [x] Task: Conductor - User Manual Verification 'Phase 1: Domain and Repository Layer' (Protocol in workflow.md) + +## Phase 2: HTTP Layer + +- [x] Task: Define Request/Response DTOs [cc85bfe] + - [ ] Create `TokenizeBatchRequest` and `TokenizeBatchResponse` in `internal/tokenization/http/dto.go` (or equivalent). + - [ ] Create `DetokenizeBatchRequest` and `DetokenizeBatchResponse` in `internal/tokenization/http/dto.go`. + - [ ] Implement validation rules (e.g., max 100 items). +- [x] Task: Implement HTTP Handlers [ee3290b] + - [ ] Implement `TokenizeBatch` handler in `internal/tokenization/http/tokenization_handler.go`. + - [ ] Implement `DetokenizeBatch` handler in `internal/tokenization/http/tokenization_handler.go`. + - [ ] Write unit tests for the new handlers in `internal/tokenization/http/tokenization_handler_test.go`. +- [x] Task: Register Routes [ee3290b] + - [ ] Add the new batch routes to the router in `internal/tokenization/http/tokenization_handler.go` (or `internal/app/di_tokenization.go`). 
+- [x] Task: Conductor - User Manual Verification 'Phase 2: HTTP Layer' (Protocol in workflow.md) + +## Phase 3: Documentation and Integration Testing + +- [x] Task: Update Integration Flow Tests [efa3c2c] + - [ ] Add batch operation test cases to `test/integration/tokenization_flow_test.go`. + - [ ] Verify atomicity by intentionally failing one item in a batch. +- [x] Task: Update OpenAPI Specification [fa50f71] + - [ ] Add the new batch endpoints to `docs/openapi.yaml`. +- [x] Task: Update Engine Documentation [9faab2e] + - [ ] Update `docs/engines/tokenization.md` with examples of batch requests and responses. +- [x] Task: Conductor - User Manual Verification 'Phase 3: Documentation and Integration Testing' (Protocol in workflow.md) diff --git a/conductor/archive/batch_tokenization_20260309/spec.md b/conductor/archive/batch_tokenization_20260309/spec.md new file mode 100644 index 0000000..442bfe0 --- /dev/null +++ b/conductor/archive/batch_tokenization_20260309/spec.md @@ -0,0 +1,39 @@ +# Specification: Batch Tokenize/Detokenize Endpoints + +## Overview +This track introduces batch processing capabilities to the Tokenization Engine. Currently, tokenization and detokenization are performed on a single item at a time. This feature will add new endpoints to allow clients to tokenize or detokenize multiple items in a single request, wrapped in a database transaction for atomicity. + +## Functional Requirements +- **New Endpoints:** + - `POST /v1/tokenization/keys/:name/tokenize-batch`: Batch tokenize a list of values using a named key. + - `POST /v1/tokenization/detokenize-batch`: Batch detokenize a list of tokens. +- **Batch Limit:** A configurable limit of 100 items per batch request will be enforced to ensure performance and prevent resource exhaustion. +- **Atomicity:** Both batch endpoints MUST be atomic. If any single item in the batch fails (e.g., validation error, database failure), the entire request MUST fail, and any database changes MUST be rolled back. 
+- **Request/Response Formats:** + - `tokenize-batch`: + - Request: `{"values": ["val1", "val2", ...]}` + - Response: `{"tokens": ["token1", "token2", ...]}` + - `detokenize-batch`: + - Request: `{"tokens": ["token1", "token2", ...]}` + - Response: `{"values": ["val1", "val2", ...]}` +- **Documentation:** + - Update `docs/engines/tokenization.md` to include batch operations. + - Update `docs/openapi.yaml` with the new endpoint definitions. + +## Non-Functional Requirements +- **Performance:** Batch processing should be more efficient than multiple single-item calls by reducing network round-trips and utilizing a single database transaction. +- **Security:** Standard capability validation (`tokenize` or `detokenize`) must be enforced for the batch operations. + +## Acceptance Criteria +- [ ] Clients can successfully tokenize up to 100 values in a single call. +- [ ] Clients can successfully detokenize up to 100 tokens in a single call. +- [ ] If any value in a `tokenize-batch` request is invalid, the entire request returns an error (400 Bad Request) and no tokens are created. +- [ ] If any token in a `detokenize-batch` request is invalid, the entire request returns an error (400 Bad Request) and no values are returned. +- [ ] The batch limit is enforced and returns a 400 Bad Request if exceeded. +- [ ] Unit tests cover new domain logic, usecase methods, and HTTP handlers. +- [ ] Integration tests in `test/integration/tokenization_flow_test.go` cover batch operations for both PostgreSQL and MySQL. +- [ ] Documentation (`docs/engines/tokenization.md`) and OpenAPI spec (`docs/openapi.yaml`) are updated. + +## Out of Scope +- Partial success/failure handling for batch requests. +- Asynchronous batch processing. 
diff --git a/conductor/product.md b/conductor/product.md index 1acc6b4..b2b3d5d 100644 --- a/conductor/product.md +++ b/conductor/product.md @@ -14,7 +14,7 @@ To provide a secure, developer-friendly, and lightweight secrets management plat ## Core Features - **Secret Management (Storage):** Versioned, envelope-encrypted storage with support for arbitrary key-value pairs and strict path validation. - **Transit Engine (EaaS):** On-the-fly encryption/decryption of application data without database storage. -- **Tokenization Engine:** Format-preserving tokens for sensitive data types like credit card numbers. +- **Tokenization Engine:** Format-preserving tokens for sensitive data types like credit card numbers, with support for atomic batch processing. - **Auth Token Revocation:** Immediate invalidation of authentication tokens (single or client-wide) with full state management. - **Client Secret Rotation:** Self-service and administrative rotation of client secrets with automatic auth token revocation. - **Audit Logs:** HMAC-signed audit trails capturing every access attempt and policy evaluation, with support for advanced filtering by client and date range. diff --git a/docs/engines/tokenization.md b/docs/engines/tokenization.md index f28a1a3..a2c05f1 100644 --- a/docs/engines/tokenization.md +++ b/docs/engines/tokenization.md @@ -74,6 +74,27 @@ Example response (`201 Created`): } ``` +### Tokenize Data (Batch) + +- **Endpoint**: `POST /v1/tokenization/keys/:name/tokenize-batch` +- **Capability**: `encrypt` +- **Body**: `items` (array of objects with `plaintext`, `metadata`, `ttl`). +- **Limit**: Maximum 100 items per batch. + +Generates tokens for multiple plaintext values in a single atomic operation. If any item fails (e.g., invalid format), the entire batch is rejected. 
+ +```bash +curl -X POST http://localhost:8080/v1/tokenization/keys/payment-cards/tokenize-batch \ + -H "Authorization: Bearer " \ + -H "Content-Type: application/json" \ + -d '{ + "items": [ + { "plaintext": "NDUzMjAxNTExMjgzMDM2Ng==", "metadata": { "index": 1 } }, + { "plaintext": "NTQ5ODAxNTExMjgzMDM2Nw==", "metadata": { "index": 2 } } + ] + }' +``` + ### Detokenize Data - **Endpoint**: `POST /v1/tokenization/detokenize` @@ -91,6 +112,24 @@ Example response (`200 OK`): } ``` +### Detokenize Data (Batch) + +- **Endpoint**: `POST /v1/tokenization/detokenize-batch` +- **Capability**: `decrypt` +- **Body**: `{"tokens": ["string", "string"]}` +- **Limit**: Maximum 100 tokens per batch. + +Retrieves original plaintext values for multiple tokens in a single atomic operation. + +```bash +curl -X POST http://localhost:8080/v1/tokenization/detokenize-batch \ + -H "Authorization: Bearer " \ + -H "Content-Type: application/json" \ + -d '{ + "tokens": ["4532015112830366", "5498015112830367"] + }' +``` + ### Validate and Revoke - `POST /v1/tokenization/validate` (Capability: `read`) - Check if token is valid without returning plaintext. diff --git a/docs/openapi.yaml b/docs/openapi.yaml index 0fb7d79..d7e4579 100644 --- a/docs/openapi.yaml +++ b/docs/openapi.yaml @@ -911,6 +911,46 @@ paths: $ref: "#/components/responses/ValidationError" "429": $ref: "#/components/responses/TooManyRequests" + /v1/tokenization/keys/{name}/tokenize-batch: + post: + tags: [tokenization] + summary: Tokenize multiple plaintexts in batch + description: Generates tokens for multiple plaintext values in a single atomic operation. 
+ security: + - bearerAuth: [] + parameters: + - name: name + in: path + required: true + schema: + type: string + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/TokenizeBatchRequest" + responses: + "201": + description: Tokens created + content: + application/json: + schema: + $ref: "#/components/schemas/TokenizeBatchResponse" + "401": + $ref: "#/components/responses/Unauthorized" + "403": + $ref: "#/components/responses/Forbidden" + "404": + description: Tokenization key not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "422": + $ref: "#/components/responses/ValidationError" + "429": + $ref: "#/components/responses/TooManyRequests" /v1/tokenization/detokenize: post: tags: [tokenization] @@ -944,6 +984,40 @@ paths: $ref: "#/components/responses/ValidationError" "429": $ref: "#/components/responses/TooManyRequests" + /v1/tokenization/detokenize-batch: + post: + tags: [tokenization] + summary: Detokenize multiple tokens in batch + description: Retrieves original plaintext values for multiple tokens in a single atomic operation. 
+ security: + - bearerAuth: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/DetokenizeBatchRequest" + responses: + "200": + description: Plaintexts resolved + content: + application/json: + schema: + $ref: "#/components/schemas/DetokenizeBatchResponse" + "401": + $ref: "#/components/responses/Unauthorized" + "403": + $ref: "#/components/responses/Forbidden" + "404": + description: One or more tokens not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "422": + $ref: "#/components/responses/ValidationError" + "429": + $ref: "#/components/responses/TooManyRequests" /v1/tokenization/validate: post: tags: [tokenization] @@ -1455,6 +1529,16 @@ components: type: integer minimum: 1 required: [plaintext] + TokenizeBatchRequest: + type: object + properties: + items: + type: array + minItems: 1 + maxItems: 100 + items: + $ref: "#/components/schemas/TokenizeRequest" + required: [items] TokenizeResponse: type: object properties: @@ -1471,12 +1555,30 @@ components: format: date-time nullable: true required: [token, created_at] + TokenizeBatchResponse: + type: object + properties: + items: + type: array + items: + $ref: "#/components/schemas/TokenizeResponse" + required: [items] DetokenizeRequest: type: object properties: token: type: string required: [token] + DetokenizeBatchRequest: + type: object + properties: + tokens: + type: array + minItems: 1 + maxItems: 100 + items: + type: string + required: [tokens] DetokenizeResponse: type: object properties: @@ -1487,6 +1589,14 @@ components: type: object additionalProperties: true required: [plaintext] + DetokenizeBatchResponse: + type: object + properties: + items: + type: array + items: + $ref: "#/components/schemas/DetokenizeResponse" + required: [items] ValidateTokenRequest: type: object properties: diff --git a/internal/app/di_tokenization.go b/internal/app/di_tokenization.go index 66a58f7..8aa1b07 100644 --- 
a/internal/app/di_tokenization.go +++ b/internal/app/di_tokenization.go @@ -249,6 +249,11 @@ func (c *Container) initTokenizationKeyUseCase( func (c *Container) initTokenizationUseCase( ctx context.Context, ) (tokenizationUseCase.TokenizationUseCase, error) { + txManager, err := c.TxManager(ctx) + if err != nil { + return nil, fmt.Errorf("failed to get tx manager for tokenization use case: %w", err) + } + tokenizationKeyRepository, err := c.TokenizationKeyRepository(ctx) if err != nil { return nil, fmt.Errorf( @@ -279,6 +284,7 @@ func (c *Container) initTokenizationUseCase( } baseUseCase := tokenizationUseCase.NewTokenizationUseCase( + txManager, tokenizationKeyRepository, tokenRepository, dekRepository, diff --git a/internal/http/server.go b/internal/http/server.go index f3a183c..beb440d 100644 --- a/internal/http/server.go +++ b/internal/http/server.go @@ -408,6 +408,12 @@ func (s *Server) registerTokenizationRoutes( authHTTP.AuthorizationMiddleware(authDomain.EncryptCapability, auditLogUseCase, s.logger), tokenizationHandler.TokenizeHandler, ) + + // Tokenize batch of plaintexts with tokenization key + keys.POST("/:name/tokenize-batch", + authHTTP.AuthorizationMiddleware(authDomain.EncryptCapability, auditLogUseCase, s.logger), + tokenizationHandler.TokenizeBatchHandler, + ) } // Detokenize token to retrieve plaintext @@ -416,6 +422,12 @@ func (s *Server) registerTokenizationRoutes( tokenizationHandler.DetokenizeHandler, ) + // Detokenize batch of tokens to retrieve plaintexts + tokenization.POST("/detokenize-batch", + authHTTP.AuthorizationMiddleware(authDomain.DecryptCapability, auditLogUseCase, s.logger), + tokenizationHandler.DetokenizeBatchHandler, + ) + // Validate token existence and validity tokenization.POST("/validate", authHTTP.AuthorizationMiddleware(authDomain.ReadCapability, auditLogUseCase, s.logger), diff --git a/internal/tokenization/http/dto/request.go b/internal/tokenization/http/dto/request.go index 94799da..99ab06a 100644 --- 
a/internal/tokenization/http/dto/request.go +++ b/internal/tokenization/http/dto/request.go @@ -84,6 +84,21 @@ func (r *TokenizeRequest) Validate() error { ) } +// TokenizeBatchRequest contains the parameters for tokenizing multiple values. +type TokenizeBatchRequest struct { + Items []TokenizeRequest `json:"items"` +} + +// Validate checks if the tokenize batch request is valid. +func (r *TokenizeBatchRequest) Validate() error { + return validation.ValidateStruct(r, + validation.Field(&r.Items, + validation.Required, + validation.Length(1, 100), + ), + ) +} + // DetokenizeRequest contains the parameters for detokenizing a value. type DetokenizeRequest struct { Token string `json:"token"` @@ -99,6 +114,22 @@ func (r *DetokenizeRequest) Validate() error { ) } +// DetokenizeBatchRequest contains the parameters for detokenizing multiple values. +type DetokenizeBatchRequest struct { + Tokens []string `json:"tokens"` +} + +// Validate checks if the detokenize batch request is valid. +func (r *DetokenizeBatchRequest) Validate() error { + return validation.ValidateStruct(r, + validation.Field(&r.Tokens, + validation.Required, + validation.Length(1, 100), + validation.Each(validation.Required, customValidation.NotBlank), + ), + ) +} + // ValidateTokenRequest contains the parameters for validating a token. 
type ValidateTokenRequest struct { Token string `json:"token"` diff --git a/internal/tokenization/http/dto/request_test.go b/internal/tokenization/http/dto/request_test.go index f3ccdeb..52cf0e6 100644 --- a/internal/tokenization/http/dto/request_test.go +++ b/internal/tokenization/http/dto/request_test.go @@ -477,3 +477,66 @@ func TestValidateAlgorithm(t *testing.T) { assert.Error(t, err) }) } + +func TestTokenizeBatchRequest_Validate(t *testing.T) { + t.Run("Success_ValidRequest", func(t *testing.T) { + req := TokenizeBatchRequest{ + Items: []TokenizeRequest{ + {Plaintext: "SGVsbG8="}, + {Plaintext: "V29ybGQ="}, + }, + } + err := req.Validate() + assert.NoError(t, err) + }) + + t.Run("Error_EmptyItems", func(t *testing.T) { + req := TokenizeBatchRequest{ + Items: []TokenizeRequest{}, + } + err := req.Validate() + assert.Error(t, err) + }) + + t.Run("Error_TooManyItems", func(t *testing.T) { + req := TokenizeBatchRequest{ + Items: make([]TokenizeRequest, 101), + } + err := req.Validate() + assert.Error(t, err) + }) +} + +func TestDetokenizeBatchRequest_Validate(t *testing.T) { + t.Run("Success_ValidRequest", func(t *testing.T) { + req := DetokenizeBatchRequest{ + Tokens: []string{"t1", "t2"}, + } + err := req.Validate() + assert.NoError(t, err) + }) + + t.Run("Error_EmptyTokens", func(t *testing.T) { + req := DetokenizeBatchRequest{ + Tokens: []string{}, + } + err := req.Validate() + assert.Error(t, err) + }) + + t.Run("Error_TooManyTokens", func(t *testing.T) { + req := DetokenizeBatchRequest{ + Tokens: make([]string, 101), + } + err := req.Validate() + assert.Error(t, err) + }) + + t.Run("Error_BlankTokenInBatch", func(t *testing.T) { + req := DetokenizeBatchRequest{ + Tokens: []string{"t1", " "}, + } + err := req.Validate() + assert.Error(t, err) + }) +} diff --git a/internal/tokenization/http/dto/response.go b/internal/tokenization/http/dto/response.go index 15ae86c..f7b4ab1 100644 --- a/internal/tokenization/http/dto/response.go +++ 
b/internal/tokenization/http/dto/response.go @@ -49,12 +49,46 @@ func MapTokenToTokenizeResponse(token *tokenizationDomain.Token) TokenizeRespons } } +// TokenizeBatchResponse represents the result of tokenizing multiple values. +type TokenizeBatchResponse struct { + Items []TokenizeResponse `json:"items"` +} + +// MapTokensToTokenizeBatchResponse converts multiple domain tokens to a tokenize batch API response. +func MapTokensToTokenizeBatchResponse(tokens []*tokenizationDomain.Token) TokenizeBatchResponse { + items := make([]TokenizeResponse, len(tokens)) + for i, token := range tokens { + items[i] = MapTokenToTokenizeResponse(token) + } + return TokenizeBatchResponse{Items: items} +} + // DetokenizeResponse represents the result of detokenizing a token. type DetokenizeResponse struct { Plaintext string `json:"plaintext"` // Base64-encoded plaintext Metadata map[string]any `json:"metadata,omitempty"` } +// DetokenizeBatchResponse represents the result of detokenizing multiple tokens. +type DetokenizeBatchResponse struct { + Items []DetokenizeResponse `json:"items"` +} + +// MapPlaintextsToDetokenizeBatchResponse converts multiple plaintexts and metadatas to a detokenize batch API response. +func MapPlaintextsToDetokenizeBatchResponse( + plaintexts []string, + metadatas []map[string]any, +) DetokenizeBatchResponse { + items := make([]DetokenizeResponse, len(plaintexts)) + for i := range plaintexts { + items[i] = DetokenizeResponse{ + Plaintext: plaintexts[i], + Metadata: metadatas[i], + } + } + return DetokenizeBatchResponse{Items: items} +} + // ValidateTokenResponse represents the result of validating a token. 
type ValidateTokenResponse struct { Valid bool `json:"valid"` diff --git a/internal/tokenization/http/dto/response_test.go b/internal/tokenization/http/dto/response_test.go index c88dc42..06a3908 100644 --- a/internal/tokenization/http/dto/response_test.go +++ b/internal/tokenization/http/dto/response_test.go @@ -258,3 +258,39 @@ func TestValidateTokenResponse(t *testing.T) { assert.False(t, response.Valid) }) } + +func TestMapTokensToTokenizeBatchResponse(t *testing.T) { + now := time.Now().UTC() + tokens := []*tokenizationDomain.Token{ + { + Token: "t1", + CreatedAt: now, + }, + { + Token: "t2", + CreatedAt: now, + }, + } + + response := MapTokensToTokenizeBatchResponse(tokens) + + assert.Len(t, response.Items, 2) + assert.Equal(t, "t1", response.Items[0].Token) + assert.Equal(t, "t2", response.Items[1].Token) +} + +func TestMapPlaintextsToDetokenizeBatchResponse(t *testing.T) { + plaintexts := []string{"p1", "p2"} + metadatas := []map[string]any{ + {"k1": "v1"}, + {"k2": "v2"}, + } + + response := MapPlaintextsToDetokenizeBatchResponse(plaintexts, metadatas) + + assert.Len(t, response.Items, 2) + assert.Equal(t, "p1", response.Items[0].Plaintext) + assert.Equal(t, "v1", response.Items[0].Metadata["k1"]) + assert.Equal(t, "p2", response.Items[1].Plaintext) + assert.Equal(t, "v2", response.Items[1].Metadata["k2"]) +} diff --git a/internal/tokenization/http/tokenization_handler.go b/internal/tokenization/http/tokenization_handler.go index 10274e1..2b35e34 100644 --- a/internal/tokenization/http/tokenization_handler.go +++ b/internal/tokenization/http/tokenization_handler.go @@ -97,6 +97,87 @@ func (h *TokenizationHandler) TokenizeHandler(c *gin.Context) { c.JSON(http.StatusCreated, response) } +// TokenizeBatchHandler generates tokens for multiple plaintext values using the named key. +// POST /v1/tokenization/keys/:name/tokenize-batch - Requires EncryptCapability. +// Wrapped in a transaction for atomicity. 
+// Returns 201 Created with a batch of tokens and metadata. +func (h *TokenizationHandler) TokenizeBatchHandler(c *gin.Context) { + var req dto.TokenizeBatchRequest + + // Parse and bind JSON + if err := c.ShouldBindJSON(&req); err != nil { + httputil.HandleBadRequestGin(c, err, h.logger) + return + } + + // Validate request + if err := req.Validate(); err != nil { + httputil.HandleValidationErrorGin(c, customValidation.WrapValidationError(err), h.logger) + return + } + + // Get key name from URL parameter + keyName := c.Param("name") + if keyName == "" { + httputil.HandleBadRequestGin(c, + fmt.Errorf("key name is required in URL path"), + h.logger) + return + } + + // Prepare data for use case + plaintexts := make([][]byte, len(req.Items)) + metadatas := make([]map[string]any, len(req.Items)) + var commonExpiresAt *time.Time + + for i, item := range req.Items { + // Decode base64 plaintext + plaintext, err := base64.StdEncoding.DecodeString(item.Plaintext) + if err != nil { + httputil.HandleBadRequestGin(c, + fmt.Errorf("item %d: plaintext must be valid base64", i), + h.logger) + return + } + plaintexts[i] = plaintext + + // Setup metadata + metadatas[i] = item.Metadata + + // Note: The usecase currently takes a single expiresAt for the batch. + // For simplicity, we'll use the TTL of the first item if provided. + // A more advanced implementation could support individual TTLs if the usecase is updated. 
+ if i == 0 && item.TTL != nil { + expiry := time.Now().UTC().Add(time.Duration(*item.TTL) * time.Second) + commonExpiresAt = &expiry + } + } + + // SECURITY: Ensure plaintexts are zeroed after use + defer func() { + for _, p := range plaintexts { + cryptoDomain.Zero(p) + } + }() + + // Call use case + tokens, err := h.tokenizationUseCase.TokenizeBatch( + c.Request.Context(), + keyName, + plaintexts, + metadatas, + commonExpiresAt, + ) + if err != nil { + httputil.HandleErrorGin(c, err, h.logger) + return + } + + // Return response + response := dto.MapTokensToTokenizeBatchResponse(tokens) + c.JSON(http.StatusCreated, response) +} + // DetokenizeHandler retrieves the original plaintext value for a given token. // POST /v1/tokenization/detokenize - Requires DecryptCapability. // Returns 200 OK with base64-encoded plaintext and metadata. @@ -138,6 +219,53 @@ func (h *TokenizationHandler) DetokenizeHandler(c *gin.Context) { c.JSON(http.StatusOK, response) } +// DetokenizeBatchHandler retrieves original plaintext values for multiple tokens. +// POST /v1/tokenization/detokenize-batch - Requires DecryptCapability. +// Wrapped in a transaction for atomicity. +// Returns 200 OK with a batch of base64-encoded plaintexts and metadata. 
+func (h *TokenizationHandler) DetokenizeBatchHandler(c *gin.Context) { + var req dto.DetokenizeBatchRequest + + // Parse and bind JSON + if err := c.ShouldBindJSON(&req); err != nil { + httputil.HandleBadRequestGin(c, err, h.logger) + return + } + + // Validate request + if err := req.Validate(); err != nil { + httputil.HandleValidationErrorGin(c, customValidation.WrapValidationError(err), h.logger) + return + } + + // Call use case + plaintexts, metadatas, err := h.tokenizationUseCase.DetokenizeBatch( + c.Request.Context(), + req.Tokens, + ) + if err != nil { + httputil.HandleErrorGin(c, err, h.logger) + return + } + + // SECURITY: Ensure plaintexts are zeroed after encoding + defer func() { + for _, p := range plaintexts { + cryptoDomain.Zero(p) + } + }() + + // Encode plaintexts as base64 for JSON response + plaintextB64s := make([]string, len(plaintexts)) + for i, p := range plaintexts { + plaintextB64s[i] = base64.StdEncoding.EncodeToString(p) + } + + // Return response + response := dto.MapPlaintextsToDetokenizeBatchResponse(plaintextB64s, metadatas) + c.JSON(http.StatusOK, response) +} + // ValidateHandler checks if a token exists and is valid (not expired or revoked). // POST /v1/tokenization/validate - Requires ReadCapability. // Returns 200 OK with validation result. 
diff --git a/internal/tokenization/http/tokenization_handler_test.go b/internal/tokenization/http/tokenization_handler_test.go index 6b513c2..0520bdb 100644 --- a/internal/tokenization/http/tokenization_handler_test.go +++ b/internal/tokenization/http/tokenization_handler_test.go @@ -437,6 +437,105 @@ func TestTokenizationHandler_ValidateHandler(t *testing.T) { }) } +func TestTokenizationHandler_TokenizeBatchHandler(t *testing.T) { + t.Run("Success_TokenizeBatch", func(t *testing.T) { + handler, mockUseCase := setupTestTokenizationHandler(t) + + plaintext1 := []byte("p1") + plaintext2 := []byte("p2") + plaintext1B64 := base64.StdEncoding.EncodeToString(plaintext1) + plaintext2B64 := base64.StdEncoding.EncodeToString(plaintext2) + + request := dto.TokenizeBatchRequest{ + Items: []dto.TokenizeRequest{ + {Plaintext: plaintext1B64}, + {Plaintext: plaintext2B64}, + }, + } + + expectedTokens := []*tokenizationDomain.Token{ + {Token: "t1", CreatedAt: time.Now()}, + {Token: "t2", CreatedAt: time.Now()}, + } + + mockUseCase.EXPECT(). + TokenizeBatch(mock.Anything, "test-key", mock.Anything, mock.Anything, mock.Anything). + Return(expectedTokens, nil). 
+ Once() + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/keys/test-key/tokenize-batch", request) + c.Params = gin.Params{{Key: "name", Value: "test-key"}} + + handler.TokenizeBatchHandler(c) + + assert.Equal(t, http.StatusCreated, w.Code) + + var response dto.TokenizeBatchResponse + err := json.Unmarshal(w.Body.Bytes(), &response) + assert.NoError(t, err) + assert.Len(t, response.Items, 2) + assert.Equal(t, "t1", response.Items[0].Token) + assert.Equal(t, "t2", response.Items[1].Token) + }) + + t.Run("Error_InvalidJSON", func(t *testing.T) { + handler, _ := setupTestTokenizationHandler(t) + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/keys/test-key/tokenize-batch", nil) + c.Request.Body = io.NopCloser(bytes.NewReader([]byte("invalid json"))) + c.Params = gin.Params{{Key: "name", Value: "test-key"}} + + handler.TokenizeBatchHandler(c) + + assert.Equal(t, http.StatusBadRequest, w.Code) + }) +} + +func TestTokenizationHandler_DetokenizeBatchHandler(t *testing.T) { + t.Run("Success_DetokenizeBatch", func(t *testing.T) { + handler, mockUseCase := setupTestTokenizationHandler(t) + + tokens := []string{"t1", "t2"} + plaintexts := [][]byte{[]byte("p1"), []byte("p2")} + plaintext1Copy := []byte("p1") + plaintext2Copy := []byte("p2") + metadatas := []map[string]any{nil, nil} + + request := dto.DetokenizeBatchRequest{ + Tokens: tokens, + } + + mockUseCase.EXPECT(). + DetokenizeBatch(mock.Anything, tokens). + Return(plaintexts, metadatas, nil). 
+ Once() + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/detokenize-batch", request) + + handler.DetokenizeBatchHandler(c) + + assert.Equal(t, http.StatusOK, w.Code) + + var response dto.DetokenizeBatchResponse + err := json.Unmarshal(w.Body.Bytes(), &response) + assert.NoError(t, err) + assert.Len(t, response.Items, 2) + assert.Equal(t, base64.StdEncoding.EncodeToString(plaintext1Copy), response.Items[0].Plaintext) + assert.Equal(t, base64.StdEncoding.EncodeToString(plaintext2Copy), response.Items[1].Plaintext) + }) + + t.Run("Error_InvalidJSON", func(t *testing.T) { + handler, _ := setupTestTokenizationHandler(t) + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/detokenize-batch", nil) + c.Request.Body = io.NopCloser(bytes.NewReader([]byte("invalid json"))) + + handler.DetokenizeBatchHandler(c) + + assert.Equal(t, http.StatusBadRequest, w.Code) + }) +} + func TestTokenizationHandler_RevokeHandler(t *testing.T) { t.Run("Success_RevokeToken", func(t *testing.T) { handler, mockUseCase := setupTestTokenizationHandler(t) diff --git a/internal/tokenization/repository/mysql/mysql_repository.go b/internal/tokenization/repository/mysql/mysql_repository.go index 568da0a..d896675 100644 --- a/internal/tokenization/repository/mysql/mysql_repository.go +++ b/internal/tokenization/repository/mysql/mysql_repository.go @@ -423,6 +423,23 @@ func (m *MySQLTokenRepository) Create( return nil } +// CreateBatch inserts multiple token mappings into the MySQL database. +func (m *MySQLTokenRepository) CreateBatch( + ctx context.Context, + tokens []*tokenizationDomain.Token, +) error { + if len(tokens) == 0 { + return nil + } + + for _, token := range tokens { + if err := m.Create(ctx, token); err != nil { + return apperrors.Wrap(err, "failed to create token in batch") + } + } + return nil +} + // GetByToken retrieves a token mapping by its token string. 
func (m *MySQLTokenRepository) GetByToken( ctx context.Context, @@ -475,6 +492,90 @@ func (m *MySQLTokenRepository) GetByToken( return &token, nil } +// GetBatchByTokens retrieves multiple token mappings by their token strings. +func (m *MySQLTokenRepository) GetBatchByTokens( + ctx context.Context, + tokenStrings []string, +) ([]*tokenizationDomain.Token, error) { + if len(tokenStrings) == 0 { + return []*tokenizationDomain.Token{}, nil + } + + querier := database.GetTx(ctx, m.db) + + query := `SELECT id, tokenization_key_id, token, value_hash, ciphertext, nonce, metadata, created_at, expires_at, revoked_at + FROM tokenization_tokens + WHERE token IN (` + + args := make([]interface{}, len(tokenStrings)) + for i, t := range tokenStrings { + if i > 0 { + query += ", " + } + query += "?" + args[i] = t + } + query += ")" + + rows, err := querier.QueryContext(ctx, query, args...) + if err != nil { + return nil, apperrors.Wrap(err, "failed to get tokens by batch") + } + defer func() { + _ = rows.Close() + }() + + var tokens []*tokenizationDomain.Token + for rows.Next() { + var token tokenizationDomain.Token + var id, keyID []byte + var metadataJSON []byte + + err := rows.Scan( + &id, + &keyID, + &token.Token, + &token.ValueHash, + &token.Ciphertext, + &token.Nonce, + &metadataJSON, + &token.CreatedAt, + &token.ExpiresAt, + &token.RevokedAt, + ) + if err != nil { + return nil, apperrors.Wrap(err, "failed to scan token") + } + + if err := token.ID.UnmarshalBinary(id); err != nil { + return nil, apperrors.Wrap(err, "failed to unmarshal token id") + } + + if err := token.TokenizationKeyID.UnmarshalBinary(keyID); err != nil { + return nil, apperrors.Wrap(err, "failed to unmarshal tokenization key id") + } + + // Parse metadata if present + if len(metadataJSON) > 0 { + if err := json.Unmarshal(metadataJSON, &token.Metadata); err != nil { + return nil, apperrors.Wrap(err, "failed to unmarshal metadata") + } + } + + tokens = append(tokens, &token) + } + + if err := rows.Err(); 
err != nil { + return nil, apperrors.Wrap(err, "error iterating tokens") + } + + if tokens == nil { + tokens = make([]*tokenizationDomain.Token, 0) + } + + return tokens, nil +} + // GetByValueHash retrieves a token by its value hash (for deterministic mode). func (m *MySQLTokenRepository) GetByValueHash( ctx context.Context, diff --git a/internal/tokenization/repository/mysql/mysql_repository_test.go b/internal/tokenization/repository/mysql/mysql_repository_test.go index 487891f..aa3589d 100644 --- a/internal/tokenization/repository/mysql/mysql_repository_test.go +++ b/internal/tokenization/repository/mysql/mysql_repository_test.go @@ -14,6 +14,7 @@ import ( cryptoDomain "github.com/allisson/secrets/internal/crypto/domain" cryptoRepository "github.com/allisson/secrets/internal/crypto/repository/mysql" + apperrors "github.com/allisson/secrets/internal/errors" "github.com/allisson/secrets/internal/testutil" tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" ) @@ -762,3 +763,111 @@ func TestMySQLTokenizationKeyRepository_HardDelete(t *testing.T) { assert.True(t, exists) }) } + +func TestMySQLTokenRepository_CreateBatch(t *testing.T) { + db := testutil.SetupMySQLDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupMySQLDB(t, db) + + repo := NewMySQLTokenRepository(db) + ctx := context.Background() + + keyID := createTokenizationKeyMySQL(t, db) + + tokens := []*tokenizationDomain.Token{ + { + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: keyID, + Token: "tok_batch1", + Ciphertext: []byte("enc1"), + Nonce: []byte("nonce1"), + CreatedAt: time.Now().UTC(), + }, + { + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: keyID, + Token: "tok_batch2", + Ciphertext: []byte("enc2"), + Nonce: []byte("nonce2"), + CreatedAt: time.Now().UTC(), + }, + } + + err := repo.CreateBatch(ctx, tokens) + require.NoError(t, err) + + // Verify + for _, tok := range tokens { + retrieved, err := repo.GetByToken(ctx, tok.Token) + require.NoError(t, err) + 
assert.Equal(t, tok.ID, retrieved.ID) + } + + // Test conflict + err = repo.CreateBatch(ctx, tokens) + assert.Error(t, err) + assert.ErrorIs(t, err, apperrors.ErrConflict) + + // Test empty batch + err = repo.CreateBatch(ctx, []*tokenizationDomain.Token{}) + assert.NoError(t, err) +} + +func TestMySQLTokenRepository_GetBatchByTokens(t *testing.T) { + db := testutil.SetupMySQLDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupMySQLDB(t, db) + + repo := NewMySQLTokenRepository(db) + ctx := context.Background() + + keyID := createTokenizationKeyMySQL(t, db) + + tokens := []*tokenizationDomain.Token{ + { + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: keyID, + Token: "tok_get_batch1", + Ciphertext: []byte("enc1"), + Nonce: []byte("nonce1"), + CreatedAt: time.Now().UTC(), + }, + { + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: keyID, + Token: "tok_get_batch2", + Ciphertext: []byte("enc2"), + Nonce: []byte("nonce2"), + CreatedAt: time.Now().UTC(), + }, + } + + for _, tok := range tokens { + require.NoError(t, repo.Create(ctx, tok)) + } + + // Test GetBatchByTokens + tokenStrings := []string{"tok_get_batch1", "tok_get_batch2", "non-existent"} + retrieved, err := repo.GetBatchByTokens(ctx, tokenStrings) + require.NoError(t, err) + assert.Len(t, retrieved, 2) + + found1 := false + found2 := false + for _, r := range retrieved { + if r.Token == "tok_get_batch1" { + found1 = true + } + if r.Token == "tok_get_batch2" { + found2 = true + } + } + assert.True(t, found1) + assert.True(t, found2) + + // Test empty tokens + retrieved, err = repo.GetBatchByTokens(ctx, []string{}) + require.NoError(t, err) + assert.Empty(t, retrieved) +} + diff --git a/internal/tokenization/repository/postgresql/postgresql_repository.go b/internal/tokenization/repository/postgresql/postgresql_repository.go index 18f7551..86272ee 100644 --- a/internal/tokenization/repository/postgresql/postgresql_repository.go +++ 
b/internal/tokenization/repository/postgresql/postgresql_repository.go @@ -364,6 +364,23 @@ func (p *PostgreSQLTokenRepository) Create( return nil } +// CreateBatch inserts multiple token mappings into the PostgreSQL database. +func (p *PostgreSQLTokenRepository) CreateBatch( + ctx context.Context, + tokens []*tokenizationDomain.Token, +) error { + if len(tokens) == 0 { + return nil + } + + for _, token := range tokens { + if err := p.Create(ctx, token); err != nil { + return apperrors.Wrap(err, "failed to create token in batch") + } + } + return nil +} + // GetByToken retrieves a token mapping by its token string. func (p *PostgreSQLTokenRepository) GetByToken( ctx context.Context, @@ -407,6 +424,71 @@ func (p *PostgreSQLTokenRepository) GetByToken( return &token, nil } +// GetBatchByTokens retrieves multiple token mappings by their token strings. +func (p *PostgreSQLTokenRepository) GetBatchByTokens( + ctx context.Context, + tokenStrings []string, +) ([]*tokenizationDomain.Token, error) { + if len(tokenStrings) == 0 { + return []*tokenizationDomain.Token{}, nil + } + + querier := database.GetTx(ctx, p.db) + + query := `SELECT id, tokenization_key_id, token, value_hash, ciphertext, nonce, metadata, created_at, expires_at, revoked_at + FROM tokenization_tokens + WHERE token = ANY($1)` + + rows, err := querier.QueryContext(ctx, query, pq.Array(tokenStrings)) + if err != nil { + return nil, apperrors.Wrap(err, "failed to get tokens by batch") + } + defer func() { + _ = rows.Close() + }() + + var tokens []*tokenizationDomain.Token + for rows.Next() { + var token tokenizationDomain.Token + var metadataJSON []byte + + err := rows.Scan( + &token.ID, + &token.TokenizationKeyID, + &token.Token, + &token.ValueHash, + &token.Ciphertext, + &token.Nonce, + &metadataJSON, + &token.CreatedAt, + &token.ExpiresAt, + &token.RevokedAt, + ) + if err != nil { + return nil, apperrors.Wrap(err, "failed to scan token") + } + + // Parse metadata if present + if len(metadataJSON) > 0 { + 
if err := json.Unmarshal(metadataJSON, &token.Metadata); err != nil { + return nil, apperrors.Wrap(err, "failed to unmarshal metadata") + } + } + + tokens = append(tokens, &token) + } + + if err := rows.Err(); err != nil { + return nil, apperrors.Wrap(err, "error iterating tokens") + } + + if tokens == nil { + tokens = make([]*tokenizationDomain.Token, 0) + } + + return tokens, nil +} + // GetByValueHash retrieves a token by its value hash (for deterministic mode). func (p *PostgreSQLTokenRepository) GetByValueHash( ctx context.Context, diff --git a/internal/tokenization/repository/postgresql/postgresql_repository_test.go b/internal/tokenization/repository/postgresql/postgresql_repository_test.go index 66cdb68..65671b6 100644 --- a/internal/tokenization/repository/postgresql/postgresql_repository_test.go +++ b/internal/tokenization/repository/postgresql/postgresql_repository_test.go @@ -14,6 +14,7 @@ import ( cryptoDomain "github.com/allisson/secrets/internal/crypto/domain" cryptoRepository "github.com/allisson/secrets/internal/crypto/repository/postgresql" + apperrors "github.com/allisson/secrets/internal/errors" "github.com/allisson/secrets/internal/testutil" tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" ) @@ -758,3 +759,102 @@ func TestPostgreSQLTokenizationKeyRepository_HardDelete(t *testing.T) { assert.True(t, exists) }) } + +func TestPostgreSQLTokenRepository_CreateBatch(t *testing.T) { + db := testutil.SetupPostgresDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupPostgresDB(t, db) + + repo := NewPostgreSQLTokenRepository(db) + ctx := context.Background() + + keyID := createTokenizationKey(t, db) + + tokens := []*tokenizationDomain.Token{ + { + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: keyID, + Token: "tok_batch1", + Ciphertext: []byte("enc1"), + Nonce: []byte("nonce1"), + CreatedAt: time.Now().UTC(), + }, + { + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: keyID, + Token: "tok_batch2", + Ciphertext: 
[]byte("enc2"), + Nonce: []byte("nonce2"), + CreatedAt: time.Now().UTC(), + }, + } + + err := repo.CreateBatch(ctx, tokens) + require.NoError(t, err) + + // Verify + for _, tok := range tokens { + retrieved, err := repo.GetByToken(ctx, tok.Token) + require.NoError(t, err) + assert.Equal(t, tok.ID, retrieved.ID) + } + + // Test conflict + err = repo.CreateBatch(ctx, tokens) + assert.Error(t, err) + assert.ErrorIs(t, err, apperrors.ErrConflict) +} + +func TestPostgreSQLTokenRepository_GetBatchByTokens(t *testing.T) { + db := testutil.SetupPostgresDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupPostgresDB(t, db) + + repo := NewPostgreSQLTokenRepository(db) + ctx := context.Background() + + keyID := createTokenizationKey(t, db) + + tokens := []*tokenizationDomain.Token{ + { + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: keyID, + Token: "tok_get_batch1", + Ciphertext: []byte("enc1"), + Nonce: []byte("nonce1"), + CreatedAt: time.Now().UTC(), + }, + { + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: keyID, + Token: "tok_get_batch2", + Ciphertext: []byte("enc2"), + Nonce: []byte("nonce2"), + CreatedAt: time.Now().UTC(), + }, + } + + for _, tok := range tokens { + require.NoError(t, repo.Create(ctx, tok)) + } + + // Test GetBatchByTokens + tokenStrings := []string{"tok_get_batch1", "tok_get_batch2", "non-existent"} + retrieved, err := repo.GetBatchByTokens(ctx, tokenStrings) + require.NoError(t, err) + assert.Len(t, retrieved, 2) + + found1 := false + found2 := false + for _, r := range retrieved { + if r.Token == "tok_get_batch1" { + found1 = true + } + if r.Token == "tok_get_batch2" { + found2 = true + } + } + assert.True(t, found1) + assert.True(t, found2) +} + diff --git a/internal/tokenization/usecase/interface.go b/internal/tokenization/usecase/interface.go index 6cf98f8..11f852f 100644 --- a/internal/tokenization/usecase/interface.go +++ b/internal/tokenization/usecase/interface.go @@ -51,7 +51,9 @@ type TokenizationKeyRepository interface { 
// TokenRepository defines the interface for token mapping persistence. type TokenRepository interface { Create(ctx context.Context, token *tokenizationDomain.Token) error + CreateBatch(ctx context.Context, tokens []*tokenizationDomain.Token) error GetByToken(ctx context.Context, token string) (*tokenizationDomain.Token, error) + GetBatchByTokens(ctx context.Context, tokens []string) ([]*tokenizationDomain.Token, error) GetByValueHash(ctx context.Context, keyID uuid.UUID, valueHash string) (*tokenizationDomain.Token, error) Revoke(ctx context.Context, token string) error @@ -125,11 +127,28 @@ type TokenizationUseCase interface { expiresAt *time.Time, ) (*tokenizationDomain.Token, error) + // TokenizeBatch generates tokens for multiple plaintext values using the latest version of the named key. + // Wrapped in a transaction for atomicity. + TokenizeBatch( + ctx context.Context, + keyName string, + plaintexts [][]byte, + metadatas []map[string]any, + expiresAt *time.Time, + ) ([]*tokenizationDomain.Token, error) + // Detokenize retrieves the original plaintext value for a given token. // Returns ErrTokenNotFound if token doesn't exist, ErrTokenExpired if expired, ErrTokenRevoked if revoked. // Security Note: Callers MUST zero the returned plaintext after use: cryptoDomain.Zero(plaintext). Detokenize(ctx context.Context, token string) (plaintext []byte, metadata map[string]any, err error) + // DetokenizeBatch retrieves original plaintext values for multiple tokens. + // Wrapped in a transaction for atomicity. + DetokenizeBatch( + ctx context.Context, + tokens []string, + ) (plaintexts [][]byte, metadatas []map[string]any, err error) + // Validate checks if a token exists and is valid (not expired or revoked). 
Validate(ctx context.Context, token string) (bool, error) diff --git a/internal/tokenization/usecase/mocks/mocks.go b/internal/tokenization/usecase/mocks/mocks.go index fb82cc3..24ef2c1 100644 --- a/internal/tokenization/usecase/mocks/mocks.go +++ b/internal/tokenization/usecase/mocks/mocks.go @@ -897,6 +897,63 @@ func (_c *MockTokenRepository_Create_Call) RunAndReturn(run func(ctx context.Con return _c } +// CreateBatch provides a mock function for the type MockTokenRepository +func (_mock *MockTokenRepository) CreateBatch(ctx context.Context, tokens []*domain0.Token) error { + ret := _mock.Called(ctx, tokens) + + if len(ret) == 0 { + panic("no return value specified for CreateBatch") + } + + var r0 error + if returnFunc, ok := ret.Get(0).(func(context.Context, []*domain0.Token) error); ok { + r0 = returnFunc(ctx, tokens) + } else { + r0 = ret.Error(0) + } + return r0 +} + +// MockTokenRepository_CreateBatch_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateBatch' +type MockTokenRepository_CreateBatch_Call struct { + *mock.Call +} + +// CreateBatch is a helper method to define mock.On call +// - ctx context.Context +// - tokens []*domain0.Token +func (_e *MockTokenRepository_Expecter) CreateBatch(ctx interface{}, tokens interface{}) *MockTokenRepository_CreateBatch_Call { + return &MockTokenRepository_CreateBatch_Call{Call: _e.mock.On("CreateBatch", ctx, tokens)} +} + +func (_c *MockTokenRepository_CreateBatch_Call) Run(run func(ctx context.Context, tokens []*domain0.Token)) *MockTokenRepository_CreateBatch_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 []*domain0.Token + if args[1] != nil { + arg1 = args[1].([]*domain0.Token) + } + run( + arg0, + arg1, + ) + }) + return _c +} + +func (_c *MockTokenRepository_CreateBatch_Call) Return(err error) *MockTokenRepository_CreateBatch_Call { + _c.Call.Return(err) + return _c 
+} + +func (_c *MockTokenRepository_CreateBatch_Call) RunAndReturn(run func(ctx context.Context, tokens []*domain0.Token) error) *MockTokenRepository_CreateBatch_Call { + _c.Call.Return(run) + return _c +} + // DeleteExpired provides a mock function for the type MockTokenRepository func (_mock *MockTokenRepository) DeleteExpired(ctx context.Context, olderThan time.Time) (int64, error) { ret := _mock.Called(ctx, olderThan) @@ -963,6 +1020,74 @@ func (_c *MockTokenRepository_DeleteExpired_Call) RunAndReturn(run func(ctx cont return _c } +// GetBatchByTokens provides a mock function for the type MockTokenRepository +func (_mock *MockTokenRepository) GetBatchByTokens(ctx context.Context, tokens []string) ([]*domain0.Token, error) { + ret := _mock.Called(ctx, tokens) + + if len(ret) == 0 { + panic("no return value specified for GetBatchByTokens") + } + + var r0 []*domain0.Token + var r1 error + if returnFunc, ok := ret.Get(0).(func(context.Context, []string) ([]*domain0.Token, error)); ok { + return returnFunc(ctx, tokens) + } + if returnFunc, ok := ret.Get(0).(func(context.Context, []string) []*domain0.Token); ok { + r0 = returnFunc(ctx, tokens) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*domain0.Token) + } + } + if returnFunc, ok := ret.Get(1).(func(context.Context, []string) error); ok { + r1 = returnFunc(ctx, tokens) + } else { + r1 = ret.Error(1) + } + return r0, r1 +} + +// MockTokenRepository_GetBatchByTokens_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetBatchByTokens' +type MockTokenRepository_GetBatchByTokens_Call struct { + *mock.Call +} + +// GetBatchByTokens is a helper method to define mock.On call +// - ctx context.Context +// - tokens []string +func (_e *MockTokenRepository_Expecter) GetBatchByTokens(ctx interface{}, tokens interface{}) *MockTokenRepository_GetBatchByTokens_Call { + return &MockTokenRepository_GetBatchByTokens_Call{Call: _e.mock.On("GetBatchByTokens", ctx, tokens)} +} + +func 
(_c *MockTokenRepository_GetBatchByTokens_Call) Run(run func(ctx context.Context, tokens []string)) *MockTokenRepository_GetBatchByTokens_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 []string + if args[1] != nil { + arg1 = args[1].([]string) + } + run( + arg0, + arg1, + ) + }) + return _c +} + +func (_c *MockTokenRepository_GetBatchByTokens_Call) Return(tokens1 []*domain0.Token, err error) *MockTokenRepository_GetBatchByTokens_Call { + _c.Call.Return(tokens1, err) + return _c +} + +func (_c *MockTokenRepository_GetBatchByTokens_Call) RunAndReturn(run func(ctx context.Context, tokens []string) ([]*domain0.Token, error)) *MockTokenRepository_GetBatchByTokens_Call { + _c.Call.Return(run) + return _c +} + // GetByToken provides a mock function for the type MockTokenRepository func (_mock *MockTokenRepository) GetByToken(ctx context.Context, token string) (*domain0.Token, error) { ret := _mock.Called(ctx, token) @@ -1807,6 +1932,82 @@ func (_c *MockTokenizationUseCase_Detokenize_Call) RunAndReturn(run func(ctx con return _c } +// DetokenizeBatch provides a mock function for the type MockTokenizationUseCase +func (_mock *MockTokenizationUseCase) DetokenizeBatch(ctx context.Context, tokens []string) ([][]byte, []map[string]any, error) { + ret := _mock.Called(ctx, tokens) + + if len(ret) == 0 { + panic("no return value specified for DetokenizeBatch") + } + + var r0 [][]byte + var r1 []map[string]any + var r2 error + if returnFunc, ok := ret.Get(0).(func(context.Context, []string) ([][]byte, []map[string]any, error)); ok { + return returnFunc(ctx, tokens) + } + if returnFunc, ok := ret.Get(0).(func(context.Context, []string) [][]byte); ok { + r0 = returnFunc(ctx, tokens) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([][]byte) + } + } + if returnFunc, ok := ret.Get(1).(func(context.Context, []string) []map[string]any); ok { + r1 = returnFunc(ctx, tokens) + } else { 
+ if ret.Get(1) != nil { + r1 = ret.Get(1).([]map[string]any) + } + } + if returnFunc, ok := ret.Get(2).(func(context.Context, []string) error); ok { + r2 = returnFunc(ctx, tokens) + } else { + r2 = ret.Error(2) + } + return r0, r1, r2 +} + +// MockTokenizationUseCase_DetokenizeBatch_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DetokenizeBatch' +type MockTokenizationUseCase_DetokenizeBatch_Call struct { + *mock.Call +} + +// DetokenizeBatch is a helper method to define mock.On call +// - ctx context.Context +// - tokens []string +func (_e *MockTokenizationUseCase_Expecter) DetokenizeBatch(ctx interface{}, tokens interface{}) *MockTokenizationUseCase_DetokenizeBatch_Call { + return &MockTokenizationUseCase_DetokenizeBatch_Call{Call: _e.mock.On("DetokenizeBatch", ctx, tokens)} +} + +func (_c *MockTokenizationUseCase_DetokenizeBatch_Call) Run(run func(ctx context.Context, tokens []string)) *MockTokenizationUseCase_DetokenizeBatch_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 []string + if args[1] != nil { + arg1 = args[1].([]string) + } + run( + arg0, + arg1, + ) + }) + return _c +} + +func (_c *MockTokenizationUseCase_DetokenizeBatch_Call) Return(plaintexts [][]byte, metadatas []map[string]any, err error) *MockTokenizationUseCase_DetokenizeBatch_Call { + _c.Call.Return(plaintexts, metadatas, err) + return _c +} + +func (_c *MockTokenizationUseCase_DetokenizeBatch_Call) RunAndReturn(run func(ctx context.Context, tokens []string) ([][]byte, []map[string]any, error)) *MockTokenizationUseCase_DetokenizeBatch_Call { + _c.Call.Return(run) + return _c +} + // Revoke provides a mock function for the type MockTokenizationUseCase func (_mock *MockTokenizationUseCase) Revoke(ctx context.Context, token string) error { ret := _mock.Called(ctx, token) @@ -1950,6 +2151,92 @@ func (_c *MockTokenizationUseCase_Tokenize_Call) 
RunAndReturn(run func(ctx conte return _c } +// TokenizeBatch provides a mock function for the type MockTokenizationUseCase +func (_mock *MockTokenizationUseCase) TokenizeBatch(ctx context.Context, keyName string, plaintexts [][]byte, metadatas []map[string]any, expiresAt *time.Time) ([]*domain0.Token, error) { + ret := _mock.Called(ctx, keyName, plaintexts, metadatas, expiresAt) + + if len(ret) == 0 { + panic("no return value specified for TokenizeBatch") + } + + var r0 []*domain0.Token + var r1 error + if returnFunc, ok := ret.Get(0).(func(context.Context, string, [][]byte, []map[string]any, *time.Time) ([]*domain0.Token, error)); ok { + return returnFunc(ctx, keyName, plaintexts, metadatas, expiresAt) + } + if returnFunc, ok := ret.Get(0).(func(context.Context, string, [][]byte, []map[string]any, *time.Time) []*domain0.Token); ok { + r0 = returnFunc(ctx, keyName, plaintexts, metadatas, expiresAt) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*domain0.Token) + } + } + if returnFunc, ok := ret.Get(1).(func(context.Context, string, [][]byte, []map[string]any, *time.Time) error); ok { + r1 = returnFunc(ctx, keyName, plaintexts, metadatas, expiresAt) + } else { + r1 = ret.Error(1) + } + return r0, r1 +} + +// MockTokenizationUseCase_TokenizeBatch_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'TokenizeBatch' +type MockTokenizationUseCase_TokenizeBatch_Call struct { + *mock.Call +} + +// TokenizeBatch is a helper method to define mock.On call +// - ctx context.Context +// - keyName string +// - plaintexts [][]byte +// - metadatas []map[string]any +// - expiresAt *time.Time +func (_e *MockTokenizationUseCase_Expecter) TokenizeBatch(ctx interface{}, keyName interface{}, plaintexts interface{}, metadatas interface{}, expiresAt interface{}) *MockTokenizationUseCase_TokenizeBatch_Call { + return &MockTokenizationUseCase_TokenizeBatch_Call{Call: _e.mock.On("TokenizeBatch", ctx, keyName, plaintexts, metadatas, 
expiresAt)} +} + +func (_c *MockTokenizationUseCase_TokenizeBatch_Call) Run(run func(ctx context.Context, keyName string, plaintexts [][]byte, metadatas []map[string]any, expiresAt *time.Time)) *MockTokenizationUseCase_TokenizeBatch_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 string + if args[1] != nil { + arg1 = args[1].(string) + } + var arg2 [][]byte + if args[2] != nil { + arg2 = args[2].([][]byte) + } + var arg3 []map[string]any + if args[3] != nil { + arg3 = args[3].([]map[string]any) + } + var arg4 *time.Time + if args[4] != nil { + arg4 = args[4].(*time.Time) + } + run( + arg0, + arg1, + arg2, + arg3, + arg4, + ) + }) + return _c +} + +func (_c *MockTokenizationUseCase_TokenizeBatch_Call) Return(tokens []*domain0.Token, err error) *MockTokenizationUseCase_TokenizeBatch_Call { + _c.Call.Return(tokens, err) + return _c +} + +func (_c *MockTokenizationUseCase_TokenizeBatch_Call) RunAndReturn(run func(ctx context.Context, keyName string, plaintexts [][]byte, metadatas []map[string]any, expiresAt *time.Time) ([]*domain0.Token, error)) *MockTokenizationUseCase_TokenizeBatch_Call { + _c.Call.Return(run) + return _c +} + // Validate provides a mock function for the type MockTokenizationUseCase func (_mock *MockTokenizationUseCase) Validate(ctx context.Context, token string) (bool, error) { ret := _mock.Called(ctx, token) diff --git a/internal/tokenization/usecase/tokenization_metrics_decorator.go b/internal/tokenization/usecase/tokenization_metrics_decorator.go index ad0cac7..f5bee47 100644 --- a/internal/tokenization/usecase/tokenization_metrics_decorator.go +++ b/internal/tokenization/usecase/tokenization_metrics_decorator.go @@ -47,6 +47,28 @@ func (t *tokenizationUseCaseWithMetrics) Tokenize( return token, err } +// TokenizeBatch records metrics for batch token generation operations. 
+func (t *tokenizationUseCaseWithMetrics) TokenizeBatch( + ctx context.Context, + keyName string, + plaintexts [][]byte, + metadatas []map[string]any, + expiresAt *time.Time, +) ([]*tokenizationDomain.Token, error) { + start := time.Now() + tokens, err := t.next.TokenizeBatch(ctx, keyName, plaintexts, metadatas, expiresAt) + + status := "success" + if err != nil { + status = "error" + } + + t.metrics.RecordOperation(ctx, "tokenization", "tokenize_batch", status) + t.metrics.RecordDuration(ctx, "tokenization", "tokenize_batch", time.Since(start), status) + + return tokens, err +} + // Detokenize records metrics for token detokenization operations. func (t *tokenizationUseCaseWithMetrics) Detokenize( ctx context.Context, @@ -66,6 +88,25 @@ func (t *tokenizationUseCaseWithMetrics) Detokenize( return plaintext, metadata, err } +// DetokenizeBatch records metrics for batch token detokenization operations. +func (t *tokenizationUseCaseWithMetrics) DetokenizeBatch( + ctx context.Context, + tokens []string, +) (plaintexts [][]byte, metadatas []map[string]any, err error) { + start := time.Now() + plaintexts, metadatas, err = t.next.DetokenizeBatch(ctx, tokens) + + status := "success" + if err != nil { + status = "error" + } + + t.metrics.RecordOperation(ctx, "tokenization", "detokenize_batch", status) + t.metrics.RecordDuration(ctx, "tokenization", "detokenize_batch", time.Since(start), status) + + return plaintexts, metadatas, err +} + // Validate records metrics for token validation operations. 
func (t *tokenizationUseCaseWithMetrics) Validate(ctx context.Context, token string) (bool, error) { start := time.Now() diff --git a/internal/tokenization/usecase/tokenization_metrics_decorator_test.go b/internal/tokenization/usecase/tokenization_metrics_decorator_test.go index 58275b2..1644e30 100644 --- a/internal/tokenization/usecase/tokenization_metrics_decorator_test.go +++ b/internal/tokenization/usecase/tokenization_metrics_decorator_test.go @@ -439,3 +439,141 @@ func TestTokenizationUseCaseWithMetrics_CleanupExpired(t *testing.T) { }) } } + +func TestTokenizationUseCaseWithMetrics_TokenizeBatch(t *testing.T) { + tests := []struct { + name string + setupMocks func(*tokenizationMocks.MockTokenizationUseCase, *mockBusinessMetrics) + keyName string + plaintexts [][]byte + expectedErr error + expectedStatus string + }{ + { + name: "Success_RecordsSuccessMetrics", + setupMocks: func(mockUseCase *tokenizationMocks.MockTokenizationUseCase, mockMetrics *mockBusinessMetrics) { + tokens := []*tokenizationDomain.Token{{ID: uuid.New(), Token: "t1"}} + mockUseCase.EXPECT(). + TokenizeBatch(mock.Anything, "test-key", mock.Anything, mock.Anything, mock.Anything). + Return(tokens, nil). + Once() + mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "tokenize_batch", "success"). + Once() + mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "tokenize_batch", mock.AnythingOfType("time.Duration"), "success"). + Once() + }, + keyName: "test-key", + plaintexts: [][]byte{[]byte("p1")}, + expectedErr: nil, + expectedStatus: "success", + }, + { + name: "Error_RecordsErrorMetrics", + setupMocks: func(mockUseCase *tokenizationMocks.MockTokenizationUseCase, mockMetrics *mockBusinessMetrics) { + mockUseCase.EXPECT(). + TokenizeBatch(mock.Anything, "test-key", mock.Anything, mock.Anything, mock.Anything). + Return(nil, errors.New("error")). + Once() + mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "tokenize_batch", "error"). 
+ Once() + mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "tokenize_batch", mock.AnythingOfType("time.Duration"), "error"). + Once() + }, + keyName: "test-key", + plaintexts: [][]byte{[]byte("p1")}, + expectedErr: errors.New("error"), + expectedStatus: "error", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + mockUseCase := tokenizationMocks.NewMockTokenizationUseCase(t) + mockMetrics := &mockBusinessMetrics{} + tt.setupMocks(mockUseCase, mockMetrics) + + decorator := NewTokenizationUseCaseWithMetrics(mockUseCase, mockMetrics) + + _, err := decorator.TokenizeBatch( + context.Background(), + tt.keyName, + tt.plaintexts, + nil, + nil, + ) + + if tt.expectedErr != nil { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + + mockMetrics.AssertExpectations(t) + mockUseCase.AssertExpectations(t) + }) + } +} + +func TestTokenizationUseCaseWithMetrics_DetokenizeBatch(t *testing.T) { + tests := []struct { + name string + setupMocks func(*tokenizationMocks.MockTokenizationUseCase, *mockBusinessMetrics) + tokens []string + expectedErr error + expectedStatus string + }{ + { + name: "Success_RecordsSuccessMetrics", + setupMocks: func(mockUseCase *tokenizationMocks.MockTokenizationUseCase, mockMetrics *mockBusinessMetrics) { + mockUseCase.EXPECT(). + DetokenizeBatch(mock.Anything, []string{"t1"}). + Return([][]byte{[]byte("p1")}, []map[string]any{nil}, nil). + Once() + mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "detokenize_batch", "success"). + Once() + mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "detokenize_batch", mock.AnythingOfType("time.Duration"), "success"). + Once() + }, + tokens: []string{"t1"}, + expectedErr: nil, + expectedStatus: "success", + }, + { + name: "Error_RecordsErrorMetrics", + setupMocks: func(mockUseCase *tokenizationMocks.MockTokenizationUseCase, mockMetrics *mockBusinessMetrics) { + mockUseCase.EXPECT(). + DetokenizeBatch(mock.Anything, []string{"t1"}). 
+ Return(nil, nil, errors.New("error")). + Once() + mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "detokenize_batch", "error"). + Once() + mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "detokenize_batch", mock.AnythingOfType("time.Duration"), "error"). + Once() + }, + tokens: []string{"t1"}, + expectedErr: errors.New("error"), + expectedStatus: "error", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + mockUseCase := tokenizationMocks.NewMockTokenizationUseCase(t) + mockMetrics := &mockBusinessMetrics{} + tt.setupMocks(mockUseCase, mockMetrics) + + decorator := NewTokenizationUseCaseWithMetrics(mockUseCase, mockMetrics) + + _, _, err := decorator.DetokenizeBatch(context.Background(), tt.tokens) + + if tt.expectedErr != nil { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + + mockMetrics.AssertExpectations(t) + mockUseCase.AssertExpectations(t) + }) + } +} diff --git a/internal/tokenization/usecase/tokenization_usecase.go b/internal/tokenization/usecase/tokenization_usecase.go index 8caec23..2a8cd46 100644 --- a/internal/tokenization/usecase/tokenization_usecase.go +++ b/internal/tokenization/usecase/tokenization_usecase.go @@ -12,6 +12,7 @@ import ( cryptoDomain "github.com/allisson/secrets/internal/crypto/domain" cryptoService "github.com/allisson/secrets/internal/crypto/service" + "github.com/allisson/secrets/internal/database" apperrors "github.com/allisson/secrets/internal/errors" tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" tokenizationService "github.com/allisson/secrets/internal/tokenization/service" @@ -45,6 +46,7 @@ func validateTokenLength(formatType tokenizationDomain.FormatType, length int) e // tokenizationUseCase implements TokenizationUseCase for managing tokenization operations. 
// TokenizeBatch generates tokens for multiple plaintext values using the latest
// version of the named key. All items are created inside a single transaction,
// so either every plaintext is tokenized or none are (atomicity).
//
// metadatas is positional: metadatas[i] (when present) is attached to the token
// for plaintexts[i]; a nil or shorter metadatas slice leaves the remaining
// tokens without metadata. expiresAt, when non-nil, applies to every token.
//
// Returns the created tokens in the same order as plaintexts, or an error if
// any single tokenization fails (the whole batch is rolled back).
func (t *tokenizationUseCase) TokenizeBatch(
	ctx context.Context,
	keyName string,
	plaintexts [][]byte,
	metadatas []map[string]any,
	expiresAt *time.Time,
) ([]*tokenizationDomain.Token, error) {
	// Pre-size the result: exactly one token per plaintext on success.
	tokens := make([]*tokenizationDomain.Token, 0, len(plaintexts))
	err := t.txManager.WithTx(ctx, func(ctx context.Context) error {
		for i, plaintext := range plaintexts {
			// Metadata is optional and positional; out-of-range means "no metadata".
			var metadata map[string]any
			if i < len(metadatas) {
				metadata = metadatas[i]
			}
			// Reuse the single-item path so batch and single tokenization share
			// validation, key lookup, and encryption logic.
			token, err := t.Tokenize(ctx, keyName, plaintext, metadata, expiresAt)
			if err != nil {
				// Returning the error aborts WithTx and rolls back the batch.
				return err
			}
			tokens = append(tokens, token)
		}
		return nil
	})
	if err != nil {
		return nil, err
	}
	return tokens, nil
}
// DetokenizeBatch retrieves the original plaintext values for multiple tokens
// inside a single transaction, so the batch either fully succeeds or fails as a
// unit. Results are positional: plaintexts[i] and metadatas[i] correspond to
// tokens[i].
//
// Security Note: Callers MUST zero every returned plaintext after use:
// cryptoDomain.Zero(plaintext). On failure, any plaintexts already recovered
// are zeroed here before returning, so no decrypted data is left behind.
func (t *tokenizationUseCase) DetokenizeBatch(
	ctx context.Context,
	tokens []string,
) ([][]byte, []map[string]any, error) {
	// Pre-size both result slices: one entry per token on success.
	plaintexts := make([][]byte, 0, len(tokens))
	metadatas := make([]map[string]any, 0, len(tokens))
	err := t.txManager.WithTx(ctx, func(ctx context.Context) error {
		for _, token := range tokens {
			// Reuse the single-item path so batch and single detokenization
			// share token lookup, expiry/revocation checks, and decryption.
			plaintext, metadata, err := t.Detokenize(ctx, token)
			if err != nil {
				return err
			}
			plaintexts = append(plaintexts, plaintext)
			metadatas = append(metadatas, metadata)
		}
		return nil
	})
	if err != nil {
		// Don't leak partially recovered secrets on a failed batch: zero every
		// plaintext decrypted before the failure.
		for _, plaintext := range plaintexts {
			cryptoDomain.Zero(plaintext)
		}
		return nil, nil, err
	}
	return plaintexts, metadatas, nil
}
tokenizationMocks "github.com/allisson/secrets/internal/tokenization/usecase/mocks" @@ -23,6 +24,7 @@ func TestTokenizationUseCase_Tokenize(t *testing.T) { t.Run("Success_NonDeterministicMode", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -108,6 +110,7 @@ func TestTokenizationUseCase_Tokenize(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -134,6 +137,7 @@ func TestTokenizationUseCase_Tokenize(t *testing.T) { t.Run("Success_DeterministicMode_NewToken", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -233,6 +237,7 @@ func TestTokenizationUseCase_Tokenize(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -258,6 +263,7 @@ func TestTokenizationUseCase_Tokenize(t *testing.T) { t.Run("Success_DeterministicMode_ExistingValidToken", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -316,6 +322,7 @@ func TestTokenizationUseCase_Tokenize(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -338,6 +345,7 @@ func TestTokenizationUseCase_Tokenize(t *testing.T) { 
t.Run("Success_DeterministicMode_ExpiredTokenCreatesNew", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -449,6 +457,7 @@ func TestTokenizationUseCase_Tokenize(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -470,6 +479,7 @@ func TestTokenizationUseCase_Tokenize(t *testing.T) { t.Run("Error_TokenizationKeyNotFound", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -489,6 +499,7 @@ func TestTokenizationUseCase_Tokenize(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -510,6 +521,7 @@ func TestTokenizationUseCase_Tokenize(t *testing.T) { t.Run("Error_DekNotFound", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -544,6 +556,7 @@ func TestTokenizationUseCase_Tokenize(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -565,6 +578,7 @@ func TestTokenizationUseCase_Tokenize(t *testing.T) { t.Run("Error_KekNotFound", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo 
:= tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -609,6 +623,7 @@ func TestTokenizationUseCase_Tokenize(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -630,6 +645,7 @@ func TestTokenizationUseCase_Tokenize(t *testing.T) { t.Run("Error_EncryptionFails", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -695,6 +711,7 @@ func TestTokenizationUseCase_Tokenize(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -720,6 +737,7 @@ func TestTokenizationUseCase_Detokenize(t *testing.T) { t.Run("Success_DetokenizeValid", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -806,6 +824,7 @@ func TestTokenizationUseCase_Detokenize(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -826,6 +845,7 @@ func TestTokenizationUseCase_Detokenize(t *testing.T) { t.Run("Error_TokenNotFound", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -845,6 +865,7 @@ func TestTokenizationUseCase_Detokenize(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + 
mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -866,6 +887,7 @@ func TestTokenizationUseCase_Detokenize(t *testing.T) { t.Run("Error_TokenExpired", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -899,6 +921,7 @@ func TestTokenizationUseCase_Detokenize(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -919,6 +942,7 @@ func TestTokenizationUseCase_Detokenize(t *testing.T) { t.Run("Error_TokenRevoked", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -952,6 +976,7 @@ func TestTokenizationUseCase_Detokenize(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -972,6 +997,7 @@ func TestTokenizationUseCase_Detokenize(t *testing.T) { t.Run("Error_DecryptionFails", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -1053,6 +1079,7 @@ func TestTokenizationUseCase_Detokenize(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -1079,6 +1106,7 @@ func TestTokenizationUseCase_Validate(t *testing.T) { t.Run("Success_ValidToken", func(t *testing.T) { // Setup mocks + mockTxManager := 
databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -1110,6 +1138,7 @@ func TestTokenizationUseCase_Validate(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -1129,6 +1158,7 @@ func TestTokenizationUseCase_Validate(t *testing.T) { t.Run("Success_ExpiredToken", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -1161,6 +1191,7 @@ func TestTokenizationUseCase_Validate(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -1180,6 +1211,7 @@ func TestTokenizationUseCase_Validate(t *testing.T) { t.Run("Success_TokenNotFound", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -1199,6 +1231,7 @@ func TestTokenizationUseCase_Validate(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -1218,6 +1251,7 @@ func TestTokenizationUseCase_Validate(t *testing.T) { t.Run("Error_RepositoryError", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := 
tokenizationMocks.NewMockDekRepository(t) @@ -1239,6 +1273,7 @@ func TestTokenizationUseCase_Validate(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -1265,6 +1300,7 @@ func TestTokenizationUseCase_Revoke(t *testing.T) { t.Run("Success_RevokeToken", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -1301,6 +1337,7 @@ func TestTokenizationUseCase_Revoke(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -1319,6 +1356,7 @@ func TestTokenizationUseCase_Revoke(t *testing.T) { t.Run("Error_TokenNotFound", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -1338,6 +1376,7 @@ func TestTokenizationUseCase_Revoke(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -1358,6 +1397,7 @@ func TestTokenizationUseCase_Revoke(t *testing.T) { t.Run("Error_RevokeFails", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -1396,6 +1436,7 @@ func TestTokenizationUseCase_Revoke(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -1421,6 
+1462,7 @@ func TestTokenizationUseCase_CleanupExpired(t *testing.T) { t.Run("Success_DryRunMode", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -1446,6 +1488,7 @@ func TestTokenizationUseCase_CleanupExpired(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -1465,6 +1508,7 @@ func TestTokenizationUseCase_CleanupExpired(t *testing.T) { t.Run("Success_DeleteMode", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -1490,6 +1534,7 @@ func TestTokenizationUseCase_CleanupExpired(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -1509,6 +1554,7 @@ func TestTokenizationUseCase_CleanupExpired(t *testing.T) { t.Run("Error_NegativeDays", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) mockDekRepo := tokenizationMocks.NewMockDekRepository(t) @@ -1522,6 +1568,7 @@ func TestTokenizationUseCase_CleanupExpired(t *testing.T) { // Create use case uc := NewTokenizationUseCase( + mockTxManager, mockTokenizationKeyRepo, mockTokenRepo, mockDekRepo, @@ -1542,6 +1589,7 @@ func TestTokenizationUseCase_CleanupExpired(t *testing.T) { t.Run("Error_RepositoryError", func(t *testing.T) { // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) 
// TestTokenizationUseCase_TokenizeBatch tests the TokenizeBatch method.
// It verifies that the batch wraps the single-item Tokenize flow in a
// transaction and performs one full tokenize cycle per plaintext.
func TestTokenizationUseCase_TokenizeBatch(t *testing.T) {
	ctx := context.Background()

	t.Run("Success", func(t *testing.T) {
		// Setup mocks
		mockTxManager := databaseMocks.NewMockTxManager(t)
		mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t)
		mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t)
		mockDekRepo := tokenizationMocks.NewMockDekRepository(t)
		mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t)
		mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t)
		mockHashService := tokenizationMocks.NewMockHashService(t)

		// Create test data
		masterKey := tokenizationTesting.CreateMasterKey()
		kekChain := tokenizationTesting.CreateKekChain(masterKey)
		defer kekChain.Close()

		activeKek := tokenizationTesting.GetActiveKek(kekChain)
		dekID := uuid.Must(uuid.NewV7())
		tokenizationKeyID := uuid.Must(uuid.NewV7())

		tokenizationKey := &tokenizationDomain.TokenizationKey{
			ID:              tokenizationKeyID,
			DekID:           dekID,
			Name:            "test-key",
			FormatType:      tokenizationDomain.FormatUUID,
			IsDeterministic: false,
			Version:         1,
		}

		dek := &cryptoDomain.Dek{
			ID:           dekID,
			KekID:        activeKek.ID,
			Algorithm:    cryptoDomain.AESGCM,
			EncryptedKey: []byte("encrypted-dek"),
			Nonce:        []byte("nonce"),
		}

		dekKey := make([]byte, 32)
		plaintexts := [][]byte{[]byte("value1"), []byte("value2")}
		mockCipher := cryptoServiceMocks.NewMockAEAD(t)

		// Setup expectations.
		// The tx manager mock immediately invokes the transactional callback so
		// the per-item expectations below are exercised.
		mockTxManager.EXPECT().
			WithTx(ctx, mock.AnythingOfType("func(context.Context) error")).
			Run(func(ctx context.Context, f func(context.Context) error) {
				_ = f(ctx)
			}).
			Return(nil).
			Once()

		// Expectations for each item in batch: one full tokenize cycle
		// (key lookup -> DEK fetch/decrypt -> cipher -> encrypt -> persist).
		for range plaintexts {
			mockTokenizationKeyRepo.EXPECT().
				GetByName(ctx, "test-key").
				Return(tokenizationKey, nil).
				Once()

			mockDekRepo.EXPECT().
				Get(ctx, dekID).
				Return(dek, nil).
				Once()

			mockKeyManager.EXPECT().
				DecryptDek(dek, activeKek).
				Return(dekKey, nil).
				Once()

			mockAEADManager.EXPECT().
				CreateCipher(dekKey, cryptoDomain.AESGCM).
				Return(mockCipher, nil).
				Once()

			mockCipher.EXPECT().
				Encrypt(mock.Anything, mock.Anything).
				Return([]byte("ciphertext"), []byte("nonce"), nil).
				Once()

			mockTokenRepo.EXPECT().
				Create(ctx, mock.Anything).
				Return(nil).
				Once()
		}

		// Create use case
		uc := NewTokenizationUseCase(
			mockTxManager,
			mockTokenizationKeyRepo,
			mockTokenRepo,
			mockDekRepo,
			mockAEADManager,
			mockKeyManager,
			mockHashService,
			kekChain,
		)

		// Execute
		tokens, err := uc.TokenizeBatch(ctx, "test-key", plaintexts, nil, nil)

		// Assert: one token per input plaintext.
		assert.NoError(t, err)
		assert.Len(t, tokens, 2)
	})
}

// TestTokenizationUseCase_DetokenizeBatch tests the DetokenizeBatch method.
func TestTokenizationUseCase_DetokenizeBatch(t *testing.T) {
	// Verifies the batch wraps the single-item Detokenize flow in a
	// transaction and performs one full detokenize cycle per input token.
	ctx := context.Background()

	t.Run("Success", func(t *testing.T) {
		// Setup mocks
		mockTxManager := databaseMocks.NewMockTxManager(t)
		mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t)
		mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t)
		mockDekRepo := tokenizationMocks.NewMockDekRepository(t)
		mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t)
		mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t)
		mockHashService := tokenizationMocks.NewMockHashService(t)

		// Create test data
		masterKey := tokenizationTesting.CreateMasterKey()
		kekChain := tokenizationTesting.CreateKekChain(masterKey)
		defer kekChain.Close()

		activeKek := tokenizationTesting.GetActiveKek(kekChain)
		dekID := uuid.Must(uuid.NewV7())
		tokenizationKeyID := uuid.Must(uuid.NewV7())
		tokens := []string{"token1", "token2"}

		// Single shared token record returned for both lookups; sufficient for
		// exercising the decrypt path.
		tokenRecord := &tokenizationDomain.Token{
			ID:                uuid.Must(uuid.NewV7()),
			TokenizationKeyID: tokenizationKeyID,
			Token:             "token",
			Ciphertext:        []byte("ciphertext"),
			Nonce:             []byte("nonce"),
			CreatedAt:         time.Now().UTC(),
		}

		tokenizationKey := &tokenizationDomain.TokenizationKey{
			ID:              tokenizationKeyID,
			DekID:           dekID,
			Name:            "test-key",
			FormatType:      tokenizationDomain.FormatUUID,
			IsDeterministic: false,
			Version:         1,
		}

		dek := &cryptoDomain.Dek{
			ID:           dekID,
			KekID:        activeKek.ID,
			Algorithm:    cryptoDomain.AESGCM,
			EncryptedKey: []byte("encrypted-dek"),
			Nonce:        []byte("dek-nonce"),
		}

		dekKey := make([]byte, 32)
		mockCipher := cryptoServiceMocks.NewMockAEAD(t)

		// Setup expectations.
		// The tx manager mock immediately invokes the transactional callback so
		// the per-item expectations below are exercised.
		mockTxManager.EXPECT().
			WithTx(ctx, mock.AnythingOfType("func(context.Context) error")).
			Run(func(ctx context.Context, f func(context.Context) error) {
				_ = f(ctx)
			}).
			Return(nil).
			Once()

		// Expectations for each item in batch: one full detokenize cycle
		// (token lookup -> key lookup -> DEK fetch/decrypt -> cipher -> decrypt).
		for range tokens {
			mockTokenRepo.EXPECT().
				GetByToken(ctx, mock.Anything).
				Return(tokenRecord, nil).
				Once()

			mockTokenizationKeyRepo.EXPECT().
				Get(ctx, tokenizationKeyID).
				Return(tokenizationKey, nil).
				Once()

			mockDekRepo.EXPECT().
				Get(ctx, dekID).
				Return(dek, nil).
				Once()

			mockKeyManager.EXPECT().
				DecryptDek(dek, activeKek).
				Return(dekKey, nil).
				Once()

			mockAEADManager.EXPECT().
				CreateCipher(dekKey, cryptoDomain.AESGCM).
				Return(mockCipher, nil).
				Once()

			mockCipher.EXPECT().
				Decrypt(mock.Anything, mock.Anything, mock.Anything).
				Return([]byte("plaintext"), nil).
				Once()
		}

		// Create use case
		uc := NewTokenizationUseCase(
			mockTxManager,
			mockTokenizationKeyRepo,
			mockTokenRepo,
			mockDekRepo,
			mockAEADManager,
			mockKeyManager,
			mockHashService,
			kekChain,
		)

		// Execute
		plaintexts, metadatas, err := uc.DetokenizeBatch(ctx, tokens)

		// Assert: one plaintext and one metadata entry per input token.
		assert.NoError(t, err)
		assert.Len(t, plaintexts, 2)
		assert.Len(t, metadatas, 2)
	})
}
"/v1/tokenization/keys/"+tokenizationKeyName1+"/tokenize-batch", + requestBody, + true, + ) + assert.Equal(t, http.StatusCreated, resp.StatusCode) + + var response tokenizationDTO.TokenizeBatchResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.Len(t, response.Items, 2) + + // [15/15] Test POST /v1/tokenization/detokenize-batch - Batch Detokenize + t.Run("15_DetokenizeBatch", func(t *testing.T) { + tokens := []string{response.Items[0].Token, response.Items[1].Token} + detokenizeRequest := tokenizationDTO.DetokenizeBatchRequest{ + Tokens: tokens, + } + + detokenizeResp, detokenizeBody := ctx.makeRequest( + t, + http.MethodPost, + "/v1/tokenization/detokenize-batch", + detokenizeRequest, + true, + ) + assert.Equal(t, http.StatusOK, detokenizeResp.StatusCode) + + var detokenizeResponse tokenizationDTO.DetokenizeBatchResponse + err = json.Unmarshal(detokenizeBody, &detokenizeResponse) + require.NoError(t, err) + assert.Len(t, detokenizeResponse.Items, 2) + assert.Equal(t, p1B64, detokenizeResponse.Items[0].Plaintext) + assert.Equal(t, p2B64, detokenizeResponse.Items[1].Plaintext) + }) + }) + + // [16/16] Test POST /v1/tokenization/keys/:name/tokenize-batch - Atomicity + t.Run("16_TokenizeBatch_Atomicity", func(t *testing.T) { + requestBody := tokenizationDTO.TokenizeBatchRequest{ + Items: []tokenizationDTO.TokenizeRequest{ + {Plaintext: base64.StdEncoding.EncodeToString([]byte("valid"))}, + {Plaintext: "invalid-base64-!!!", Metadata: map[string]any{"index": 2}}, + }, + } + + resp, _ := ctx.makeRequest( + t, + http.MethodPost, + "/v1/tokenization/keys/"+tokenizationKeyName1+"/tokenize-batch", + requestBody, + true, + ) + // It should fail with 400 Bad Request because of invalid base64 in one item + assert.Equal(t, http.StatusBadRequest, resp.StatusCode) + }) + + t.Logf("All 16 tokenization endpoint tests passed for %s", tc.dbDriver) }) } }