diff --git a/admin-app/src/main/kotlin/com/epam/drill/admin/config/DatabaseConfig.kt b/admin-app/src/main/kotlin/com/epam/drill/admin/config/DatabaseConfig.kt index 3a9d2154e..61c52950e 100644 --- a/admin-app/src/main/kotlin/com/epam/drill/admin/config/DatabaseConfig.kt +++ b/admin-app/src/main/kotlin/com/epam/drill/admin/config/DatabaseConfig.kt @@ -43,6 +43,9 @@ class DatabaseConfig(private val config: ApplicationConfig) { val maxPoolSize: Int get() = config.propertyOrNull("maximumPoolSize")?.getString()?.toInt() ?: 50 + + val ssl: Boolean + get() = config.propertyOrNull("ssl")?.getString()?.toBooleanStrictOrNull() ?: false } val dataSourceDIModule = DI.Module("dataSource") { @@ -69,6 +72,10 @@ val dataSourceDIModule = DI.Module("dataSource") { this.transactionIsolation = "TRANSACTION_READ_UNCOMMITTED" this.addDataSourceProperty("rewriteBatchedInserts", true) this.addDataSourceProperty("rewriteBatchedStatements", true) + if (databaseConfig.ssl) { + this.addDataSourceProperty("ssl", true) + this.addDataSourceProperty("sslmode", "require") + } this.validate() } } diff --git a/admin-app/src/main/resources/application.conf b/admin-app/src/main/resources/application.conf index dd7146f5e..6e87bb6e5 100644 --- a/admin-app/src/main/resources/application.conf +++ b/admin-app/src/main/resources/application.conf @@ -24,6 +24,8 @@ drill { userName = ${?DRILL_DB_USER_NAME} password = ${?DRILL_DB_PASSWORD} maximumPoolSize = ${?DRILL_DB_MAX_POOL_SIZE} + ssl = false + ssl = ${?DRILL_DB_SSL} } auth { jwt { @@ -104,5 +106,9 @@ drill { transformationBufferSize = ${?DRILL_ETL_TRANSFORMATION_BUFFER_SIZE} loggingFrequency = 10 loggingFrequency = ${?DRILL_ETL_LOGGING_FREQUENCY} + consistencyWindow = 0 + consistencyWindow = ${?DRILL_ETL_CONSISTENCY_WINDOW} + processingDelay = 0 + processingDelay = ${?DRILL_ETL_PROCESSING_DELAY} } } diff --git a/admin-app/src/main/resources/openapi.yml b/admin-app/src/main/resources/openapi.yml index cfb3b185f..9a454d34e 100644 --- 
a/admin-app/src/main/resources/openapi.yml +++ b/admin-app/src/main/resources/openapi.yml @@ -7,6 +7,8 @@ paths: /api/group-settings/{groupId}: get: summary: Get group settings + description: | + Retrieves the configuration settings for a specific application group. operationId: getGroupSettingsById tags: - settings @@ -16,6 +18,7 @@ paths: - name: groupId in: path required: true + description: Unique identifier for the application group whose settings are being retrieved. schema: type: string responses: @@ -27,6 +30,9 @@ paths: $ref: '#/components/schemas/DataResponse' put: summary: Save group settings + description: | + Creates or updates configuration settings for a specific application group. + If settings already exist for the group, they are overwritten with the new values. operationId: updateGroupSettingsById tags: - settings @@ -36,6 +42,7 @@ paths: - name: groupId in: path required: true + description: Unique identifier for the application group whose settings are being saved. schema: type: string requestBody: @@ -53,6 +60,9 @@ paths: $ref: '#/components/schemas/MessageResponse' delete: summary: Delete group settings + description: | + Removes all configuration settings for a specific application group, resetting them to system defaults. + After deletion, the group will use the global default values. operationId: deleteGroupSettingsById tags: - settings @@ -62,6 +72,7 @@ paths: - name: groupId in: path required: true + description: Unique identifier for the application group whose settings are being deleted. schema: type: string responses: @@ -71,12 +82,11 @@ paths: # Data Ingest Endpoints /api/data-ingest/builds: put: - summary: Persist application build metadata + summary: Persist application build identity description: | - Saves application build information, including build version and associated Git commit metadata. 
- This endpoint captures critical build data required for change tracking, impact analysis, risk assessment, and - correlation with test coverage metrics. The service ingests build metadata from instrumented applications to enable - detailed analysis of code changes, test recommendations, and coverage reports across different application versions. + Saves application build identity information, including groupId, appId, commitSha, and buildVersion. + This endpoint only persists the build identity fields. To save additional Git metadata (branch, commitDate, + commitMessage, commitAuthor), use the PUT /api/data-ingest/builds/info endpoint. operationId: putBuild tags: - data-ingest @@ -98,6 +108,35 @@ paths: application/json: schema: $ref: '#/components/schemas/MessageResponse' + /api/data-ingest/builds/info: + put: + summary: Persist application build Git metadata + description: | + Saves Git metadata (branch, commitDate, commitMessage, commitAuthor) for an application build. + The build is identified by groupId, appId, commitSha, and buildVersion. If a build with the given + identity does not exist, a new build is created. If it already exists, only the Git metadata fields + are updated while preserving the existing identity and instance data. + operationId: putBuildInfo + tags: + - data-ingest + security: + - apiKeyAuth: [ ] + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/BuildPayload' + application/protobuf: + schema: + $ref: '#/components/schemas/BuildPayload' + responses: + '200': + description: Build info saved + content: + application/json: + schema: + $ref: '#/components/schemas/MessageResponse' /api/data-ingest/instances: put: summary: Persist application instance metadata @@ -302,6 +341,13 @@ paths: /api/data-ingest/method-ignore-rules: post: summary: Save method ignore rule + description: | + Creates a new rule for excluding methods from coverage analysis and metrics computation. 
+ Rules are matched by name pattern and/or class name pattern using glob-style matching. + + **Note:** Newly created rules are automatically applied during subsequent ETL cycles for new data only. + If you need the rule to take effect for already loaded data, trigger an ETL refresh from scratch + by calling `POST /api/metrics/refresh?reset=true`. operationId: postMethodIgnoreRules tags: - data-ingest @@ -325,6 +371,14 @@ paths: $ref: '#/components/schemas/MessageResponse' get: summary: Get all method ignore rules + description: | + Retrieves the complete list of method ignore rules configured in the system. + Each rule defines patterns (name and/or class name) used to exclude matching methods + from coverage analysis and metrics computation. + + **Note:** Changes to ignore rules (creation or deletion) are automatically applied during subsequent ETL cycles + for new data only. If you need updated rules to take effect for already loaded data, trigger an ETL refresh + from scratch by calling `POST /api/metrics/refresh?reset=true`. operationId: getAllMethodIgnoreRules tags: - data-ingest @@ -340,6 +394,11 @@ paths: /api/data-ingest/method-ignore-rules/{id}: delete: summary: Delete method ignore rule by ID + description: | + Removes a specific method ignore rule by its unique identifier. + + **Note:** After deleting a rule, previously excluded methods will only reappear in metrics after + an ETL refresh from scratch. Trigger it by calling `POST /api/metrics/refresh?reset=true`. operationId: deleteMethodIgnoreRuleById tags: - data-ingest @@ -349,6 +408,7 @@ paths: - name: id in: path required: true + description: Unique numeric identifier of the method ignore rule to delete. 
schema: type: integer responses: @@ -362,6 +422,9 @@ paths: /api/data-management/groups/{groupId}/apps/{appId}/builds/{buildId}: delete: summary: Delete build data with all associated data (coverage, methods, instances) + description: | + Permanently deletes a specific application build and all its associated data, including code coverage records, + method metadata, and instance deployments. Use this endpoint to clean up obsolete builds or free storage. operationId: deleteBuild tags: - data-management @@ -371,16 +434,19 @@ paths: - name: groupId in: path required: true + description: Unique identifier for the application group containing the build. schema: type: string - name: appId in: path required: true + description: Unique identifier for the application containing the build. schema: type: string - name: buildId in: path required: true + description: Unique identifier of the build to delete. This is the internal build ID assigned by the system. schema: type: string responses: @@ -393,6 +459,10 @@ paths: /api/data-management/groups/{groupId}/tests/sessions/{testSessionId}: delete: summary: Delete test session data with all associated data (coverage, test launches) + description: | + Permanently deletes a specific test session and all its associated data, including test launch records + and code coverage data collected during the session. Use this endpoint to clean up + invalid or obsolete test sessions. operationId: deleteTestSession tags: - data-management @@ -402,11 +472,13 @@ paths: - name: groupId in: path required: true + description: Unique identifier for the application group containing the test session. schema: type: string - name: testSessionId in: path required: true + description: Unique identifier of the test session to delete. schema: type: string responses: @@ -420,6 +492,8 @@ paths: /api/metrics/applications: get: summary: Get applications + description: | + Retrieves a list of all registered applications in Drill4J. 
operationId: getApplications tags: - metrics @@ -429,6 +503,7 @@ paths: - name: groupId in: query required: false + description: Optional filter to retrieve applications belonging to a specific group only. schema: type: string responses: @@ -441,6 +516,8 @@ paths: /api/metrics/builds: get: summary: Get builds + description: | + Retrieves a paginated list of application builds for a specific application. operationId: getBuilds tags: - metrics @@ -450,27 +527,33 @@ paths: - name: groupId in: query required: true + description: Unique identifier for the application group. schema: type: string - name: appId in: query required: true + description: Unique identifier for the application. schema: type: string - name: branch in: query + description: Optional Git branch name filter. When specified, only builds from this branch are returned. schema: type: string - name: envId in: query + description: Optional environment identifier filter. When specified, only builds deployed in this environment are returned. schema: type: string - name: page in: query + description: Page number for pagination (1-based). Defaults to 1 if not specified. schema: type: integer - name: pageSize in: query + description: Number of items per page. Defaults to a system-defined value if not specified. schema: type: integer responses: @@ -483,6 +566,12 @@ paths: /api/metrics/build-diff-report: get: summary: Get build diff report + description: | + Generates a comprehensive diff report comparing a target build against a baseline build. The report includes + summary statistics on code changes (new, modified, deleted methods), overall and per-change-type coverage percentages, + risk assessment based on uncovered changes. The target build is identified by + instanceId, commitSha, or buildVersion; the baseline build is identified by baselineInstanceId, baselineCommitSha, + or baselineBuildVersion. 
operationId: getBuildDiffReport tags: - metrics @@ -492,39 +581,48 @@ paths: - name: groupId in: query required: true + description: Unique identifier for the application group. schema: type: string - name: appId in: query required: true + description: Unique identifier for the application. schema: type: string - name: instanceId in: query + description: Instance ID of the target build deployment. schema: type: string - name: commitSha in: query + description: Git commit SHA of the target build. schema: type: string - name: buildVersion in: query + description: Build version of the target build. schema: type: string - name: baselineInstanceId in: query + description: Instance ID of the baseline build deployment. schema: type: string - name: baselineCommitSha in: query + description: Git commit SHA of the baseline build. schema: type: string - name: baselineBuildVersion in: query + description: Build version of the baseline build. schema: type: string - name: coverageThreshold in: query + description: Minimum required coverage percentage (0.0–1.0) for the diff report quality gate. schema: type: number format: float @@ -539,6 +637,14 @@ paths: /api/metrics/recommended-tests: get: summary: Get recommended tests + description: | + Analyzes code changes between a target build and a baseline build and returns a list of tests recommended + for execution based on impact analysis. The endpoint identifies which methods have changed and finds tests + that previously covered those methods, recommending them for re-execution to validate the changes. + When testsToSkip is true, the endpoint inverts the result - returning tests that can be safely skipped + because they do not cover any changed code. The target build is identified by + targetInstanceId, targetCommitSha, or targetBuildVersion. The baseline build is identified by + baselineInstanceId, baselineCommitSha, baselineBuildVersion, or baselineBuildBranches. 
operationId: getRecommendedTests tags: - metrics @@ -548,55 +654,66 @@ paths: - name: groupId in: query required: true + description: Unique identifier for the application group. schema: type: string - name: appId in: query required: true + description: Unique identifier for the application. schema: type: string - name: testsToSkip in: query + description: When true, returns tests that can be safely skipped (not impacted by changes). When false (default), returns tests that should be executed because they cover changed code. schema: type: boolean default: false - name: testTaskId in: query + description: Optional test task ID filter. When specified, only considers test launches associated with this task ID for recommendations. schema: type: string - name: targetInstanceId in: query + description: Instance ID of the target build. One of targetInstanceId, targetCommitSha, or targetBuildVersion should be provided. schema: type: string - name: targetCommitSha in: query + description: Git commit SHA of the target build. One of targetInstanceId, targetCommitSha, or targetBuildVersion should be provided. schema: type: string - name: targetBuildVersion in: query + description: Build version of the target build. One of targetInstanceId, targetCommitSha, or targetBuildVersion should be provided. schema: type: string - name: baselineInstanceId in: query + description: Instance ID of the baseline build. If not provided, the system automatically selects the previous build as baseline. schema: type: string - name: baselineCommitSha in: query + description: Git commit SHA of the baseline build. If not provided, the system automatically selects the previous build as baseline. schema: type: string - name: baselineBuildVersion in: query + description: Build version of the baseline build. If not provided, the system automatically selects the previous build as baseline. 
schema: type: string - name: baselineBuildBranches in: query - description: List of baseline build branches to consider. + description: List of Git branch names to consider when selecting the baseline build. Useful when the baseline should be chosen from specific branches (e.g., main, develop). schema: type: array items: type: string - name: coveragePeriodDays in: query + description: Number of days to look back for coverage data when determining test recommendations. Limits the scope of historical coverage data used for analysis. schema: type: integer responses: @@ -609,6 +726,9 @@ paths: /api/metrics/coverage-treemap: get: summary: Get coverage treemap + description: | + Retrieves a hierarchical treemap representation of code coverage data for a specific build. The treemap + organizes methods into a package/class hierarchy, where each node contains aggregated coverage statistics. operationId: getCoverageTreemap tags: - metrics @@ -618,30 +738,37 @@ paths: - name: buildId in: query required: true + description: Internal build identifier for which to retrieve coverage treemap data. schema: type: string - name: testTag in: query + description: Optional test tag filter. When specified, only coverage data from tests with this tag is included. schema: type: string - name: envId in: query + description: Optional environment identifier filter. When specified, only coverage data from this environment is included. schema: type: string - name: branch in: query + description: Optional Git branch filter. When specified, only coverage data from this branch is included. schema: type: string - name: packageNamePattern in: query + description: Optional package name pattern filter for narrowing results to specific packages. schema: type: string - name: classNamePattern in: query + description: Optional class name pattern filter for narrowing results to specific classes. 
schema: type: string - name: rootId in: query + description: Optional root node identifier to retrieve a specific subtree of the treemap hierarchy. Used for drill-down navigation. schema: type: string - name: testSessionId @@ -664,6 +791,9 @@ paths: /api/metrics/changes-coverage-treemap: get: summary: Get changes coverage treemap + description: | + Retrieves a hierarchical treemap representation of code coverage focused only on changed methods + between a target build and a baseline build. operationId: getChangesCoverageTreemap tags: - metrics @@ -673,51 +803,63 @@ paths: - name: buildId in: query required: true + description: Internal build identifier of the target (newer) build. schema: type: string - name: baselineBuildId in: query required: true + description: Internal build identifier of the baseline (older) build to compare against. schema: type: string - name: testTag in: query + description: Optional test tag filter. When specified, only coverage data from tests with this tag is included. schema: type: string - name: envId in: query + description: Optional environment identifier filter. When specified, only coverage data from this environment is included. schema: type: string - name: branch in: query + description: Optional Git branch filter. When specified, only coverage data from this branch is included. schema: type: string - name: packageNamePattern in: query + description: Optional package name pattern filter for narrowing results to specific packages. schema: type: string - name: classNamePattern in: query + description: Optional class name pattern filter for narrowing results to specific classes. schema: type: string - name: rootId in: query + description: Optional root node identifier to retrieve a specific subtree of the treemap hierarchy. schema: type: string - name: page in: query + description: Page number for pagination (1-based). Defaults to 1 if not specified. 
schema: type: integer - name: pageSize in: query + description: Number of items per page. Defaults to a system-defined value if not specified. schema: type: integer - name: includeDeleted in: query + description: Whether to include methods that were deleted in the target build (present in baseline but absent in target). Defaults to false. schema: type: boolean - name: includeEqual in: query + description: Whether to include methods that are unchanged between builds. Defaults to false. schema: type: boolean responses: @@ -730,6 +872,10 @@ paths: /api/metrics/changes: get: summary: Get method changes between builds + description: | + Retrieves a paginated list of method-level changes between a target build and a baseline build. + The target build is identified by instanceId, commitSha, or buildVersion; the baseline build is identified + by baselineInstanceId, baselineCommitSha, or baselineBuildVersion. operationId: getMethodChanges tags: - metrics @@ -739,51 +885,63 @@ paths: - name: groupId in: query required: true + description: Unique identifier for the application group. schema: type: string - name: appId in: query required: true + description: Unique identifier for the application. schema: type: string - name: instanceId in: query + description: Instance ID of the target build deployment. schema: type: string - name: commitSha in: query + description: Git commit SHA of the target build. schema: type: string - name: buildVersion in: query + description: Build version of the target build. schema: type: string - name: baselineInstanceId in: query + description: Instance ID of the baseline build deployment. schema: type: string - name: baselineCommitSha in: query + description: Git commit SHA of the baseline build. schema: type: string - name: baselineBuildVersion in: query + description: Build version of the baseline build. 
schema: type: string - name: includeDeleted in: query + description: Whether to include methods that were deleted in the target build (present in baseline but absent in target). Defaults to false. schema: type: boolean - name: includeEqual in: query + description: Whether to include methods that are unchanged between builds. Defaults to false. schema: type: boolean - name: page in: query + description: Page number for pagination (1-based). Defaults to 1 if not specified. schema: type: integer - name: pageSize in: query + description: Number of items per page. Defaults to a system-defined value if not specified. schema: type: integer responses: @@ -796,6 +954,8 @@ paths: /api/metrics/coverage: get: summary: Get coverage by methods for a build + description: | + Retrieves a paginated list of methods with their code coverage data for a specific build. operationId: getCoverageByMethods tags: - metrics @@ -805,34 +965,42 @@ paths: - name: buildId in: query required: true + description: Internal build identifier for which to retrieve method-level coverage data. schema: type: string - name: testTag in: query + description: Optional test tag filter. When specified, only coverage data from tests with this tag is included. schema: type: string - name: envId in: query + description: Optional environment identifier filter. When specified, only coverage data from this environment is included. schema: type: string - name: branch in: query + description: Optional Git branch filter. When specified, only coverage data from this branch is included. schema: type: string - name: packageNamePattern in: query + description: Optional package name pattern filter for narrowing results to specific packages. schema: type: string - name: classNamePattern in: query + description: Optional class name pattern filter for narrowing results to specific classes. schema: type: string - name: page in: query + description: Page number for pagination (1-based). Defaults to 1 if not specified. 
schema: type: integer - name: pageSize in: query + description: Number of items per page. Defaults to a system-defined value if not specified. schema: type: integer responses: @@ -845,6 +1013,9 @@ paths: /api/metrics/impacted-tests: get: summary: Get impacted tests + description: | + Retrieves a paginated list of tests that are impacted by code changes between a target build and a baseline build. + Impact analysis identifies which tests previously covered methods that have been modified, added, or deleted. operationId: getImpactedTests tags: - metrics @@ -854,68 +1025,92 @@ paths: - name: groupId in: query required: true + description: Unique identifier for the application group. schema: type: string - name: appId in: query required: true + description: Unique identifier for the application. schema: type: string - name: instanceId in: query + description: Instance ID of the target build deployment. schema: type: string - name: commitSha in: query + description: Git commit SHA of the target build. schema: type: string - name: buildVersion in: query + description: Build version of the target build. schema: type: string - name: baselineInstanceId in: query + description: Instance ID of the baseline build deployment. schema: type: string - name: baselineCommitSha in: query + description: Git commit SHA of the baseline build. schema: type: string - name: baselineBuildVersion in: query + description: Build version of the baseline build. schema: type: string - name: packageName in: query + description: Optional filter to only consider methods in this package when determining impacted tests. schema: type: string - name: className in: query + description: Optional filter to only consider methods in this class when determining impacted tests. schema: type: string - name: methodName in: query + description: Optional filter to only consider methods with this name when determining impacted tests. 
schema: type: string + - name: excludeMethodSignatures + in: query + description: List of method signatures to exclude from impact analysis. Tests covering only excluded methods will not be returned. Signature format is "className:methodName:params:returnType". + schema: + type: array + items: + type: string + style: form + explode: true - name: testTaskId in: query + description: Optional filter by test task ID. When specified, only tests from launches associated with this task ID are considered. schema: type: string - name: testTag in: query + description: Optional filter by test tag. When specified, only tests with this tag are included. schema: type: string - name: testPath in: query + description: Optional filter by test source path. When specified, only tests at this path are included. schema: type: string - name: testName in: query + description: Optional filter by test name. When specified, only tests with this name are included. schema: type: string - name: coverageBranches in: query - description: List of coverage branches to consider. + description: List of Git branch names to consider when selecting coverage data for impact analysis. schema: type: array items: @@ -924,16 +1119,7 @@ paths: explode: true - name: coverageAppEnvIds in: query - description: List of coverage app env IDs to consider. - schema: - type: array - items: - type: string - style: form - explode: true - - name: excludeMethodSignatures - in: query - description: List of method signatures to exclude from impact analysis. Tests covering only excluded methods will not be returned. Signature format is "className:methodName:params:returnType" (e.g., "com.example.Class:method1:():void"). + description: List of application environment IDs to consider when selecting coverage data for impact analysis. schema: type: array items: @@ -960,10 +1146,12 @@ paths: - DESC - name: page in: query + description: Page number for pagination (1-based). Defaults to 1 if not specified. 
schema: type: integer - name: pageSize in: query + description: Number of items per page. Defaults to a system-defined value if not specified. schema: type: integer responses: @@ -975,6 +1163,10 @@ paths: $ref: '#/components/schemas/ListDataResponse' post: summary: Get impacted tests (POST) + description: | + POST variant of the impacted tests endpoint. Accepts the same parameters as the GET endpoint but via a JSON + request body instead of query parameters. Useful when the number of filter parameters is large or contains + complex values (e.g., lists of method signatures to exclude) that are impractical to pass as query parameters. operationId: postImpactedTests tags: - metrics @@ -996,6 +1188,10 @@ paths: /api/metrics/impacted-methods: get: summary: Get impacted methods + description: | + Retrieves a paginated list of methods that have been covered by tests and changed between a target build and a baseline build. + While impacted-tests shows which tests are affected by changes, impacted-methods shows + which changed methods are covered (or uncovered) by existing tests. operationId: getImpactedMethods tags: - metrics @@ -1005,83 +1201,80 @@ paths: - name: groupId in: query required: true + description: Unique identifier for the application group. schema: type: string - name: appId in: query required: true + description: Unique identifier for the application. schema: type: string - name: instanceId in: query + description: Instance ID of the target build deployment. schema: type: string - name: commitSha in: query + description: Git commit SHA of the target build. schema: type: string - name: buildVersion in: query + description: Build version of the target build. schema: type: string - name: baselineInstanceId in: query + description: Instance ID of the baseline build deployment. schema: type: string - name: baselineCommitSha in: query + description: Git commit SHA of the baseline build. 
schema: type: string - name: baselineBuildVersion in: query + description: Build version of the baseline build schema: type: string - name: packageName in: query + description: Optional filter by package name to narrow results to methods in a specific package. schema: type: string - name: className in: query + description: Optional filter by class name to narrow results to methods in a specific class. schema: type: string - name: methodName in: query + description: Optional filter by method name to narrow results to a specific method. schema: type: string - name: testTaskId in: query + description: Optional filter by test task ID. When specified, only test launches from this task ID are considered for impact counting. schema: type: string - name: testTag in: query + description: Optional filter by test tag. When specified, only tests with this tag are considered for impact counting. schema: type: string - name: testPath in: query + description: Optional filter by test source path. When specified, only tests at this path are considered for impact counting. schema: type: string - name: testName in: query + description: Optional filter by test name. When specified, only tests with this name are considered for impact counting. schema: type: string - - name: coverageBranches - in: query - description: List of coverage branches to consider. - schema: - type: array - items: - type: string - style: form - explode: true - - name: coverageAppEnvIds - in: query - description: List of coverage app env IDs to consider. - schema: - type: array - items: - type: string - style: form - explode: true - name: sortBy in: query description: Field name to sort results by. Supported values are signature, className, name, impactedTests. @@ -1102,10 +1295,12 @@ paths: - DESC - name: page in: query + description: Page number for pagination (1-based). Defaults to 1 if not specified. schema: type: integer - name: pageSize in: query + description: Number of items per page. 
Defaults to a system-defined value if not specified. schema: type: integer responses: @@ -1117,6 +1312,10 @@ paths: $ref: '#/components/schemas/ListDataResponse' post: summary: Get impacted methods (POST) + description: | + POST variant of the impacted methods endpoint. Accepts the same parameters as the GET endpoint but via a JSON + request body instead of query parameters. Useful when the number of filter parameters is large or contains + complex values that are impractical to pass as query parameters. operationId: postImpactedMethods tags: - metrics @@ -1138,6 +1337,8 @@ paths: /api/metrics/refresh: post: summary: Refresh metrics + description: | + Triggers an immediate ETL (Extract-Transform-Load) job to refresh computed metrics data. operationId: refreshMetrics tags: - metrics @@ -1166,6 +1367,10 @@ paths: /api/metrics/refresh-status: get: summary: Get refresh status + description: | + Retrieves the current status of the metrics ETL (Extract-Transform-Load) refresh process for a specific + application group. Returns information about whether a refresh is currently in progress, the last + successful refresh timestamp, and any error details if the last refresh failed. operationId: metricsRefreshStatus tags: - metrics @@ -1175,6 +1380,7 @@ paths: - name: groupId in: query required: true + description: Unique identifier for the application group whose refresh status is being queried. schema: type: string responses: @@ -1188,6 +1394,8 @@ paths: /api/sign-in: post: summary: Sign in + description: | + Authenticates a user with username and password credentials. On successful authentication, returns a JWT token. operationId: signIn tags: - auth @@ -1207,6 +1415,9 @@ paths: /api/sign-up: post: summary: Sign up + description: | + Registers a new user account in the system. The newly created account requires administrator approval + before it becomes active. 
operationId: signUp tags: - auth @@ -1226,6 +1437,8 @@ paths: /api/sign-out: post: summary: Sign out + description: | + Signs the current user out by clearing the JWT authentication cookie. operationId: signOut tags: - auth @@ -1239,6 +1452,9 @@ paths: /api/user-info: get: summary: Get user info + description: | + Retrieves profile information for the currently authenticated user, including username, role, and + registration status. operationId: getUserInfo tags: - auth @@ -1254,6 +1470,9 @@ paths: /api/update-password: post: summary: Update password + description: | + Changes the password for the currently authenticated user. The old password must be provided for verification + before the new password is set. operationId: updatePassword tags: - auth @@ -1276,6 +1495,8 @@ paths: /api/users: get: summary: Get users + description: | + Retrieves a list of all registered users in the system. operationId: getAllUsers tags: - auth @@ -1293,6 +1514,8 @@ paths: /api/users/{userId}: get: summary: Get user by ID + description: | + Retrieves detailed information for a specific user by their unique numeric identifier. operationId: getUserById tags: - auth @@ -1302,6 +1525,7 @@ paths: - name: userId in: path required: true + description: Unique numeric identifier of the user to retrieve. schema: type: integer responses: @@ -1313,6 +1537,8 @@ paths: $ref: '#/components/schemas/DataResponse' put: summary: Edit user + description: | + Updates the role of a specific user. operationId: editUserById tags: - auth @@ -1322,6 +1548,7 @@ paths: - name: userId in: path required: true + description: Unique numeric identifier of the user to edit. schema: type: integer requestBody: @@ -1339,6 +1566,8 @@ paths: $ref: '#/components/schemas/DataResponse' delete: summary: Delete user + description: | + Permanently deletes a specific user account from the system. 
operationId: deleteUserById tags: - auth @@ -1348,6 +1577,7 @@ paths: - name: userId in: path required: true + description: Unique numeric identifier of the user to delete. schema: type: integer responses: @@ -1360,6 +1590,8 @@ paths: /api/users/{userId}/block: patch: summary: Block user + description: | + Blocks a specific user account, preventing them from authenticating and accessing the system. operationId: blockUserById tags: - auth @@ -1369,6 +1601,7 @@ paths: - name: userId in: path required: true + description: Unique numeric identifier of the user to block. schema: type: integer responses: @@ -1381,6 +1614,8 @@ paths: /api/users/{userId}/unblock: patch: summary: Unblock user + description: | + Unblocks a previously blocked user account, restoring their ability to authenticate and access the system. operationId: unblockUserById tags: - auth @@ -1390,6 +1625,7 @@ paths: - name: userId in: path required: true + description: Unique numeric identifier of the user to unblock. schema: type: integer responses: @@ -1402,6 +1638,8 @@ paths: /api/users/{userId}/reset-password: patch: summary: Reset user password + description: | + Resets the password for a specific user to a new auto-generated password. operationId: resetUserPasswordById tags: - auth @@ -1411,6 +1649,7 @@ paths: - name: userId in: path required: true + description: Unique numeric identifier of the user whose password is being reset. schema: type: integer responses: @@ -1424,6 +1663,8 @@ paths: /api/user-keys: get: summary: Get user API keys + description: | + Retrieves all API keys owned by the currently authenticated user. operationId: getUserApiKeys tags: - auth @@ -1440,6 +1681,9 @@ paths: $ref: '#/components/schemas/ListDataResponse' post: summary: Generate user API key + description: | + Generates a new API key for the currently authenticated user. The API key value is returned only once in the response - it cannot be + retrieved again after creation, so it must be stored securely by the client. 
operationId: generateUserApiKey tags: - auth @@ -1461,6 +1705,8 @@ paths: /api/user-keys/{id}: delete: summary: Delete user API key + description: | + Deletes a specific API key owned by the currently authenticated user. operationId: deleteUserApiKeyById tags: - auth @@ -1470,6 +1716,7 @@ paths: - name: id in: path required: true + description: Unique numeric identifier of the API key to delete. schema: type: integer responses: @@ -1483,6 +1730,8 @@ paths: /api/api-keys: get: summary: Get API keys + description: | + Retrieves a list of all API keys across all users in the system. operationId: getApiKeys tags: - auth @@ -1499,6 +1748,8 @@ paths: $ref: '#/components/schemas/ListDataResponse' post: summary: Generate API key + description: | + Generates a new API key on behalf of any user. operationId: generateApiKey tags: - auth @@ -1520,6 +1771,8 @@ paths: /api/api-keys/{id}: delete: summary: Delete API key + description: | + Deletes any API key by its ID. operationId: deleteApiKeyById tags: - auth @@ -1529,6 +1782,7 @@ paths: - name: id in: path required: true + description: Unique numeric identifier of the API key to delete. schema: type: integer responses: @@ -1908,7 +2162,6 @@ components: description: Optional array of tags for categorizing and filtering tests (e.g., ['slow', 'database', 'critical'], ['@tag.one', '@tag.two']). Used for test selection and organization. items: type: string - default: [ ] metadata: type: object description: Optional json object for storing custom test metadata. Framework or project-specific information. @@ -2003,130 +2256,194 @@ components: description: Application build version identifier. Either commitSha or buildVersion must be provided to identify a build. MethodIgnoreRulePayload: type: object + description: | + Rule definition for excluding methods from coverage analysis and metrics computation. properties: groupId: type: string + description: Unique identifier for the application group this rule applies to. 
appId: type: string + description: Unique identifier for the application this rule applies to. namePattern: type: string nullable: true + description: Glob-style pattern for matching method names to exclude (e.g., 'get*', 'set*', 'toString'). Methods with names matching this pattern will be excluded from analysis. classnamePattern: type: string nullable: true + description: Glob-style pattern for matching fully qualified class names to exclude (e.g., 'com.example.generated.*'). All methods in classes matching this pattern will be excluded. # Auth Schemas LoginPayload: type: object + description: | + User authentication credentials for signing in to the system. + required: + - username + - password properties: username: type: string + description: The username of the account to authenticate. password: type: string + description: The password for the account. SignUpPayload: type: object + description: | + New user registration payload. Creates a user account that requires administrator approval before activation. + required: + - username + - password properties: username: type: string + description: Desired username for the new account. Must be unique in the system. password: type: string + description: Password for the new account. email: type: string + description: Optional email address for the new user. UpdatePasswordPayload: type: object + description: | + Password change request payload. The old password is verified before the new password is set. + required: + - oldPassword + - newPassword properties: oldPassword: type: string + description: The current password of the user. Must match the existing password for verification. newPassword: type: string + description: The new password to set for the user. EditUserPayload: type: object + description: | + Payload for editing user properties. Currently supports changing the user's role. + required: + - role properties: username: type: string + description: Updated username (currently not used for modification). 
email: type: string + description: Updated email address (currently not used for modification). role: type: string + description: New role for the user. Supported values are 'ADMIN' and 'USER'. + enum: + - ADMIN + - USER GenerateApiKeyPayload: type: object + description: | + Payload for generating a new API key. + required: + - name properties: name: type: string + description: Human-readable description or name for the API key (e.g., 'CI/CD pipeline key', 'Agent key for staging'). # Settings Schemas GroupSettingsPayload: type: object + description: | + Configuration settings for an application group. Controls data lifecycle policies and metrics computation scope. properties: retentionPeriodDays: type: integer nullable: true + description: Number of days to retain raw ingested data (builds, coverage, methods, test sessions) before automatic cleanup. Set to null to use the system default. metricsPeriodDays: type: integer nullable: true + description: Number of days of historical data to include when computing metrics and coverage aggregations. Set to null to use the system default. # Metrics Request Schemas ImpactedTestsRequest: type: object + description: | + Request body for the POST variant of the impacted tests endpoint. required: - groupId - appId properties: groupId: type: string + description: Unique identifier for the application group. appId: type: string + description: Unique identifier for the application. instanceId: type: string nullable: true + description: Instance ID of the target build. One of instanceId, commitSha, or buildVersion should be provided. commitSha: type: string nullable: true + description: Git commit SHA of the target build. One of instanceId, commitSha, or buildVersion should be provided. buildVersion: type: string nullable: true + description: Build version of the target build. One of instanceId, commitSha, or buildVersion should be provided. 
baselineInstanceId: type: string nullable: true + description: Instance ID of the baseline build. If not provided, the system automatically selects the previous build. baselineCommitSha: type: string nullable: true + description: Git commit SHA of the baseline build. If not provided, the system automatically selects the previous build. baselineBuildVersion: type: string nullable: true + description: Build version of the baseline build. If not provided, the system automatically selects the previous build. packageName: type: string nullable: true + description: Optional filter to only consider methods in this package when determining impacted tests. className: type: string nullable: true + description: Optional filter to only consider methods in this class when determining impacted tests. methodName: type: string nullable: true + description: Optional filter to only consider methods with this name when determining impacted tests. excludeMethodSignatures: type: array items: type: string - description: List of method signatures to exclude from impact analysis. Signature format is "className:methodName:params:returnType". + description: List of method signatures to exclude from impact analysis. Tests covering only excluded methods will not be returned. Signature format is "className:methodName:params:returnType". testTaskId: type: string nullable: true + description: Optional filter by test task ID. When specified, only tests from launches associated with this task ID are considered. testTag: type: string nullable: true + description: Optional filter by test tag. When specified, only tests with this tag are included. testPath: type: string nullable: true + description: Optional filter by test source path. When specified, only tests at this path are included. testName: type: string nullable: true + description: Optional filter by test name. When specified, only tests with this name are included. 
coverageBranches: type: array items: type: string - description: List of coverage branches to consider. + description: List of Git branch names to consider when selecting coverage data for impact analysis. coverageAppEnvIds: type: array items: type: string - description: List of coverage app env IDs to consider. + description: List of application environment IDs to consider when selecting coverage data for impact analysis. sortBy: type: string enum: @@ -2151,63 +2468,70 @@ components: nullable: true ImpactedMethodsRequest: type: object + description: | + Request body for the POST variant of the impacted methods endpoint. required: - groupId - appId properties: groupId: type: string + description: Unique identifier for the application group. appId: type: string + description: Unique identifier for the application. instanceId: type: string nullable: true + description: Instance ID of the target build. One of instanceId, commitSha, or buildVersion should be provided. commitSha: type: string nullable: true + description: Git commit SHA of the target build. One of instanceId, commitSha, or buildVersion should be provided. buildVersion: type: string nullable: true + description: Build version of the target build. One of instanceId, commitSha, or buildVersion should be provided. baselineInstanceId: type: string nullable: true + description: Instance ID of the baseline build. If not provided, the system automatically selects the previous build. baselineCommitSha: type: string nullable: true + description: Git commit SHA of the baseline build. If not provided, the system automatically selects the previous build. baselineBuildVersion: type: string nullable: true + description: Build version of the baseline build. If not provided, the system automatically selects the previous build. packageName: type: string nullable: true + description: Optional filter by package name to narrow results to methods in a specific package. 
className: type: string nullable: true + description: Optional filter by class name to narrow results to methods in a specific class. methodName: type: string nullable: true + description: Optional filter by method name to narrow results to a specific method. testTaskId: type: string nullable: true + description: Optional filter by test task ID. When specified, only test launches from this task ID are considered for impact counting. testTag: type: string nullable: true + description: Optional filter by test tag. When specified, only tests with this tag are considered for impact counting. testPath: type: string nullable: true + description: Optional filter by test source path. When specified, only tests at this path are considered for impact counting. testName: type: string nullable: true - coverageBranches: - type: array - items: - type: string - description: List of coverage branches to consider. - coverageAppEnvIds: - type: array - items: - type: string - description: List of coverage app env IDs to consider. + description: Optional filter by test name. When specified, only tests with this name are considered for impact counting. 
sortBy: type: string enum: diff --git a/admin-etl/src/main/kotlin/com/epam/drill/admin/etl/impl/BatchDataLoader.kt b/admin-etl/src/main/kotlin/com/epam/drill/admin/etl/impl/BatchDataLoader.kt index a946ece4d..3ee84ab87 100644 --- a/admin-etl/src/main/kotlin/com/epam/drill/admin/etl/impl/BatchDataLoader.kt +++ b/admin-etl/src/main/kotlin/com/epam/drill/admin/etl/impl/BatchDataLoader.kt @@ -125,7 +125,8 @@ abstract class BatchDataLoader( if (previousTimestamp != null && currentTimestamp != previousTimestamp && buffer.isEmpty() && skippedRowsForUpdate >= batchSize) { onLoadingProgress( EtlLoadingResult( - lastProcessedAt = previousTimestamp ?: throw IllegalStateException("Previous timestamp is null"), + lastProcessedAt = previousTimestamp + ?: throw IllegalStateException("Previous timestamp is null"), processedRows = 0, ) ) @@ -154,7 +155,8 @@ abstract class BatchDataLoader( // Commit any remaining rows in the buffer result += flushBuffer(groupId, buffer, batchNo) { batch -> if (batch.success) { - lastLoadedTimestamp = untilTimestamp + lastLoadedTimestamp = previousTimestamp + ?: throw IllegalStateException("Previous timestamp is null") } EtlLoadingResult( errorMessage = if (!batch.success) batch.errorMessage else null, @@ -166,11 +168,13 @@ abstract class BatchDataLoader( } } } else { - // Update last processed timestamp even if no rows were left in the buffer - result += EtlLoadingResult( - lastProcessedAt = untilTimestamp - ).also { - onLoadingProgress(it) + // Update last processed timestamp even if no rows were loaded + if (lastLoadedTimestamp == sinceTimestamp) { + result += EtlLoadingResult( + lastProcessedAt = untilTimestamp + ).also { + onLoadingProgress(it) + } } } onStatusChanged(EtlStatus.SUCCESS) diff --git a/admin-etl/src/main/kotlin/com/epam/drill/admin/etl/impl/EtlOrchestratorImpl.kt b/admin-etl/src/main/kotlin/com/epam/drill/admin/etl/impl/EtlOrchestratorImpl.kt index a1a2df82e..c6b865fae 100644 --- 
a/admin-etl/src/main/kotlin/com/epam/drill/admin/etl/impl/EtlOrchestratorImpl.kt +++ b/admin-etl/src/main/kotlin/com/epam/drill/admin/etl/impl/EtlOrchestratorImpl.kt @@ -43,12 +43,14 @@ open class EtlOrchestratorImpl( override val name: String, open val pipelines: List>, open val metadataRepository: EtlMetadataRepository, + open val consistencyWindow: Long = 0, + open val processingDelay: Long = 0, ) : EtlOrchestrator { private val logger = KotlinLogging.logger {} override suspend fun run(groupId: String, initTimestamp: Instant): List = withContext(Dispatchers.IO) { - logger.info("ETL [$name] for group [$groupId] is starting with init timestamp $initTimestamp...") + logger.info("ETL [$name] for group [$groupId] is starting...") val results = Collections.synchronizedList(mutableListOf()) val duration = measureTimeMillis { trackProgressOf { @@ -97,11 +99,18 @@ open class EtlOrchestratorImpl( pipeline: EtlPipeline<*, *>, initTimestamp: Instant ): EtlProcessingResult = coroutineScope { - val snapshotTime = Instant.now() + val snapshotTime = Instant.now().minusSeconds(processingDelay) val metadata = metadataRepository.getAllMetadataByExtractor(groupId, pipeline.name, pipeline.extractor.name) .associateBy { it.loaderName } val loaderNames = pipeline.loaders.map { it.second.name }.toSet() - val timestampPerLoader = loaderNames.associateWith { (metadata[it]?.lastProcessedAt ?: initTimestamp) } + val timestampPerLoader = loaderNames.associateWith { + val lastProcessedAt = metadata[it]?.lastProcessedAt + if (lastProcessedAt != null) { + lastProcessedAt.minusSeconds(consistencyWindow) + } else { + initTimestamp + } + } try { for (loader in loaderNames) { diff --git a/admin-etl/src/main/kotlin/com/epam/drill/admin/etl/impl/PageDataExtractor.kt b/admin-etl/src/main/kotlin/com/epam/drill/admin/etl/impl/PageDataExtractor.kt index ba0372c0f..2d7c7311b 100644 --- a/admin-etl/src/main/kotlin/com/epam/drill/admin/etl/impl/PageDataExtractor.kt +++ 
b/admin-etl/src/main/kotlin/com/epam/drill/admin/etl/impl/PageDataExtractor.kt @@ -51,7 +51,7 @@ abstract class PageDataExtractor( try { while (hasMore && currentSince < untilTimestamp) { page.incrementAndGet() - logger.debug { "ETL extractor [$name] for group [$groupId] is executing query for page ${page.get()} since $currentSince ..." } + logger.debug { "ETL extractor [$name] for group [$groupId] is executing query for page ${page.get()} since $currentSince until $untilTimestamp ..." } var previousTimestamp: Instant? = null var previousEmittedTimestamp: Instant? = null @@ -95,10 +95,13 @@ abstract class PageDataExtractor( if (pageRows == 0L || pageRows < extractionLimit) { hasMore = false emitBuffer(buffer, emitter) - logger.debug { "ETL extractor [$name] for group [$groupId] completed fetching" + - ", rows fetched: ${rowsFetched.get()}" + - ", total pages: ${page.get()}" + - ", last extracted at $currentSince" } + previousEmittedTimestamp = previousTimestamp + logger.debug { + "ETL extractor [$name] for group [$groupId] completed fetching" + + ", rows fetched: ${rowsFetched.get()}" + + ", total pages: ${page.get()}" + + (if (previousEmittedTimestamp != null) ", last extracted at $previousEmittedTimestamp" else "") + } } else { currentSince = previousEmittedTimestamp ?: throw IllegalStateException( "No rows were emitted on page $page because all fetched records had the same timestamp. Please increase the extraction limit. Current is $extractionLimit." 
@@ -106,7 +109,8 @@ abstract class PageDataExtractor( // Remove rows from buffer that have timestamp greater than currentSince to avoid re-emission on the next page buffer.removeIf { it.timestamp > currentSince } hasMore = true - logger.debug { "ETL extractor [$name] for group [$groupId] fetched $pageRows rows on page ${page.get()}, last extracted at $currentSince" } + logger.debug { "ETL extractor [$name] for group [$groupId] fetched $pageRows rows on page ${page.get()}" + + ", last extracted at $previousEmittedTimestamp" } } } } catch (e: Exception) { diff --git a/admin-etl/src/test/kotlin/com/epam/drill/admin/etl/ETLSimpleTest.kt b/admin-etl/src/test/kotlin/com/epam/drill/admin/etl/ETLSimpleTest.kt index 86b5dd53f..a7136cc8b 100644 --- a/admin-etl/src/test/kotlin/com/epam/drill/admin/etl/ETLSimpleTest.kt +++ b/admin-etl/src/test/kotlin/com/epam/drill/admin/etl/ETLSimpleTest.kt @@ -304,4 +304,36 @@ class ETLSimpleTest { assertTrue(result2.first().status == EtlStatus.SUCCESS) assertEquals(3, result2.first().rowsProcessed) } + + @Test + fun `given consistencyWindow, ETL orchestrator should re-process records within the lookback window`() = runBlocking { + val groupId = "test-group" + val orchestrator = EtlOrchestratorImpl( + name = "lookback-etl", + pipelines = listOf( + EtlPipelineImpl.singleLoader( + "simple-pipeline", + extractor = SimpleExtractor(), + loader = SimpleLoader() + ) + ), + metadataRepository = SimpleMetadataRepository(), + consistencyWindow = 60 + ) + + // Add initial data + addNewRecords(5) + // First run ETL — should process all initial data + val result1 = orchestrator.run(groupId) + assertTrue(result1.first().status == EtlStatus.SUCCESS) + assertEquals(5, result1.first().rowsProcessed) + + // Add new data after last processed timestamp + addNewRecords(3) + // Second run ETL — lookback of 60s should re-process all 8 records (5 original + 3 new) + // because all records were created within the last 60 seconds + val result2 = 
orchestrator.run(groupId) + assertTrue(result2.first().status == EtlStatus.SUCCESS) + assertEquals(8, result2.first().rowsProcessed) + } } diff --git a/admin-metrics/src/main/kotlin/com/epam/drill/admin/metrics/config/EtlConfig.kt b/admin-metrics/src/main/kotlin/com/epam/drill/admin/metrics/config/EtlConfig.kt index 2d7e5ac3f..f3f563f4c 100644 --- a/admin-metrics/src/main/kotlin/com/epam/drill/admin/metrics/config/EtlConfig.kt +++ b/admin-metrics/src/main/kotlin/com/epam/drill/admin/metrics/config/EtlConfig.kt @@ -56,4 +56,19 @@ class EtlConfig(private val config: ApplicationConfig) { */ val loggingFrequency : Int get() = config.propertyOrNull("loggingFrequency")?.getString()?.toIntOrNull() ?: 10 + + /** + * Number of seconds to subtract from the last processed timestamp when starting an ETL run. + * This allows re-processing records that arrived in the source database with a delay + * or were inconsistent at the time of the previous ETL run. + */ + val consistencyWindow : Long + get() = config.propertyOrNull("consistencyWindow")?.getString()?.toLongOrNull() ?: 0L + + /** + * Number of seconds to subtract from the current time when calculating the upper bound of the ETL processing window. + * This delays ETL process to allow current transactions in the data source to complete. 
+ */ + val processingDelay : Long + get() = config.propertyOrNull("processingDelay")?.getString()?.toLongOrNull() ?: 0L } \ No newline at end of file diff --git a/admin-metrics/src/main/kotlin/com/epam/drill/admin/metrics/config/MetricsModule.kt b/admin-metrics/src/main/kotlin/com/epam/drill/admin/metrics/config/MetricsModule.kt index 3a5d73f8b..6d5d29fec 100644 --- a/admin-metrics/src/main/kotlin/com/epam/drill/admin/metrics/config/MetricsModule.kt +++ b/admin-metrics/src/main/kotlin/com/epam/drill/admin/metrics/config/MetricsModule.kt @@ -26,6 +26,7 @@ import com.epam.drill.admin.metrics.etl.methodsPipeline import com.epam.drill.admin.metrics.etl.buildsPipeline import com.epam.drill.admin.metrics.etl.coveragePipeline import com.epam.drill.admin.metrics.etl.testDefinitionsPipeline +import com.epam.drill.admin.metrics.etl.testLaunchCoveragePipeline import com.epam.drill.admin.metrics.etl.testLaunchesPipeline import com.epam.drill.admin.metrics.etl.testSessionBuildsPipeline import com.epam.drill.admin.metrics.etl.testSessionsPipeline @@ -63,9 +64,11 @@ val metricsDIModule pipelines = listOf( buildsPipeline, methodsPipeline, testLaunchesPipeline, testDefinitionsPipeline, testSessionsPipeline, - coveragePipeline, testSessionBuildsPipeline + coveragePipeline, testLaunchCoveragePipeline, testSessionBuildsPipeline ), - metadataRepository = instance() + metadataRepository = instance(), + consistencyWindow = etlConfig.consistencyWindow, + processingDelay = etlConfig.processingDelay ) } } diff --git a/admin-metrics/src/main/kotlin/com/epam/drill/admin/metrics/etl/CoverageEtl.kt b/admin-metrics/src/main/kotlin/com/epam/drill/admin/metrics/etl/CoverageEtl.kt index ea8cde0ac..b26eac8e7 100644 --- a/admin-metrics/src/main/kotlin/com/epam/drill/admin/metrics/etl/CoverageEtl.kt +++ b/admin-metrics/src/main/kotlin/com/epam/drill/admin/metrics/etl/CoverageEtl.kt @@ -38,6 +38,17 @@ val EtlConfig.coverageExtractor lastExtractedAtColumnName = "created_at", ) +val 
EtlConfig.testLaunchCoverageExtractor + get() = UntypedSqlDataExtractor( + name = "test_launch_coverage", + sqlQuery = fromResource("/metrics/db/etl/test_launch_coverage_extractor.sql"), + database = MetricsDatabaseConfig.database, + fetchSize = fetchSize, + extractionLimit = extractionLimit, + loggingFrequency = loggingFrequency, + lastExtractedAtColumnName = "test_completed_at", + ) + val EtlConfig.buildMethodTestDefinitionCoverageLoader get() = UntypedSqlDataLoader( name = "build_method_test_definition_coverage", @@ -161,6 +172,19 @@ val EtlConfig.coveragePipeline get() = EtlPipelineImpl( name = "coverage", extractor = coverageExtractor, + loaders = listOf( + untypedNopTransformer to buildMethodTestSessionCoverageLoader, + buildMethodCoverageTransformer to buildMethodCoverageLoader, + methodDailyCoverageTransformer to methodDailyCoverageLoader, + untypedNopTransformer to testSessionBuildsLoader + ), + bufferSize = bufferSize + ) + +val EtlConfig.testLaunchCoveragePipeline + get() = EtlPipelineImpl( + name = "test_launch_coverage", + extractor = testLaunchCoverageExtractor, loaders = listOf( untypedNopTransformer to buildMethodTestDefinitionCoverageLoader, untypedNopTransformer to buildMethodTestSessionCoverageLoader, diff --git a/admin-metrics/src/main/kotlin/com/epam/drill/admin/metrics/service/impl/MetricsServiceImpl.kt b/admin-metrics/src/main/kotlin/com/epam/drill/admin/metrics/service/impl/MetricsServiceImpl.kt index 8ceba9e2f..07f75cd45 100644 --- a/admin-metrics/src/main/kotlin/com/epam/drill/admin/metrics/service/impl/MetricsServiceImpl.kt +++ b/admin-metrics/src/main/kotlin/com/epam/drill/admin/metrics/service/impl/MetricsServiceImpl.kt @@ -516,10 +516,10 @@ class MetricsServiceImpl( page: Int?, pageSize: Int? 
): PagedList = transaction { - val baselineBuildId = build.id.takeIf { metricsRepository.buildExists(it) } + val targetBuildId = build.id.takeIf { metricsRepository.buildExists(it) } ?: throw BuildNotFound("Target build info not found for ${build.id}") - val targetBuildId = baselineBuild.id.takeIf { metricsRepository.buildExists(it) } + val baselineBuildId = baselineBuild.id.takeIf { metricsRepository.buildExists(it) } ?: throw BuildNotFound("Baseline build info not found for ${baselineBuild.id}") // Map response field names to database column names @@ -581,10 +581,10 @@ class MetricsServiceImpl( page: Int?, pageSize: Int? ): PagedList = transaction { - val baselineBuildId = build.id.takeIf { metricsRepository.buildExists(it) } + val targetBuildId = build.id.takeIf { metricsRepository.buildExists(it) } ?: throw BuildNotFound("Target build info not found for ${build.id}") - val targetBuildId = baselineBuild.id.takeIf { metricsRepository.buildExists(it) } + val baselineBuildId = baselineBuild.id.takeIf { metricsRepository.buildExists(it) } ?: throw BuildNotFound("Baseline build info not found for ${baselineBuild.id}") // Map response field names to database column names diff --git a/admin-metrics/src/main/resources/metrics/db/etl/build_methods_extractor.sql b/admin-metrics/src/main/resources/metrics/db/etl/build_methods_extractor.sql index e7cc2b722..3b36e856c 100644 --- a/admin-metrics/src/main/resources/metrics/db/etl/build_methods_extractor.sql +++ b/admin-metrics/src/main/resources/metrics/db/etl/build_methods_extractor.sql @@ -26,5 +26,5 @@ WHERE bm.group_id = :group_id AND (r.classname_pattern IS NOT NULL AND m.class_name::text ~ r.classname_pattern::text OR r.name_pattern IS NOT NULL AND m.method_name::text ~ r.name_pattern::text) ) -ORDER BY bm.created_at ASC, bm.group_id, bm.method_id +ORDER BY bm.created_at ASC, bm.method_id LIMIT :limit \ No newline at end of file diff --git a/admin-metrics/src/main/resources/metrics/db/etl/coverage_extractor.sql 
b/admin-metrics/src/main/resources/metrics/db/etl/coverage_extractor.sql index 2b34be0f6..57f93dee7 100644 --- a/admin-metrics/src/main/resources/metrics/db/etl/coverage_extractor.sql +++ b/admin-metrics/src/main/resources/metrics/db/etl/coverage_extractor.sql @@ -3,17 +3,17 @@ SELECT c.app_id, i.build_id, i.env_id AS app_env_id, - CASE WHEN c.test_session_id = 'GLOBAL' THEN NULL ELSE c.test_session_id END AS test_session_id, - CASE WHEN c.test_id = 'TEST_CONTEXT_NONE' THEN NULL ELSE c.test_id END AS test_launch_id, + ts.id AS test_session_id, + NULL AS test_launch_id, c.method_id, m.signature, b.branch, - tl.test_definition_id, - test_tag, - td.path AS test_path, - td.name AS test_name, + NULL AS test_definition_id, + NULL AS test_tag, + NULL AS test_path, + NULL AS test_name, ts.test_task_id, - tl.result AS test_result, + NULL AS test_result, c.created_at, DATE_TRUNC('day', c.created_at) AS created_at_day, c.probes AS probes @@ -30,11 +30,9 @@ JOIN raw_data.methods m ON m.method_id = c.method_id AND m.app_id = c.app_id AND JOIN raw_data.instances i ON i.id = c.instance_id AND i.app_id = c.app_id AND i.group_id = c.group_id JOIN raw_data.builds b ON b.group_id = c.group_id AND b.app_id = c.app_id AND b.id = c.build_id LEFT JOIN raw_data.test_sessions ts ON ts.id = c.test_session_id AND ts.group_id = c.group_id -LEFT JOIN raw_data.test_launches tl ON tl.id = c.test_id AND tl.group_id = c.group_id -LEFT JOIN raw_data.test_definitions td ON td.group_id = tl.group_id AND td.id = tl.test_definition_id -LEFT JOIN LATERAL unnest(td.tags) AS test_tag ON TRUE WHERE c.created_at > :since_timestamp AND c.created_at <= :until_timestamp AND c.group_id = :group_id -ORDER BY c.created_at, c.group_id, c.method_id + AND c.test_id IS NULL +ORDER BY c.created_at, c.method_id LIMIT :limit \ No newline at end of file diff --git a/admin-metrics/src/main/resources/metrics/db/etl/test_launch_coverage_extractor.sql 
b/admin-metrics/src/main/resources/metrics/db/etl/test_launch_coverage_extractor.sql new file mode 100644 index 000000000..ba831e3bd --- /dev/null +++ b/admin-metrics/src/main/resources/metrics/db/etl/test_launch_coverage_extractor.sql @@ -0,0 +1,41 @@ +SELECT + c.group_id, + c.app_id, + i.build_id, + i.env_id AS app_env_id, + ts.id AS test_session_id, + tl.id AS test_launch_id, + c.method_id, + m.signature, + b.branch, + tl.test_definition_id, + test_tag, + td.path AS test_path, + td.name AS test_name, + ts.test_task_id, + tl.result AS test_result, + tl.created_at AS test_completed_at, + c.created_at AS created_at, + DATE_TRUNC('day', c.created_at) AS created_at_day, + c.probes AS probes +FROM raw_data.test_launches tl +JOIN raw_data.test_sessions ts ON ts.id = tl.test_session_id AND ts.group_id = tl.group_id +JOIN raw_data.test_definitions td ON td.id = tl.test_definition_id AND td.group_id = tl.group_id +JOIN raw_data.method_coverage c ON c.test_id = tl.id AND c.group_id = tl.group_id +JOIN raw_data.methods m ON m.method_id = c.method_id AND m.app_id = c.app_id AND m.group_id = c.group_id + AND NOT EXISTS ( + SELECT 1 + FROM raw_data.method_ignore_rules r + WHERE r.group_id = m.group_id + AND r.app_id = m.app_id + AND (r.classname_pattern IS NOT NULL AND m.class_name::text ~ r.classname_pattern::text + OR r.name_pattern IS NOT NULL AND m.method_name::text ~ r.name_pattern::text) + ) +JOIN raw_data.instances i ON i.id = c.instance_id AND i.app_id = c.app_id AND i.group_id = c.group_id +JOIN raw_data.builds b ON b.group_id = c.group_id AND b.app_id = c.app_id AND b.id = c.build_id +LEFT JOIN LATERAL unnest(td.tags) AS test_tag ON TRUE +WHERE tl.group_id = :group_id + AND tl.created_at > :since_timestamp + AND tl.created_at <= :until_timestamp +ORDER BY tl.created_at, c.created_at, c.method_id +LIMIT :limit \ No newline at end of file diff --git a/admin-metrics/src/test/kotlin/com/epam/drill/admin/metrics/DataIngestClient.kt 
b/admin-metrics/src/test/kotlin/com/epam/drill/admin/metrics/DataIngestClient.kt index c3a0a4b9e..fb9f28d5a 100644 --- a/admin-metrics/src/test/kotlin/com/epam/drill/admin/metrics/DataIngestClient.kt +++ b/admin-metrics/src/test/kotlin/com/epam/drill/admin/metrics/DataIngestClient.kt @@ -98,7 +98,7 @@ val TestDetails.definitionId: String } suspend fun HttpClient.putBuild(payload: BuildPayload): HttpResponse { - return put("/data-ingest/builds") { + return put("/data-ingest/builds/info") { setBody(payload) }.assertSuccessStatus() } diff --git a/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/entity/Build.kt b/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/entity/Build.kt index f3cbd9535..a96cf6d12 100644 --- a/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/entity/Build.kt +++ b/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/entity/Build.kt @@ -23,9 +23,9 @@ class Build( val appId: String, val commitSha: String?, val buildVersion: String?, - val branch: String?, val instanceId: String?, - val commitDate: LocalDateTime?, - val commitMessage: String?, - val commitAuthor: String? + val branch: String? = null, + val commitDate: LocalDateTime? = null, + val commitMessage: String? = null, + val commitAuthor: String? 
= null ) diff --git a/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/repository/BuildRepository.kt b/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/repository/BuildRepository.kt index 8614384bf..fa8764c40 100644 --- a/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/repository/BuildRepository.kt +++ b/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/repository/BuildRepository.kt @@ -19,7 +19,8 @@ import com.epam.drill.admin.writer.rawdata.entity.Build import java.time.LocalDate interface BuildRepository { - suspend fun create(build: Build) + suspend fun saveBuildInfo(build: Build) + suspend fun saveBuildId(build: Build) suspend fun existsById(groupId: String, appId: String, buildId: String): Boolean suspend fun deleteAllCreatedBefore(groupId: String, createdBefore: LocalDate) suspend fun deleteByBuildId(groupId: String, appId: String, buildId: String) diff --git a/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/repository/impl/BuildRepositoryImpl.kt b/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/repository/impl/BuildRepositoryImpl.kt index b1f159d9c..bc16dd0fd 100644 --- a/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/repository/impl/BuildRepositoryImpl.kt +++ b/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/repository/impl/BuildRepositoryImpl.kt @@ -27,9 +27,11 @@ import org.jetbrains.exposed.sql.upsert import java.time.LocalDate class BuildRepositoryImpl: BuildRepository { - override suspend fun create(build: Build) { + override suspend fun saveBuildInfo(build: Build) { BuildTable.upsert( - onUpdateExclude = listOf(BuildTable.createdAt), + onUpdateExclude = listOf( + BuildTable.createdAt, + ), ) { it[id] = build.id it[groupId] = build.groupId @@ -44,6 +46,27 @@ class BuildRepositoryImpl: BuildRepository { it[updatedAt] = org.jetbrains.exposed.sql.javatime.CurrentDateTime } } + + override suspend fun saveBuildId(build: 
Build) { + BuildTable.upsert( + onUpdateExclude = listOf( + BuildTable.createdAt, + BuildTable.branch, + BuildTable.committedAt, + BuildTable.commitAuthor, + BuildTable.commitMessage + ), + ) { + it[id] = build.id + it[groupId] = build.groupId + it[appId] = build.appId + it[commitSha] = build.commitSha + it[buildVersion] = build.buildVersion + it[instanceId] = build.instanceId + it[updatedAt] = org.jetbrains.exposed.sql.javatime.CurrentDateTime + } + } + override suspend fun existsById(groupId: String, appId: String, buildId: String): Boolean { return BuildTable.selectAll().where { (BuildTable.groupId eq groupId) and diff --git a/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/route/RawDataWriterRoutes.kt b/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/route/RawDataWriterRoutes.kt index d02cf23f9..09b3bf4ad 100644 --- a/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/route/RawDataWriterRoutes.kt +++ b/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/route/RawDataWriterRoutes.kt @@ -49,6 +49,9 @@ private val logger = KotlinLogging.logger {} @Resource("builds") class BuildsRoute() +@Resource("builds/info") +class BuildsInfoRoute() + @Resource("instances") class InstancesRoute() @@ -81,6 +84,7 @@ class MethodIgnoreRulesRoute() { fun Route.dataIngestRoutes() { route("/data-ingest") { putBuilds() + putBuildsInfo() putInstances() postCoverage() putMethods() @@ -104,6 +108,15 @@ fun Route.putBuilds() { } } +fun Route.putBuildsInfo() { + val rawDataWriter by closestDI().instance() + + put { + rawDataWriter.saveBuildInfo(call.decompressAndReceive()) + call.ok("Build info saved") + } +} + fun Route.putInstances() { val rawDataWriter by closestDI().instance() diff --git a/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/service/RawDataWriter.kt b/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/service/RawDataWriter.kt index 853f4ffcb..ebdca25d3 100644 --- 
a/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/service/RawDataWriter.kt +++ b/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/service/RawDataWriter.kt @@ -21,6 +21,7 @@ import com.epam.drill.admin.writer.rawdata.views.MethodIgnoreRuleView interface RawDataWriter { suspend fun saveBuild(buildPayload: BuildPayload) + suspend fun saveBuildInfo(buildPayload: BuildPayload) suspend fun saveInstance(instancePayload: InstancePayload) suspend fun saveMethods(methodsPayload: MethodsPayload) suspend fun saveCoverage(coveragePayload: CoveragePayload) diff --git a/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/service/impl/RawDataServiceImpl.kt b/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/service/impl/RawDataServiceImpl.kt index 3231a4843..8c37b30d1 100644 --- a/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/service/impl/RawDataServiceImpl.kt +++ b/admin-writer/src/main/kotlin/com/epam/drill/admin/writer/rawdata/service/impl/RawDataServiceImpl.kt @@ -32,7 +32,7 @@ import java.time.ZonedDateTime import java.time.format.DateTimeFormatter import java.util.* -private const val EXEC_DATA_BATCH_SIZE = 100 +private const val EXEC_DATA_BATCH_SIZE = 1000 class RawDataServiceImpl( private val instanceRepository: InstanceRepository, @@ -47,6 +47,26 @@ class RawDataServiceImpl( ) : RawDataWriter { override suspend fun saveBuild(buildPayload: BuildPayload) { + val build = Build( + id = generateBuildId( + buildPayload.groupId, + buildPayload.appId, + "", + buildPayload.commitSha, + buildPayload.buildVersion + ), + groupId = buildPayload.groupId, + appId = buildPayload.appId, + instanceId = null, + commitSha = buildPayload.commitSha, + buildVersion = buildPayload.buildVersion, + ) + transaction { + buildRepository.saveBuildId(build) + } + } + + override suspend fun saveBuildInfo(buildPayload: BuildPayload) { val build = Build( id = generateBuildId( buildPayload.groupId, @@ -66,7 +86,7 @@ class 
RawDataServiceImpl( commitAuthor = buildPayload.commitAuthor ) transaction { - buildRepository.create(build) + buildRepository.saveBuildInfo(build) } } @@ -94,12 +114,8 @@ class RawDataServiceImpl( instanceId = instancePayload.instanceId, commitSha = instancePayload.commitSha, buildVersion = instancePayload.buildVersion, - branch = null, - commitDate = null, - commitMessage = null, - commitAuthor = null ) - buildRepository.create(build) + buildRepository.saveBuildId(build) } instanceRepository.create(instance) } @@ -167,8 +183,8 @@ class RawDataServiceImpl( coverage.bodyChecksum, coverage.probes.size ).joinToString(":").md5(), - testId = coverage.testId, - testSessionId = coverage.testSessionId, + testId = coverage.testId?.takeIf { it != "TEST_CONTEXT_NONE" }, + testSessionId = coverage.testSessionId?.takeIf { it != "GLOBAL" }, probes = coverage.probes ) } diff --git a/admin-writer/src/main/resources/raw_data/db/migration/V32__normalize_coverage_test_and_session_ids.sql b/admin-writer/src/main/resources/raw_data/db/migration/V32__normalize_coverage_test_and_session_ids.sql new file mode 100644 index 000000000..5b418f1ef --- /dev/null +++ b/admin-writer/src/main/resources/raw_data/db/migration/V32__normalize_coverage_test_and_session_ids.sql @@ -0,0 +1,7 @@ +UPDATE raw_data.method_coverage +SET test_session_id = NULL +WHERE test_session_id = 'GLOBAL'; + +UPDATE raw_data.method_coverage +SET test_id = NULL +WHERE test_id = 'TEST_CONTEXT_NONE'; \ No newline at end of file diff --git a/admin-writer/src/test/kotlin/com/epam/drill/admin/writer/rawdata/BuildsApiTest.kt b/admin-writer/src/test/kotlin/com/epam/drill/admin/writer/rawdata/BuildsApiTest.kt index 23f6c4d00..7260a0a54 100644 --- a/admin-writer/src/test/kotlin/com/epam/drill/admin/writer/rawdata/BuildsApiTest.kt +++ b/admin-writer/src/test/kotlin/com/epam/drill/admin/writer/rawdata/BuildsApiTest.kt @@ -16,6 +16,7 @@ package com.epam.drill.admin.writer.rawdata import 
com.epam.drill.admin.writer.rawdata.route.putBuilds +import com.epam.drill.admin.writer.rawdata.route.putBuildsInfo import com.epam.drill.admin.writer.rawdata.table.BuildTable import com.epam.drill.admin.test.* import com.epam.drill.admin.writer.rawdata.config.RawDataWriterDatabaseConfig @@ -28,6 +29,7 @@ import java.time.LocalDateTime import kotlin.test.Test import kotlin.test.assertEquals import kotlin.test.assertNotNull +import kotlin.test.assertNull import kotlin.test.assertTrue class BuildsApiTest : DatabaseTests({ RawDataWriterDatabaseConfig.init(it) }) { @@ -50,8 +52,56 @@ class BuildsApiTest : DatabaseTests({ RawDataWriterDatabaseConfig.init(it) }) { "groupId": "$testGroup", "appId": "$testApp", "buildVersion": "$testBuildVersion", + "commitSha": "a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2" + } + """.trimIndent() + ) + }.apply { + assertEquals(HttpStatusCode.OK, status) + assertJsonEquals( + """ + { + "message": "Build saved" + } + """.trimIndent(), bodyAsText() + ) + } + + val savedBuilds = BuildTable.selectAll() + .filter { it[BuildTable.groupId] == testGroup } + .filter { it[BuildTable.appId] == testApp } + .filter { it[BuildTable.buildVersion] == testBuildVersion } + assertEquals(1, savedBuilds.size) + savedBuilds.forEach { + assertNull(it[BuildTable.branch]) + assertNotNull(it[BuildTable.commitSha]) + assertNull(it[BuildTable.commitAuthor]) + assertNull(it[BuildTable.commitMessage]) + assertNull(it[BuildTable.committedAt]) + assertTrue(it[BuildTable.createdAt] >= timeBeforeTest) + } + } + + @Test + fun `given new build, put builds info should create new build with info fields and return OK`() = withRollback { + val testGroup = "test-group" + val testApp = "test-app" + val testBuildVersion = "2.0.0" + val timeBeforeTest = LocalDateTime.now() + val app = drillApplication(rawDataServicesDIModule) { + putBuildsInfo() + } + + app.client.put("/builds/info") { + header(HttpHeaders.ContentType, ContentType.Application.Json.toString()) + setBody( + """ + { + 
"groupId": "$testGroup", + "appId": "$testApp", + "buildVersion": "$testBuildVersion", + "commitSha": "a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2", "branch": "main", - "commitSha": "d3516472fd72cd0f9ccb7a1dc4b5e7b80a014fd2", "commitMessage": "Initial commit", "commitDate": "Thu Feb 27 10:06:24 2025 +0100", "commitAuthor": "John Doe" @@ -63,7 +113,7 @@ class BuildsApiTest : DatabaseTests({ RawDataWriterDatabaseConfig.init(it) }) { assertJsonEquals( """ { - "message": "Build saved" + "message": "Build info saved" } """.trimIndent(), bodyAsText() ) @@ -75,12 +125,69 @@ class BuildsApiTest : DatabaseTests({ RawDataWriterDatabaseConfig.init(it) }) { .filter { it[BuildTable.buildVersion] == testBuildVersion } assertEquals(1, savedBuilds.size) savedBuilds.forEach { - assertNotNull(it[BuildTable.branch]) + assertEquals("main", it[BuildTable.branch]) assertNotNull(it[BuildTable.commitSha]) + assertEquals("John Doe", it[BuildTable.commitAuthor]) + assertEquals("Initial commit", it[BuildTable.commitMessage]) + assertNotNull(it[BuildTable.committedAt]) + assertTrue(it[BuildTable.createdAt] >= timeBeforeTest) + } + } + + @Test + fun `given existing build info, put builds should not update info fields and return OK`() = withRollback { + val testGroup = "test-group" + val testApp = "test-app" + val testBuildVersion = "3.0.0" + val testCommitSha = "b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3" + val app = drillApplication(rawDataServicesDIModule) { + putBuildsInfo() + putBuilds() + } + app.client.put("/builds/info") { + header(HttpHeaders.ContentType, ContentType.Application.Json.toString()) + setBody( + """ + { + "groupId": "$testGroup", + "appId": "$testApp", + "buildVersion": "$testBuildVersion", + "commitSha": "$testCommitSha", + "branch": "develop", + "commitMessage": "Feature commit", + "commitDate": "Thu Feb 27 10:06:24 2025 +0100", + "commitAuthor": "Jane Doe" + } + """.trimIndent() + ) + } + + app.client.put("/builds") { + header(HttpHeaders.ContentType, 
ContentType.Application.Json.toString()) + setBody( + """ + { + "groupId": "$testGroup", + "appId": "$testApp", + "buildVersion": "$testBuildVersion", + "commitSha": "$testCommitSha" + } + """.trimIndent() + ) + }.apply { + assertEquals(HttpStatusCode.OK, status) + } + + val buildsAfterUpdate = BuildTable.selectAll() + .filter { it[BuildTable.groupId] == testGroup } + .filter { it[BuildTable.appId] == testApp } + .filter { it[BuildTable.buildVersion] == testBuildVersion } + assertEquals(1, buildsAfterUpdate.size) + buildsAfterUpdate.forEach { + assertNotNull(it[BuildTable.branch]) assertNotNull(it[BuildTable.commitAuthor]) assertNotNull(it[BuildTable.commitMessage]) assertNotNull(it[BuildTable.committedAt]) - assertTrue(it[BuildTable.createdAt] >= timeBeforeTest) } }