diff --git a/pantera-main/Dockerfile b/pantera-main/Dockerfile
index 00913abbf..39b7d2d6b 100644
--- a/pantera-main/Dockerfile
+++ b/pantera-main/Dockerfile
@@ -1,9 +1,30 @@
FROM eclipse-temurin:21-jre-alpine
ARG JAR_FILE
ARG APM_VERSION=1.55.4
+ARG TRIVY_VERSION=0.69.3
+ARG GRYPE_VERSION=0.110.0
-# Install curl for downloading Elastic APM agent + jattach for programmatic attachment (jmap + jstack + jcmd + jinfo)
-RUN apk add --no-cache curl jattach
+# Install curl, jattach, and vulnerability scanners (Trivy + Grype).
+# Both are static Go binaries — no glibc required on Alpine.
+# Trivy asset names use a hyphen (Linux-64bit), Grype uses underscore (linux_amd64).
+RUN apk add --no-cache curl jattach && \
+ ARCH="$(uname -m)" && \
+ case "${ARCH}" in \
+ x86_64) TRIVY_ARCH="Linux-64bit" ; GRYPE_ARCH="amd64" ;; \
+ aarch64) TRIVY_ARCH="Linux-ARM64" ; GRYPE_ARCH="arm64" ;; \
+ armv7l) TRIVY_ARCH="Linux-ARM" ; GRYPE_ARCH="armv6" ;; \
+ *) echo "Unsupported arch: ${ARCH}" && exit 1 ;; \
+ esac && \
+ curl -fsSL "https://github.com/aquasecurity/trivy/releases/download/v${TRIVY_VERSION}/trivy_${TRIVY_VERSION}_${TRIVY_ARCH}.tar.gz" \
+ -o /tmp/trivy.tar.gz && \
+ tar -xzf /tmp/trivy.tar.gz -C /usr/local/bin trivy && \
+ rm /tmp/trivy.tar.gz && \
+ chmod 755 /usr/local/bin/trivy && \
+ curl -fsSL "https://github.com/anchore/grype/releases/download/v${GRYPE_VERSION}/grype_${GRYPE_VERSION}_linux_${GRYPE_ARCH}.tar.gz" \
+ -o /tmp/grype.tar.gz && \
+ tar -xzf /tmp/grype.tar.gz -C /usr/local/bin grype && \
+ rm /tmp/grype.tar.gz && \
+ chmod 755 /usr/local/bin/grype
ENV JVM_ARGS="-XX:+UseG1GC -XX:MaxGCPauseMillis=300 \
-XX:G1HeapRegionSize=16m \
@@ -20,7 +41,8 @@ ENV JVM_ARGS="-XX:+UseG1GC -XX:MaxGCPauseMillis=300 \
RUN addgroup -g 2020 -S pantera && \
adduser -u 2021 -S -G pantera -s /sbin/nologin pantera && \
- mkdir -p /etc/pantera /usr/lib/pantera /var/pantera/logs/dumps /var/pantera/cache/tmp /opt/apm && \
+ mkdir -p /etc/pantera /usr/lib/pantera /var/pantera/logs/dumps /var/pantera/cache/tmp /opt/apm \
+ /var/pantera/trivy/db /var/pantera/grype/db && \
chown -R pantera:pantera /etc/pantera /usr/lib/pantera /var/pantera && \
curl -L "https://repo1.maven.org/maven2/co/elastic/apm/elastic-apm-agent/${APM_VERSION}/elastic-apm-agent-${APM_VERSION}.jar" \
-o /opt/apm/elastic-apm-agent.jar && \
@@ -28,6 +50,10 @@ RUN addgroup -g 2020 -S pantera && \
ENV TMPDIR=/var/pantera/cache/tmp
ENV PANTERA_VERSION=2.0.7
+# Vulnerability scanner DB directories — each scanner downloads its CVE database
+# on first scan and caches it here. Mount named volumes for persistence.
+ENV TRIVY_CACHE_DIR=/var/pantera/trivy/db
+ENV GRYPE_DB_CACHE_DIR=/var/pantera/grype/db
USER 2021:2020
diff --git a/pantera-main/docker-compose/.env.example b/pantera-main/docker-compose/.env.example
index f91452e0a..8785220fd 100644
--- a/pantera-main/docker-compose/.env.example
+++ b/pantera-main/docker-compose/.env.example
@@ -8,6 +8,14 @@
# -----------------------------------------------------------------------------
PANTERA_VERSION=2.0.7
PANTERA_UI_VERSION=2.0.7
+
+# -----------------------------------------------------------------------------
+# Vulnerability Scanners (versions pinned in the Dockerfile)
+# Both binaries are installed in the pantera container and manage their own
+# CVE databases. DBs are persisted in named volumes (trivy-db, grype-db).
+# Trivy: https://github.com/aquasecurity/trivy/releases
+# Grype: https://github.com/anchore/grype/releases
+# -----------------------------------------------------------------------------
PANTERA_USER_NAME=PANTERA
PANTERA_USER_PASS=changeme
PANTERA_CONFIG=/etc/PANTERA/PANTERA.yml
diff --git a/pantera-main/docker-compose/docker-compose.yaml b/pantera-main/docker-compose/docker-compose.yaml
index 739a35aa4..21416b060 100644
--- a/pantera-main/docker-compose/docker-compose.yaml
+++ b/pantera-main/docker-compose/docker-compose.yaml
@@ -61,6 +61,9 @@ services:
- KEYCLOAK_CLIENT_SECRET=${KEYCLOAK_CLIENT_SECRET}
- POSTGRES_USER=${POSTGRES_USER}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
+ # Vulnerability scanner DB directories — persisted across restarts
+ - TRIVY_CACHE_DIR=/var/pantera/trivy/db
+ - GRYPE_DB_CACHE_DIR=/var/pantera/grype/db
volumes:
- ./pantera/pantera.yml:/etc/pantera/pantera.yml
- ./log4j2.xml:/etc/pantera/log4j2.xml
@@ -70,6 +73,9 @@ services:
- ./pantera/cache:/var/pantera/cache
- ./pantera/cache/log:/var/pantera/logs/
- ~/.aws:/home/.aws
+ # Vulnerability scanner databases — downloaded on first scan
+ - trivy-db:/var/pantera/trivy/db
+ - grype-db:/var/pantera/grype/db
networks:
- pantera-net
# - es
@@ -236,4 +242,6 @@ networks:
volumes:
valkey-data:
prometheus-data:
- grafana-data:
\ No newline at end of file
+ grafana-data:
+ trivy-db:
+ grype-db:
\ No newline at end of file
diff --git a/pantera-main/docker-compose/pantera/pantera.yml b/pantera-main/docker-compose/pantera/pantera.yml
index 245c95a0f..8c8096ef9 100755
--- a/pantera-main/docker-compose/pantera/pantera.yml
+++ b/pantera-main/docker-compose/pantera/pantera.yml
@@ -53,7 +53,32 @@ meta:
repo_types:
npm-proxy:
enabled: true
-
+
+ vulnerability:
+ # Set to true to enable on-demand vulnerability scanning.
+ # The scanner binary must be present in the container.
+ enabled: true
+ # Scanner backend type. Supported values: "trivy", "grype"
+ scanner_type: trivy
+ # Path to the scanner binary. Must be on PATH (both are installed in the container).
+ scanner_path: trivy
+ # Hours before a cached scan result is considered stale.
+ # Users will see a "stale" indicator and can click "Scan Now" to refresh.
+ cache_ttl_hours: 24
+ # Maximum seconds to allow a single scanner subprocess to run before killing it.
+ scan_timeout_seconds: 300
+ # Maximum number of artifacts scanned in parallel during a "Scan Repository" operation.
+ # Caps resource usage regardless of repository size. Rule of thumb: CPU cores / 2.
+ scan_concurrency: 4
+ # Maximum concurrent scans across ALL repositories at one time.
+ # Prevents scan-all floods from overwhelming the system. Default: 8.
+ max_global_concurrency: 8
+ # Optional cron expression for automatic full scans (Quartz format).
+ # If omitted, only on-demand scans are performed.
+ # Example: scan all repos every day at 2 AM
+ # cron: "0 0 2 * * ?"
+ cron: "0 0 2 * * ?"
+
artifacts_database:
postgres_host: "pantera-db"
postgres_port: 5432
diff --git a/pantera-main/src/main/java/com/auto1/pantera/VertxMain.java b/pantera-main/src/main/java/com/auto1/pantera/VertxMain.java
index 671ccc677..07718c37a 100644
--- a/pantera-main/src/main/java/com/auto1/pantera/VertxMain.java
+++ b/pantera-main/src/main/java/com/auto1/pantera/VertxMain.java
@@ -24,8 +24,19 @@
import com.auto1.pantera.jetty.http3.Http3Server;
import com.auto1.pantera.jetty.http3.SslFactoryFromYaml;
import com.auto1.pantera.misc.PanteraProperties;
+import com.auto1.pantera.scheduling.JobDataRegistry;
import com.auto1.pantera.scheduling.QuartzService;
import com.auto1.pantera.scheduling.ScriptScheduler;
+import com.auto1.pantera.vuln.DefaultVulnerabilityScanner;
+import com.auto1.pantera.vuln.VulnerabilityDao;
+import com.auto1.pantera.vuln.VulnerabilityScanJob;
+import com.auto1.pantera.vuln.VulnerabilitySettings;
+import com.auto1.pantera.vuln.backend.ScannerBackendFactory;
+import com.auto1.pantera.vuln.preparer.ComposerPreparer;
+import com.auto1.pantera.vuln.preparer.GoModulePreparer;
+import com.auto1.pantera.vuln.preparer.MavenPomArtifactPreparer;
+import com.auto1.pantera.vuln.preparer.NpmArtifactPreparer;
+import com.auto1.pantera.vuln.preparer.PypiSdistArtifactPreparer;
import com.auto1.pantera.settings.ConfigFile;
import com.auto1.pantera.settings.MetricsContext;
import com.auto1.pantera.settings.Settings;
@@ -33,6 +44,11 @@
import com.auto1.pantera.settings.repo.DbRepositories;
import com.auto1.pantera.settings.repo.MapRepositories;
import com.auto1.pantera.settings.repo.RepoConfig;
+import com.auto1.pantera.api.ManageRepoSettings;
+import com.auto1.pantera.asto.blocking.BlockingStorage;
+import com.auto1.pantera.db.dao.RepositoryDao;
+import com.auto1.pantera.settings.RepoData;
+import com.auto1.pantera.settings.repo.CrudRepoSettings;
import com.auto1.pantera.http.log.EcsLogger;
import com.auto1.pantera.settings.repo.Repositories;
import com.auto1.pantera.db.DbManager;
@@ -398,6 +414,51 @@ settings, repos, new JwtTokens(jwt, jwtSettings, userTokenDao)
quartz.start();
new ScriptScheduler(quartz).loadCrontab(settings, repos);
+ final VulnerabilitySettings vsettings = settings.vulnerabilitySettings();
+ if (vsettings.enabled() && vsettings.cronExpression() != null) {
+ final CrudRepoSettings vulnCrs = sharedDs.isPresent()
+ ? new RepositoryDao(sharedDs.get())
+ : new ManageRepoSettings(
+ new BlockingStorage(settings.configStorage())
+ );
+ final VulnerabilityDao vulnDao = sharedDs
+ .map(VulnerabilityDao::new).orElse(null);
+ JobDataRegistry.register(VulnerabilityScanJob.KEY_SCANNER,
+ new DefaultVulnerabilityScanner(
+ ScannerBackendFactory.create(vsettings),
+ java.util.List.of(
+ new NpmArtifactPreparer(),
+ new MavenPomArtifactPreparer(),
+ new PypiSdistArtifactPreparer(),
+ new GoModulePreparer(),
+ new ComposerPreparer()
+ ),
+ vsettings
+ )
+ );
+ if (vulnDao != null) {
+ JobDataRegistry.register(VulnerabilityScanJob.KEY_DAO, vulnDao);
+ }
+ JobDataRegistry.register(VulnerabilityScanJob.KEY_CRS, vulnCrs);
+ JobDataRegistry.register(VulnerabilityScanJob.KEY_REPO_DATA,
+ new RepoData(settings.configStorage(), settings.caches().storagesCache())
+ );
+ JobDataRegistry.register(VulnerabilityScanJob.KEY_SETTINGS, vsettings);
+ try {
+ quartz.schedulePeriodicJob(
+ vsettings.cronExpression(), VulnerabilityScanJob.class,
+ new org.quartz.JobDataMap()
+ );
+ EcsLogger.info("com.auto1.pantera")
+ .message("Scheduled vulnerability scan job with cron: " + vsettings.cronExpression())
+ .eventCategory("security")
+ .eventAction("vulnerability_schedule")
+ .eventOutcome("success")
+ .log();
+ } catch (final org.quartz.SchedulerException ex) {
+ throw new PanteraException(ex);
+ }
+ }
// JIT warmup: fire lightweight requests through group code paths so the
// first real client request doesn't pay ~140ms JIT compilation penalty.
diff --git a/pantera-main/src/main/java/com/auto1/pantera/api/v1/AsyncApiVerticle.java b/pantera-main/src/main/java/com/auto1/pantera/api/v1/AsyncApiVerticle.java
index 2a90917b0..fc5213ed5 100644
--- a/pantera-main/src/main/java/com/auto1/pantera/api/v1/AsyncApiVerticle.java
+++ b/pantera-main/src/main/java/com/auto1/pantera/api/v1/AsyncApiVerticle.java
@@ -20,6 +20,15 @@
import com.auto1.pantera.cooldown.CooldownService;
import com.auto1.pantera.cooldown.CooldownSupport;
import com.auto1.pantera.cooldown.metadata.CooldownMetadataService;
+import com.auto1.pantera.vuln.DefaultVulnerabilityScanner;
+import com.auto1.pantera.vuln.VulnerabilityScanner;
+import com.auto1.pantera.vuln.VulnerabilitySettings;
+import com.auto1.pantera.vuln.backend.ScannerBackendFactory;
+import com.auto1.pantera.vuln.preparer.ComposerPreparer;
+import com.auto1.pantera.vuln.preparer.GoModulePreparer;
+import com.auto1.pantera.vuln.preparer.MavenPomArtifactPreparer;
+import com.auto1.pantera.vuln.preparer.NpmArtifactPreparer;
+import com.auto1.pantera.vuln.preparer.PypiSdistArtifactPreparer;
import com.auto1.pantera.db.dao.AuthProviderDao;
import com.auto1.pantera.db.dao.RoleDao;
import com.auto1.pantera.db.dao.RepositoryDao;
@@ -312,6 +321,29 @@ crs, new RepoData(this.configsStorage, this.caches.storagesCache()),
this.security.policy()
).register(router);
new SearchHandler(this.artifactIndex, this.security.policy()).register(router);
+ // Vulnerability scanning handler
+ final VulnerabilitySettings vsettings = this.settings.vulnerabilitySettings();
+ final VulnerabilityScanner vulnScanner = vsettings.enabled()
+ ? new DefaultVulnerabilityScanner(
+ ScannerBackendFactory.create(vsettings),
+ java.util.List.of(
+ new NpmArtifactPreparer(),
+ new MavenPomArtifactPreparer(),
+ new PypiSdistArtifactPreparer(),
+ new GoModulePreparer(),
+ new ComposerPreparer()
+ ),
+ vsettings
+ )
+ : VulnerabilityScanner.NOP;
+ new VulnerabilityHandler(
+ vulnScanner,
+ this.dataSource,
+ vsettings,
+ crs,
+ new RepoData(this.configsStorage, this.caches.storagesCache()),
+ this.security.policy()
+ ).register(router);
// Start server
final HttpServer server;
final String schema;
diff --git a/pantera-main/src/main/java/com/auto1/pantera/api/v1/VulnerabilityHandler.java b/pantera-main/src/main/java/com/auto1/pantera/api/v1/VulnerabilityHandler.java
new file mode 100644
index 000000000..04feb1259
--- /dev/null
+++ b/pantera-main/src/main/java/com/auto1/pantera/api/v1/VulnerabilityHandler.java
@@ -0,0 +1,604 @@
+/*
+ * Copyright (c) 2025-2026 Auto1 Group
+ * Maintainers: Auto1 DevOps Team
+ * Lead Maintainer: Ayd Asraf
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License v3.0.
+ *
+ * Originally based on Artipie (https://github.com/artipie/artipie), MIT License.
+ */
+package com.auto1.pantera.api.v1;
+
+import com.auto1.pantera.api.AuthzHandler;
+import com.auto1.pantera.api.RepositoryName;
+import com.auto1.pantera.api.perms.ApiRepositoryPermission;
+import com.auto1.pantera.asto.Key;
+import com.auto1.pantera.asto.Storage;
+import com.auto1.pantera.http.log.EcsLogger;
+import com.auto1.pantera.http.log.MdcPropagatingCallable;
+import com.auto1.pantera.security.policy.Policy;
+import com.auto1.pantera.settings.RepoData;
+import com.auto1.pantera.settings.repo.CrudRepoSettings;
+import com.auto1.pantera.vuln.VulnerabilityDao;
+import com.auto1.pantera.vuln.VulnerabilityReport;
+import com.auto1.pantera.vuln.VulnerabilityScanner;
+import com.auto1.pantera.vuln.VulnerabilitySettings;
+import io.vertx.core.json.JsonArray;
+import io.vertx.core.json.JsonObject;
+import io.vertx.ext.web.Router;
+import io.vertx.ext.web.RoutingContext;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.atomic.AtomicInteger;
+import javax.sql.DataSource;
+
+/**
+ * REST handler for vulnerability scanning endpoints.
+ *
+ * <p>Routes:
+ * <ul>
+ * <li>GET /api/v1/vulnerabilities/summary — cross-repo summary
+ * <li>GET /api/v1/vulnerabilities/findings — paginated findings
+ * <li>DELETE /api/v1/vulnerabilities — delete all findings (admin)
+ * <li>GET /api/v1/repositories/:name/vulnerabilities — all reports for a repo
+ * <li>GET /api/v1/repositories/:name/vulnerabilities/artifact — single artifact report
+ * <li>POST /api/v1/repositories/:name/vulnerabilities/scan — trigger/refresh scan for one artifact
+ * <li>POST /api/v1/repositories/:name/vulnerabilities/scan-all — scan every artifact in a repo
+ * </ul>
+ *
+ * <p>All read routes require {@code api_repository_permissions READ}.
+ * The POST scan route requires {@code api_repository_permissions WRITE}.
+ *
+ * @since 2.1.0
+ * @checkstyle ClassDataAbstractionCouplingCheck (400 lines)
+ */
+public final class VulnerabilityHandler {
+
+ /**
+ * Sort column allowlist for the findings endpoint.
+ */
+ private static final java.util.Set<String> SORT_COLS = java.util.Set.of(
+ "cve_id", "severity", "package_name", "repo_name", "scanned_at"
+ );
+
+ /**
+ * Tracks repositories with an active scan-all in progress.
+ * Maps repo name → epoch-millis when the scan started.
+ * The guard is cleared only when the background job finishes.
+ * Using a ConcurrentHashMap ensures the add/check is atomic.
+ */
+ private final ConcurrentHashMap<String, Long> activeScanAll = new ConcurrentHashMap<>();
+
+ /**
+ * Global semaphore bounding the total number of concurrent scans across all
+ * repositories. Prevents scan-all floods from multiple repos at once.
+ * Permits = {@link VulnerabilitySettings#maxGlobalConcurrency()}.
+ */
+ private final Semaphore globalScanSemaphore;
+
+ /**
+ * Vulnerability scanner (Trivy or NOP).
+ */
+ private final VulnerabilityScanner scanner;
+
+ /**
+ * Vulnerability DAO (nullable — disabled when no DB).
+ */
+ private final VulnerabilityDao dao;
+
+ /**
+ * Vulnerability settings (cache TTL etc.).
+ */
+ private final VulnerabilitySettings vsettings;
+
+ /**
+ * Repository settings CRUD.
+ */
+ private final CrudRepoSettings crs;
+
+ /**
+ * Repository data (storage resolver).
+ */
+ private final RepoData repoData;
+
+ /**
+ * Security policy.
+ */
+ private final Policy<?> policy;
+
+ /**
+ * Ctor.
+ * @param scanner Vulnerability scanner implementation
+ * @param dataSource Database data source (nullable)
+ * @param vsettings Vulnerability settings
+ * @param crs Repository settings CRUD
+ * @param repoData Repository data accessor
+ * @param policy Security policy
+ * @checkstyle ParameterNumberCheck (5 lines)
+ */
+ public VulnerabilityHandler(
+ final VulnerabilityScanner scanner,
+ final DataSource dataSource,
+ final VulnerabilitySettings vsettings,
+ final CrudRepoSettings crs,
+ final RepoData repoData,
+ final Policy<?> policy
+ ) {
+ this.scanner = scanner;
+ this.dao = dataSource != null ? new VulnerabilityDao(dataSource) : null;
+ this.vsettings = vsettings;
+ this.crs = crs;
+ this.repoData = repoData;
+ this.policy = policy;
+ this.globalScanSemaphore = new Semaphore(vsettings.maxGlobalConcurrency());
+ }
+
+ /**
+ * Register all vulnerability routes on the router.
+ * @param router Vert.x router
+ */
+ public void register(final Router router) {
+ // Cross-repo summary
+ router.get("/api/v1/vulnerabilities/summary")
+ .handler(new AuthzHandler(this.policy, READ))
+ .handler(this::handleSummary);
+ // Paginated findings across all repos
+ router.get("/api/v1/vulnerabilities/findings")
+ .handler(new AuthzHandler(this.policy, READ))
+ .handler(this::handleFindings);
+ // Delete all findings (admin cleanup)
+ router.delete("/api/v1/vulnerabilities")
+ .handler(new AuthzHandler(this.policy, WRITE))
+ .handler(this::handleDeleteAll);
+ // All cached reports for a specific repository
+ router.get("/api/v1/repositories/:name/vulnerabilities")
+ .handler(new AuthzHandler(this.policy, READ))
+ .handler(this::handleRepoVulnerabilities);
+ // Cached report for a specific artifact (GET — returns from cache or 404)
+ router.get("/api/v1/repositories/:name/vulnerabilities/artifact")
+ .handler(new AuthzHandler(this.policy, READ))
+ .handler(this::handleArtifactReport);
+ // Trigger / refresh a scan for a specific artifact (POST)
+ router.post("/api/v1/repositories/:name/vulnerabilities/scan")
+ .handler(new AuthzHandler(this.policy, WRITE))
+ .handler(this::handleScan);
+ // Scan every artifact in a repository (POST — fires and forgets, returns immediately)
+ router.post("/api/v1/repositories/:name/vulnerabilities/scan-all")
+ .handler(new AuthzHandler(this.policy, WRITE))
+ .handler(this::handleScanAll);
+ }
+
+ // -------------------------------------------------------------------------
+ // GET /api/v1/vulnerabilities/summary
+ // -------------------------------------------------------------------------
+
+ /**
+ * Return an aggregate summary per repository.
+ * @param ctx Routing context
+ */
+ private void handleSummary(final RoutingContext ctx) {
+ if (!this.vsettings.enabled() || this.dao == null) {
+ ctx.response().setStatusCode(200)
+ .putHeader("Content-Type", "application/json")
+ .end(new JsonObject().put("items", new JsonArray()).encode());
+ return;
+ }
+ ctx.vertx().executeBlocking(
+ MdcPropagatingCallable.wrap(() -> {
+ final List<JsonObject> rows = this.dao.summarizeAll();
+ final JsonArray arr = new JsonArray();
+ rows.forEach(arr::add);
+ return new JsonObject().put("items", arr);
+ }),
+ false
+ ).onSuccess(
+ json -> ctx.response().setStatusCode(200)
+ .putHeader("Content-Type", "application/json")
+ .end(json.encode())
+ ).onFailure(
+ err -> ApiResponse.sendError(ctx, 500, "INTERNAL_ERROR", err.getMessage())
+ );
+ }
+
+ // -------------------------------------------------------------------------
+ // GET /api/v1/vulnerabilities/findings
+ // -------------------------------------------------------------------------
+
+ /**
+ * Return paginated CVE findings across all repositories with optional search/sort.
+ * @param ctx Routing context
+ */
+ private void handleFindings(final RoutingContext ctx) {
+ if (!this.vsettings.enabled() || this.dao == null) {
+ ctx.response().setStatusCode(200)
+ .putHeader("Content-Type", "application/json")
+ .end(ApiResponse.paginated(new JsonArray(), 0, 50, 0).encode());
+ return;
+ }
+ final int page = ApiResponse.intParam(
+ ctx.queryParam("page").stream().findFirst().orElse(null), 0
+ );
+ final int size = ApiResponse.clampSize(
+ ApiResponse.intParam(
+ ctx.queryParam("size").stream().findFirst().orElse(null), 50
+ )
+ );
+ final String search = ctx.queryParam("search").stream().findFirst().orElse(null);
+ final String repo = ctx.queryParam("repo").stream().findFirst().orElse(null);
+ final String severity = ctx.queryParam("severity").stream().findFirst().orElse(null);
+ final String sortBy = ctx.queryParam("sort_by").stream().findFirst().orElse("scanned_at");
+ final String sortDir = ctx.queryParam("sort_dir").stream().findFirst().orElse("desc");
+ final boolean sortAsc = "asc".equalsIgnoreCase(sortDir);
+ final String safeSort = SORT_COLS.contains(sortBy) ? sortBy : "scanned_at";
+ ctx.vertx().executeBlocking(
+ MdcPropagatingCallable.wrap(() -> {
+ final long total = this.dao.countAllFindings(search, repo, severity);
+ final List<JsonObject> rows = this.dao.findAllFindingsPaginated(
+ page * size, size, search, repo, severity, safeSort, sortAsc
+ );
+ final JsonArray arr = new JsonArray();
+ rows.forEach(arr::add);
+ return ApiResponse.paginated(arr, page, size, (int) Math.min(total, Integer.MAX_VALUE));
+ }),
+ false
+ ).onSuccess(
+ json -> ctx.response().setStatusCode(200)
+ .putHeader("Content-Type", "application/json")
+ .end(json.encode())
+ ).onFailure(
+ err -> ApiResponse.sendError(ctx, 500, "INTERNAL_ERROR", err.getMessage())
+ );
+ }
+
+ // -------------------------------------------------------------------------
+ // GET /api/v1/repositories/:name/vulnerabilities
+ // -------------------------------------------------------------------------
+
+ /**
+ * Return all cached scan reports for a specific repository.
+ * @param ctx Routing context
+ */
+ private void handleRepoVulnerabilities(final RoutingContext ctx) {
+ final String repoName = ctx.pathParam("name");
+ if (!this.vsettings.enabled() || this.dao == null) {
+ ctx.response().setStatusCode(200)
+ .putHeader("Content-Type", "application/json")
+ .end(new JsonObject().put("items", new JsonArray()).encode());
+ return;
+ }
+ ctx.vertx().executeBlocking(
+ MdcPropagatingCallable.wrap(() -> {
+ final List<VulnerabilityReport> reports = this.dao.findByRepo(repoName);
+ final JsonArray arr = new JsonArray();
+ for (final VulnerabilityReport r : reports) {
+ arr.add(r.toJson(this.vsettings.cacheTtlHours()));
+ }
+ return new JsonObject()
+ .put("repo_name", repoName)
+ .put("items", arr)
+ .put("total", reports.size());
+ }),
+ false
+ ).onSuccess(
+ json -> ctx.response().setStatusCode(200)
+ .putHeader("Content-Type", "application/json")
+ .end(json.encode())
+ ).onFailure(
+ err -> ApiResponse.sendError(ctx, 500, "INTERNAL_ERROR", err.getMessage())
+ );
+ }
+
+ // -------------------------------------------------------------------------
+ // GET /api/v1/repositories/:name/vulnerabilities/artifact?path=…
+ // -------------------------------------------------------------------------
+
+ /**
+ * Return the cached vulnerability report for a specific artifact.
+ * Returns 404 if the artifact has never been scanned.
+ * Returns the report with {@code is_stale: true} if the cache has expired.
+ * @param ctx Routing context
+ */
+ private void handleArtifactReport(final RoutingContext ctx) {
+ final String repoName = ctx.pathParam("name");
+ final String path = ctx.queryParam("path").stream().findFirst().orElse(null);
+ if (path == null || path.isBlank()) {
+ ApiResponse.sendError(ctx, 400, "BAD_REQUEST", "Query parameter 'path' is required");
+ return;
+ }
+ if (!this.vsettings.enabled() || this.dao == null) {
+ ApiResponse.sendError(ctx, 503, "SCANNING_DISABLED",
+ "Vulnerability scanning is not enabled");
+ return;
+ }
+ ctx.vertx().<Optional<VulnerabilityReport>>executeBlocking(
+ MdcPropagatingCallable.wrap(() -> this.dao.findByArtifact(repoName, path)),
+ false
+ ).onSuccess(opt -> {
+ if (opt.isEmpty()) {
+ ApiResponse.sendError(ctx, 404, "NOT_FOUND",
+ "No scan result found for this artifact. Use POST /scan to trigger a scan.");
+ } else {
+ ctx.response().setStatusCode(200)
+ .putHeader("Content-Type", "application/json")
+ .end(opt.get().toJson(this.vsettings.cacheTtlHours()).encode());
+ }
+ }).onFailure(
+ err -> ApiResponse.sendError(ctx, 500, "INTERNAL_ERROR", err.getMessage())
+ );
+ }
+
+ // -------------------------------------------------------------------------
+ // POST /api/v1/repositories/:name/vulnerabilities/scan?path=…
+ // -------------------------------------------------------------------------
+
+ /**
+ * Trigger (or force-refresh) a vulnerability scan for a specific artifact.
+ * Downloads the artifact from storage, runs Trivy, persists and returns the result.
+ * @param ctx Routing context
+ */
+ private void handleScan(final RoutingContext ctx) {
+ final String repoName = ctx.pathParam("name");
+ final String path = ctx.queryParam("path").stream().findFirst().orElse(null);
+ if (path == null || path.isBlank()) {
+ ApiResponse.sendError(ctx, 400, "BAD_REQUEST", "Query parameter 'path' is required");
+ return;
+ }
+ if (!this.vsettings.enabled()) {
+ ApiResponse.sendError(ctx, 503, "SCANNING_DISABLED",
+ "Vulnerability scanning is not enabled. Set vulnerability.enabled: true in pantera.yml");
+ return;
+ }
+ if (!this.globalScanSemaphore.tryAcquire()) {
+ ApiResponse.sendError(ctx, 429, "TOO_MANY_SCANS",
+ String.format(
+ "Maximum concurrent scans (%d) reached. Try again later.",
+ this.vsettings.maxGlobalConcurrency()));
+ return;
+ }
+ final RepositoryName rname = new RepositoryName.Simple(repoName);
+ this.repoData.repoStorage(rname, this.crs)
+ .thenCompose(storage -> this.scanner.scan(repoName, path, storage))
+ .thenAccept(report -> {
+ if (this.dao != null) {
+ this.dao.upsert(report);
+ }
+ ctx.response().setStatusCode(200)
+ .putHeader("Content-Type", "application/json")
+ .end(report.toJson(this.vsettings.cacheTtlHours()).encode());
+ })
+ .exceptionally(err -> {
+ ApiResponse.sendError(ctx, 500, "SCAN_FAILED",
+ "Scan failed: " + err.getMessage());
+ return null;
+ })
+ .whenComplete((v, t) -> this.globalScanSemaphore.release());
+ }
+
+ // -------------------------------------------------------------------------
+ // POST /api/v1/repositories/:name/vulnerabilities/scan-all
+ // -------------------------------------------------------------------------
+
+ /**
+ * Scan every artifact in a repository with bounded concurrency.
+ *
+ * <p>Safety guarantees:
+ * <ul>
+ * <li>Only one scan-all per repository at a time — returns 409 if already running.
+ * <li>At most {@code scan_concurrency} (default 4) Trivy subprocesses run in parallel,
+ * regardless of how many artifacts the repo contains.
+ * <li>Responds with 202 immediately; scans run in the background.
+ * <li>Poll {@code GET /repositories/:name/vulnerabilities} to track progress.
+ * </ul>
+ *
+ * @param ctx Routing context
+ */
+ private void handleScanAll(final RoutingContext ctx) {
+ final String repoName = ctx.pathParam("name");
+ if (!this.vsettings.enabled()) {
+ ApiResponse.sendError(ctx, 503, "SCANNING_DISABLED",
+ "Vulnerability scanning is not enabled.");
+ return;
+ }
+ // Reject duplicate scan requests for the same repo.
+ // putIfAbsent is atomic: only one concurrent caller gets null back (the winner).
+ // The guard is removed only when the background job's whenComplete fires.
+ final Long existing = this.activeScanAll.putIfAbsent(repoName, System.currentTimeMillis());
+ if (existing != null) {
+ ApiResponse.sendError(ctx, 409, "SCAN_ALREADY_RUNNING",
+ String.format(
+ "A scan-all is already in progress for repository '%s'. "
+ + "Poll GET /repositories/%s/vulnerabilities for progress.",
+ repoName, repoName));
+ return;
+ }
+ final RepositoryName rname = new RepositoryName.Simple(repoName);
+ this.repoData.repoStorage(rname, this.crs)
+ .thenCompose(storage ->
+ listAllFiles(storage, new Key.From(repoName), repoName)
+ .thenApply(paths -> {
+ final int total = paths.size();
+ ctx.response().setStatusCode(202)
+ .putHeader("Content-Type", "application/json")
+ .end(new JsonObject()
+ .put("enqueued", total)
+ .put("repo_name", repoName)
+ .put("concurrency", this.vsettings.scanConcurrency())
+ .put("message", String.format(
+ "Scanning %d artifact(s) in background "
+ + "(%d at a time). "
+ + "Poll GET /repositories/%s/vulnerabilities for progress.",
+ total, this.vsettings.scanConcurrency(), repoName))
+ .encode());
+ // Run background pipeline in a single virtual thread.
+ // The semaphore limits how many Trivy subprocesses run at once.
+ final int concurrency = this.vsettings.scanConcurrency();
+ final VulnerabilityDao daoRef = this.dao;
+ final VulnerabilityScanner scannerRef = this.scanner;
+ final VulnerabilitySettings vsRef = this.vsettings;
+ final ConcurrentHashMap<String, Long> activeRef = this.activeScanAll;
+ final Semaphore globalSem = this.globalScanSemaphore;
+ Thread.ofVirtual().start(() -> {
+ // Per-repo semaphore limits how many artifacts scan in parallel
+ // for this specific repo.
+ final Semaphore sem = new Semaphore(concurrency);
+ final AtomicInteger done = new AtomicInteger(0);
+ final AtomicInteger failed = new AtomicInteger(0);
+ final List<CompletableFuture<Void>> futures = new ArrayList<>(
+ Math.min(total, concurrency * 2)
+ );
+ for (final String artifactPath : paths) {
+ try {
+ // Acquire both semaphores: per-repo AND global.
+ // Per-repo limits concurrency within this scan-all.
+ // Global limits total scans across all repos.
+ globalSem.acquire();
+ sem.acquire();
+ } catch (final InterruptedException ex) {
+ Thread.currentThread().interrupt();
+ break;
+ }
+ final CompletableFuture<Void> fut = scannerRef
+ .scan(repoName, artifactPath, storage)
+ .thenAccept(report -> {
+ if (daoRef != null) {
+ daoRef.upsert(report);
+ }
+ EcsLogger.info("com.auto1.pantera.vuln")
+ .message("Repo scan: artifact complete")
+ .field("repo_name", repoName)
+ .field("artifact_path", artifactPath)
+ .field("vuln_count", report.vulnCount())
+ .field("done", done.incrementAndGet())
+ .field("total", total)
+ .log();
+ })
+ .exceptionally(err -> {
+ EcsLogger.warn("com.auto1.pantera.vuln")
+ .message("Repo scan: artifact failed")
+ .field("repo_name", repoName)
+ .field("artifact_path", artifactPath)
+ .field("failed", failed.incrementAndGet())
+ .error(err)
+ .log();
+ return null;
+ })
+ .whenComplete((v, t) -> {
+ sem.release();
+ globalSem.release();
+ });
+ futures.add(fut);
+ }
+ // Wait for all scans to finish, then release the guard
+ CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]))
+ .whenComplete((v, t) -> {
+ activeRef.remove(repoName);
+ // ^ remove is atomic — safe to call from virtual thread
+ EcsLogger.info("com.auto1.pantera.vuln")
+ .message("Repo scan-all complete")
+ .field("repo_name", repoName)
+ .field("total", total)
+ .field("done", done.get())
+ .field("failed", failed.get())
+ .log();
+ });
+ });
+ return null;
+ })
+ )
+ .exceptionally(err -> {
+ activeScanAll.remove(repoName);
+ ApiResponse.sendError(ctx, 500, "SCAN_FAILED",
+ "Failed to list repository artifacts: " + err.getMessage());
+ return null;
+ });
+ }
+
+ // -------------------------------------------------------------------------
+ // DELETE /api/v1/vulnerabilities
+ // -------------------------------------------------------------------------
+
+ /**
+ * Delete all vulnerability findings from the database.
+ * Intended for admin use — clears both real findings and scan-marker rows.
+ * @param ctx Routing context
+ */
+ private void handleDeleteAll(final RoutingContext ctx) {
+ if (this.dao == null) {
+ ApiResponse.sendError(ctx, 503, "NO_DATABASE", "Database not available");
+ return;
+ }
+ ctx.vertx().executeBlocking(
+ MdcPropagatingCallable.wrap(() -> {
+ this.dao.deleteAll();
+ return null;
+ }),
+ false
+ ).onSuccess(
+ ignored -> ctx.response().setStatusCode(200)
+ .putHeader("Content-Type", "application/json")
+ .end(new JsonObject().put("deleted", true).encode())
+ ).onFailure(
+ err -> ApiResponse.sendError(ctx, 500, "INTERNAL_ERROR", err.getMessage())
+ );
+ }
+
+ /**
+ * Recursively list all file paths under a storage prefix.
+ * Returns paths relative to the repo root (repo name prefix stripped).
+ *
+ * <p>Note: this collects all paths in memory before returning. For very
+ * large repositories (100k+ artifacts) a streaming approach would be
+ * preferable, but the memory cost here is minimal — a list of strings,
+ * ~100 bytes each, is ~10MB for 100k artifacts.
+ *
+ * @param storage Storage backend
+ * @param prefix Current key prefix to list
+ * @param repoName Repository name (for stripping from returned paths)
+ * @return Future resolving to all file paths found
+ */
+ private static CompletableFuture<List<String>> listAllFiles(
+ final Storage storage, final Key prefix, final String repoName
+ ) {
+ return storage.list(prefix, "/").thenCompose(listing -> {
+ final List<CompletableFuture<List<String>>> subdirFutures = new ArrayList<>();
+ for (final Key dir : listing.directories()) {
+ subdirFutures.add(listAllFiles(storage, dir, repoName));
+ }
+ final String repoPrefix = repoName + "/";
+ final List<String> files = new ArrayList<>();
+ for (final Key file : listing.files()) {
+ final String raw = file.string();
+ files.add(raw.startsWith(repoPrefix) ? raw.substring(repoPrefix.length()) : raw);
+ }
+ if (subdirFutures.isEmpty()) {
+ return CompletableFuture.completedFuture(files);
+ }
+ return CompletableFuture.allOf(subdirFutures.toArray(new CompletableFuture[0]))
+ .thenApply(ignored -> {
+ final List<String> all = new ArrayList<>(files);
+ for (final CompletableFuture<List<String>> f : subdirFutures) {
+ all.addAll(f.join());
+ }
+ return all;
+ });
+ }).toCompletableFuture();
+ }
+
+ /**
+ * Convenience READ permission constant (reuses api_repository_permissions).
+ */
+ private static final ApiRepositoryPermission READ =
+ new ApiRepositoryPermission(ApiRepositoryPermission.RepositoryAction.READ);
+
+ /**
+ * Convenience CREATE permission constant used for triggering scans (write-like).
+ * Reuses api_repository_permissions — users who can create repos can trigger scans.
+ */
+ private static final ApiRepositoryPermission WRITE =
+ new ApiRepositoryPermission(ApiRepositoryPermission.RepositoryAction.CREATE);
+}
diff --git a/pantera-main/src/main/java/com/auto1/pantera/settings/Settings.java b/pantera-main/src/main/java/com/auto1/pantera/settings/Settings.java
index 89c1f623f..0845c726b 100644
--- a/pantera-main/src/main/java/com/auto1/pantera/settings/Settings.java
+++ b/pantera-main/src/main/java/com/auto1/pantera/settings/Settings.java
@@ -16,6 +16,7 @@
import com.auto1.pantera.asto.Storage;
import com.auto1.pantera.cache.ValkeyConnection;
import com.auto1.pantera.cooldown.CooldownSettings;
+import com.auto1.pantera.vuln.VulnerabilitySettings;
import com.auto1.pantera.http.client.HttpClientSettings;
import com.auto1.pantera.index.ArtifactIndex;
import com.auto1.pantera.scheduling.MetadataEventQueues;
@@ -168,4 +169,13 @@ default ArtifactIndex artifactIndex() {
default Optional<ValkeyConnection> valkeyConnection() {
return Optional.empty();
}
+
+ /**
+ * Vulnerability scanning configuration.
+ * Returns disabled settings by default when not configured.
+ * @return Vulnerability settings
+ */
+ default VulnerabilitySettings vulnerabilitySettings() {
+ return VulnerabilitySettings.disabled();
+ }
}
diff --git a/pantera-main/src/main/java/com/auto1/pantera/settings/YamlSettings.java b/pantera-main/src/main/java/com/auto1/pantera/settings/YamlSettings.java
index 77d7f0d73..43aa26633 100644
--- a/pantera-main/src/main/java/com/auto1/pantera/settings/YamlSettings.java
+++ b/pantera-main/src/main/java/com/auto1/pantera/settings/YamlSettings.java
@@ -32,6 +32,7 @@
import com.auto1.pantera.cache.ValkeyConnection;
import com.auto1.pantera.cooldown.CooldownSettings;
import com.auto1.pantera.cooldown.YamlCooldownSettings;
+import com.auto1.pantera.vuln.VulnerabilitySettings;
import com.auto1.pantera.cooldown.metadata.FilteredMetadataCacheConfig;
import com.auto1.pantera.db.ArtifactDbFactory;
import com.auto1.pantera.db.DbConsumer;
@@ -144,6 +145,11 @@ public final class YamlSettings implements Settings {
*/
private final CooldownSettings cooldown;
+ /**
+ * Vulnerability scanning settings.
+ */
+ private final VulnerabilitySettings vulnerability;
+
/**
* Artifacts database data source if configured.
*/
@@ -301,6 +307,7 @@ auth, new StoragesCache(), this.security.policy(), new GuavaFiltersCache()
this.mctx = new MetricsContext(this.meta());
this.lctx = new LoggingContext(this.meta());
this.cooldown = YamlCooldownSettings.fromMeta(this.meta());
+ this.vulnerability = VulnerabilitySettings.fromMeta(this.meta());
// Initialize artifact index
final YamlMapping indexConfig = this.meta.yamlMapping("artifact_index");
final boolean indexEnabled = indexConfig != null
@@ -407,6 +414,11 @@ public CooldownSettings cooldown() {
return this.cooldown;
}
+ @Override
+ public VulnerabilitySettings vulnerabilitySettings() {
+ return this.vulnerability;
+ }
+
@Override
public Optional artifactsDatabase() {
return this.artifactsDb;
diff --git a/pantera-main/src/main/java/com/auto1/pantera/vuln/ArtifactPreparer.java b/pantera-main/src/main/java/com/auto1/pantera/vuln/ArtifactPreparer.java
new file mode 100644
index 000000000..80b706f7e
--- /dev/null
+++ b/pantera-main/src/main/java/com/auto1/pantera/vuln/ArtifactPreparer.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2025-2026 Auto1 Group
+ * Maintainers: Auto1 DevOps Team
+ * Lead Maintainer: Ayd Asraf
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License v3.0.
+ *
+ * Originally based on Artipie (https://github.com/artipie/artipie), MIT License.
+ */
+package com.auto1.pantera.vuln;
+
+import java.io.IOException;
+import java.nio.file.Path;
+
+/**
+ * Extracts dependency manifest file(s) from a downloaded artifact so the
+ * scanner backend can find them in a flat directory.
+ *
+ * <p>Each implementation handles one artifact format (npm tgz, Maven pom,
+ * PyPI sdist, Go module, PHP Composer, etc.). Only the minimal files required
+ * by the scanner are extracted — full artifact contents are never written to disk.
+ *
+ * <p>Implementations should be stateless and reusable across scans.
+ *
+ * @since 2.2.0
+ */
+public interface ArtifactPreparer {
+
+ /**
+ * Returns {@code true} if this preparer handles the given artifact path.
+ * Matching is typically done on the file extension or path pattern.
+ *
+ * @param artifactPath Storage path of the artifact,
+ * e.g. {@code lodash/-/lodash-4.17.21.tgz}
+ * @return True if this preparer supports the artifact format
+ */
+ boolean supports(String artifactPath);
+
+ /**
+ * Extract dependency manifest file(s) from the artifact bytes into
+ * {@code scanDir}.
+ *
+ * <p>On success, writes one or more manifest files into the flat
+ * {@code scanDir} directory. Returns {@code false} if no recognisable
+ * manifest was found (e.g. an npm tarball shipped without a lock file),
+ * in which case the caller skips the scan and records an empty report.
+ *
+ * @param artifactBytes Raw artifact bytes (already read from storage)
+ * @param scanDir Empty temporary directory to write manifest file(s) into
+ * @return {@code true} if at least one manifest file was written;
+ * {@code false} to skip the scan
+ * @throws IOException On I/O failure during extraction
+ */
+ boolean prepare(byte[] artifactBytes, Path scanDir) throws IOException;
+}
diff --git a/pantera-main/src/main/java/com/auto1/pantera/vuln/DefaultVulnerabilityScanner.java b/pantera-main/src/main/java/com/auto1/pantera/vuln/DefaultVulnerabilityScanner.java
new file mode 100644
index 000000000..98bf03dcd
--- /dev/null
+++ b/pantera-main/src/main/java/com/auto1/pantera/vuln/DefaultVulnerabilityScanner.java
@@ -0,0 +1,211 @@
+/*
+ * Copyright (c) 2025-2026 Auto1 Group
+ * Maintainers: Auto1 DevOps Team
+ * Lead Maintainer: Ayd Asraf
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License v3.0.
+ *
+ * Originally based on Artipie (https://github.com/artipie/artipie), MIT License.
+ */
+package com.auto1.pantera.vuln;
+
+import com.auto1.pantera.asto.Key;
+import com.auto1.pantera.asto.Storage;
+import com.auto1.pantera.asto.blocking.BlockingStorage;
+import com.auto1.pantera.http.log.EcsLogger;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.time.Instant;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Optional;
+import java.util.UUID;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.Executor;
+import java.util.concurrent.Executors;
+import java.util.stream.Stream;
+
+/**
+ * Default {@link VulnerabilityScanner} that wires together an
+ * {@link ArtifactPreparer} list and a {@link ScannerBackend}.
+ *
+ * <p>For each scan request:
+ * <ol>
+ * <li>Finds the first {@link ArtifactPreparer} that supports the artifact type.
+ * If none matches, returns an empty report without downloading.
+ * <li>Downloads the artifact bytes from storage — no temp artifact file is
+ * written to disk; bytes flow directly to the preparer.
+ * <li>The preparer extracts dependency manifests into a temporary directory.
+ * <li>The {@link ScannerBackend} scans the manifest directory.
+ * <li>Returns a {@link VulnerabilityReport} and cleans up the temp directory.
+ * </ol>
+ *
+ * <p>All blocking work (download, prepare, scan) runs on a virtual-thread executor.
+ *
+ * @since 2.2.0
+ * @checkstyle ClassDataAbstractionCouplingCheck (200 lines)
+ */
+public final class DefaultVulnerabilityScanner implements VulnerabilityScanner {
+
+ /**
+ * Ordered list of artifact preparers.
+ */
+ private final List<ArtifactPreparer> preparers;
+
+ /**
+ * CVE scanner backend.
+ */
+ private final ScannerBackend backend;
+
+ /**
+ * Scan configuration.
+ */
+ private final VulnerabilitySettings settings;
+
+ /**
+ * Executor for blocking work (download + prepare + scan).
+ */
+ private final Executor executor;
+
+ /**
+ * Ctor for production use — creates a virtual-thread-per-task executor.
+ * @param backend CVE scanner backend
+ * @param preparers Artifact preparers tried in order
+ * @param settings Scan configuration
+ */
+ public DefaultVulnerabilityScanner(
+ final ScannerBackend backend,
+ final List<ArtifactPreparer> preparers,
+ final VulnerabilitySettings settings
+ ) {
+ this(backend, preparers, settings, Executors.newVirtualThreadPerTaskExecutor());
+ }
+
+ /**
+ * Ctor with explicit executor (for testing).
+ * @param backend CVE scanner backend
+ * @param preparers Artifact preparers tried in order
+ * @param settings Scan configuration
+ * @param executor Executor for blocking work
+ * @checkstyle ParameterNumberCheck (5 lines)
+ */
+ public DefaultVulnerabilityScanner(
+ final ScannerBackend backend,
+ final List<ArtifactPreparer> preparers,
+ final VulnerabilitySettings settings,
+ final Executor executor
+ ) {
+ this.backend = backend;
+ this.preparers = List.copyOf(preparers);
+ this.settings = settings;
+ this.executor = executor;
+ }
+
+ @Override
+ public CompletableFuture<VulnerabilityReport> scan(
+ final String repoName,
+ final String artifactPath,
+ final Storage storage
+ ) {
+ return CompletableFuture.supplyAsync(
+ () -> this.doScan(repoName, artifactPath, storage),
+ this.executor
+ );
+ }
+
+ /**
+ * Perform the blocking scan synchronously.
+ * @param repoName Repository name
+ * @param artifactPath Artifact storage path
+ * @param storage Storage backend
+ * @return Scan report
+ */
+ @SuppressWarnings("PMD.AvoidCatchingGenericException")
+ private VulnerabilityReport doScan(
+ final String repoName,
+ final String artifactPath,
+ final Storage storage
+ ) {
+ final Optional<ArtifactPreparer> preparer = this.preparers.stream()
+ .filter(p -> p.supports(artifactPath))
+ .findFirst();
+ if (preparer.isEmpty()) {
+ EcsLogger.debug("com.auto1.pantera.vuln")
+ .message("No preparer for artifact type — skipping scan")
+ .field("artifact_path", artifactPath)
+ .log();
+ return emptyReport(repoName, artifactPath);
+ }
+ final Path tmpDir = Path.of(System.getProperty("java.io.tmpdir"), "pantera-vuln");
+ final String scanId = UUID.randomUUID().toString();
+ Path scanDir = null;
+ try {
+ Files.createDirectories(tmpDir);
+ // Download bytes — no temp artifact file written to disk.
+ // ByteArrayInputStream in the preparer streams directly from these bytes.
+ final String cleanPath = artifactPath.startsWith("/")
+ ? artifactPath.substring(1) : artifactPath;
+ final byte[] bytes = new BlockingStorage(storage)
+ .value(new Key.From(repoName, cleanPath));
+ // Prepare the scan directory with manifest files only
+ scanDir = tmpDir.resolve(scanId + "-scan");
+ Files.createDirectories(scanDir);
+ if (!preparer.get().prepare(bytes, scanDir)) {
+ // No manifest found in this artifact — nothing to scan
+ return emptyReport(repoName, artifactPath);
+ }
+ // Invoke the backend against the prepared directory
+ final List<VulnerabilityFinding> findings = this.backend.scan(
+ scanDir, this.settings.scanTimeoutSeconds()
+ );
+ return new VulnerabilityReport(
+ repoName, artifactPath, Instant.now(), this.backend.name(), findings
+ );
+ } catch (final Exception ex) {
+ EcsLogger.warn("com.auto1.pantera.vuln")
+ .message("Vulnerability scan failed")
+ .eventCategory("security")
+ .eventAction("vulnerability_scan")
+ .eventOutcome("failure")
+ .field("repo_name", repoName)
+ .field("artifact_path", artifactPath)
+ .error(ex)
+ .log();
+ return emptyReport(repoName, artifactPath);
+ } finally {
+ if (scanDir != null) {
+ deleteRecursively(scanDir);
+ }
+ }
+ }
+
+ /**
+ * Return an empty report (no findings) for an artifact.
+ * @param repoName Repository name
+ * @param artifactPath Artifact path
+ * @return Empty report using the backend's name as the scanner identifier
+ */
+ private VulnerabilityReport emptyReport(
+ final String repoName, final String artifactPath
+ ) {
+ return new VulnerabilityReport(
+ repoName, artifactPath, Instant.now(), this.backend.name(), List.of()
+ );
+ }
+
+ /**
+ * Recursively delete a directory tree. Best-effort — errors are ignored.
+ * @param dir Root directory to delete
+ */
+ @SuppressWarnings("PMD.AvoidCatchingGenericException")
+ private static void deleteRecursively(final Path dir) {
+ try (Stream<Path> walk = Files.walk(dir)) {
+ walk.sorted(Comparator.reverseOrder())
+ .forEach(p -> {
+ try { Files.deleteIfExists(p); } catch (final IOException ignore) { }
+ });
+ } catch (final Exception ignore) { }
+ }
+}
diff --git a/pantera-main/src/main/java/com/auto1/pantera/vuln/ScannerBackend.java b/pantera-main/src/main/java/com/auto1/pantera/vuln/ScannerBackend.java
new file mode 100644
index 000000000..184121b2d
--- /dev/null
+++ b/pantera-main/src/main/java/com/auto1/pantera/vuln/ScannerBackend.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2025-2026 Auto1 Group
+ * Maintainers: Auto1 DevOps Team
+ * Lead Maintainer: Ayd Asraf
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License v3.0.
+ *
+ * Originally based on Artipie (https://github.com/artipie/artipie), MIT License.
+ */
+package com.auto1.pantera.vuln;
+
+import java.io.IOException;
+import java.nio.file.Path;
+import java.util.List;
+
+/**
+ * Low-level CVE scanner backend contract.
+ *
+ * <p>Implementations wrap specific scanning tools (Trivy, Grype, OSV-Scanner, etc.)
+ * and are responsible for invoking the tool against a prepared directory and
+ * parsing the tool-specific output into {@link VulnerabilityFinding} objects.
+ *
+ * <p>Backend instances are called from a virtual-thread executor — they may
+ * perform blocking I/O (spawning subprocesses, reading stdout) without blocking
+ * the Vert.x event loop.
+ *
+ * @since 2.2.0
+ */
+public interface ScannerBackend {
+
+ /**
+ * Short identifier for this scanner, written into scan reports.
+ * Examples: {@code "trivy"}, {@code "grype"}, {@code "osv"}.
+ *
+ * @return Scanner name
+ */
+ String name();
+
+ /**
+ * Run the scanner against a directory containing dependency manifests and
+ * return parsed findings.
+ *
+ * <p>Implementations MUST:
+ * <ul>
+ * <li>Only read files from {@code scanDir}.
+ * <li>Honour the {@code timeoutSeconds} limit; return empty list on timeout.
+ * <li>Never throw for scanner-level errors — log and return empty list.
+ * </ul>
+ *
+ * @param scanDir Directory containing one or more dependency manifest files
+ * @param timeoutSeconds Maximum seconds to allow the scanner to run
+ * @return Parsed vulnerability findings (never null; may be empty)
+ * @throws IOException If subprocess I/O fails at the OS level
+ * @throws InterruptedException If the calling thread is interrupted
+ */
+ List<VulnerabilityFinding> scan(Path scanDir, int timeoutSeconds)
+ throws IOException, InterruptedException;
+}
diff --git a/pantera-main/src/main/java/com/auto1/pantera/vuln/TrivyVulnerabilityScanner.java b/pantera-main/src/main/java/com/auto1/pantera/vuln/TrivyVulnerabilityScanner.java
new file mode 100644
index 000000000..d4e8883af
--- /dev/null
+++ b/pantera-main/src/main/java/com/auto1/pantera/vuln/TrivyVulnerabilityScanner.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2025-2026 Auto1 Group
+ * Maintainers: Auto1 DevOps Team
+ * Lead Maintainer: Ayd Asraf
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License v3.0.
+ *
+ * Originally based on Artipie (https://github.com/artipie/artipie), MIT License.
+ */
+package com.auto1.pantera.vuln;
+
+import com.auto1.pantera.asto.Storage;
+import com.auto1.pantera.vuln.backend.TrivyScannerBackend;
+import com.auto1.pantera.vuln.preparer.MavenPomArtifactPreparer;
+import com.auto1.pantera.vuln.preparer.NpmArtifactPreparer;
+import com.auto1.pantera.vuln.preparer.PypiSdistArtifactPreparer;
+import java.util.List;
+import java.util.concurrent.CompletableFuture;
+
+/**
+ * Trivy-backed vulnerability scanner.
+ *
+ * @deprecated Use {@link DefaultVulnerabilityScanner} with a {@link TrivyScannerBackend}
+ * and the standard preparers instead. This class is kept only for
+ * backward compatibility and will be removed in a future release.
+ *
+ * @since 2.1.0
+ */
+@Deprecated
+public final class TrivyVulnerabilityScanner implements VulnerabilityScanner {
+
+ /**
+ * Delegate scanner.
+ */
+ private final DefaultVulnerabilityScanner delegate;
+
+ /**
+ * Ctor.
+ * @param settings Vulnerability settings
+ */
+ public TrivyVulnerabilityScanner(final VulnerabilitySettings settings) {
+ this.delegate = new DefaultVulnerabilityScanner(
+ new TrivyScannerBackend(settings.scannerPath()),
+ List.of(
+ new NpmArtifactPreparer(),
+ new MavenPomArtifactPreparer(),
+ new PypiSdistArtifactPreparer()
+ ),
+ settings
+ );
+ }
+
+ @Override
+ public CompletableFuture<VulnerabilityReport> scan(
+ final String repoName,
+ final String artifactPath,
+ final Storage storage
+ ) {
+ return this.delegate.scan(repoName, artifactPath, storage);
+ }
+}
diff --git a/pantera-main/src/main/java/com/auto1/pantera/vuln/VulnerabilityDao.java b/pantera-main/src/main/java/com/auto1/pantera/vuln/VulnerabilityDao.java
new file mode 100644
index 000000000..c9f16dc9c
--- /dev/null
+++ b/pantera-main/src/main/java/com/auto1/pantera/vuln/VulnerabilityDao.java
@@ -0,0 +1,471 @@
+/*
+ * Copyright (c) 2025-2026 Auto1 Group
+ * Maintainers: Auto1 DevOps Team
+ * Lead Maintainer: Ayd Asraf
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License v3.0.
+ *
+ * Originally based on Artipie (https://github.com/artipie/artipie), MIT License.
+ */
+package com.auto1.pantera.vuln;
+
+import io.vertx.core.json.JsonObject;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Timestamp;
+import java.time.Instant;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import javax.sql.DataSource;
+
+/**
+ * JDBC-backed DAO for vulnerability scan results.
+ *
+ * <p>All data lives in the single {@code vulnerability_findings} table (V107).
+ * One row per CVE finding; clean scans (zero findings) insert a sentinel row
+ * with {@code cve_id = ''} so that the scan timestamp is always recorded.
+ *
+ * @since 2.1.0
+ */
+public final class VulnerabilityDao {
+
+ /**
+ * Sentinel value used as {@code cve_id} when a scan found no vulnerabilities.
+ * Allows the scan timestamp to be persisted even for clean artifacts.
+ */
+ private static final String SENTINEL = "";
+
+ /**
+ * Database data source.
+ */
+ private final DataSource dataSource;
+
+ /**
+ * Ctor.
+ * @param dataSource JDBC data source
+ */
+ public VulnerabilityDao(final DataSource dataSource) {
+ this.dataSource = dataSource;
+ }
+
+ /**
+ * Upsert a vulnerability report.
+ *
+ * <p>Deletes all existing rows for {@code (repo_name, artifact_path)}, then
+ * inserts one row per finding. If there are no findings a single sentinel row
+ * ({@code cve_id = ''}) is inserted so the scan timestamp is preserved.
+ *
+ * @param report Report to persist
+ */
+ public void upsert(final VulnerabilityReport report) {
+ final String delete =
+ "DELETE FROM vulnerability_findings WHERE repo_name = ? AND artifact_path = ?";
+ final String insert = String.join(" ",
+ "INSERT INTO vulnerability_findings",
+ "(repo_name, artifact_path, scanned_at, scanner, cve_id,",
+ " severity, package_name, installed_version, fixed_version, title)",
+ "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
+ );
+ try (Connection conn = this.dataSource.getConnection()) {
+ conn.setAutoCommit(false);
+ try {
+ try (PreparedStatement ps = conn.prepareStatement(delete)) {
+ ps.setString(1, report.repoName());
+ ps.setString(2, report.artifactPath());
+ ps.executeUpdate();
+ }
+ try (PreparedStatement ps = conn.prepareStatement(insert)) {
+ final Timestamp ts = Timestamp.from(report.scannedAt());
+ if (report.findings().isEmpty()) {
+ // Sentinel row — records that this artifact was scanned (with no findings)
+ ps.setString(1, report.repoName());
+ ps.setString(2, report.artifactPath());
+ ps.setTimestamp(3, ts);
+ ps.setString(4, report.scanner());
+ ps.setString(5, SENTINEL);
+ ps.setString(6, "UNKNOWN");
+ ps.setString(7, "");
+ ps.setString(8, "");
+ ps.setString(9, "");
+ ps.setString(10, "");
+ ps.executeUpdate();
+ } else {
+ for (final VulnerabilityFinding f : report.findings()) {
+ ps.setString(1, report.repoName());
+ ps.setString(2, report.artifactPath());
+ ps.setTimestamp(3, ts);
+ ps.setString(4, report.scanner());
+ ps.setString(5, f.cveId());
+ ps.setString(6, f.severity());
+ ps.setString(7, f.packageName());
+ ps.setString(8, f.installedVersion());
+ ps.setString(9, f.fixedVersion());
+ ps.setString(10, f.title());
+ ps.addBatch();
+ }
+ ps.executeBatch();
+ }
+ }
+ conn.commit();
+ } catch (final SQLException ex) {
+ conn.rollback();
+ throw ex;
+ } finally {
+ conn.setAutoCommit(true);
+ }
+ } catch (final SQLException ex) {
+ throw new IllegalStateException("Failed to upsert vulnerability report", ex);
+ }
+ }
+
+ /**
+ * Find the cached scan report for a specific artifact.
+ * Returns empty if the artifact has never been scanned.
+ * @param repoName Repository name
+ * @param artifactPath Artifact path
+ * @return Optional report, empty if never scanned
+ */
+ public Optional<VulnerabilityReport> findByArtifact(
+ final String repoName, final String artifactPath
+ ) {
+ final String sql = String.join(" ",
+ "SELECT scanned_at, scanner, cve_id, severity,",
+ " package_name, installed_version, fixed_version, title",
+ "FROM vulnerability_findings",
+ "WHERE repo_name = ? AND artifact_path = ?",
+ "ORDER BY scanned_at DESC"
+ );
+ try (Connection conn = this.dataSource.getConnection();
+ PreparedStatement ps = conn.prepareStatement(sql)) {
+ ps.setString(1, repoName);
+ ps.setString(2, artifactPath);
+ try (ResultSet rs = ps.executeQuery()) {
+ Instant scannedAt = null;
+ String scanner = "trivy";
+ final List<VulnerabilityFinding> findings = new ArrayList<>();
+ while (rs.next()) {
+ if (scannedAt == null) {
+ scannedAt = rs.getTimestamp("scanned_at").toInstant();
+ scanner = rs.getString("scanner");
+ }
+ final String cveId = rs.getString("cve_id");
+ if (!SENTINEL.equals(cveId)) {
+ findings.add(readFinding(rs, cveId));
+ }
+ }
+ if (scannedAt == null) {
+ return Optional.empty();
+ }
+ return Optional.of(
+ new VulnerabilityReport(repoName, artifactPath, scannedAt, scanner, findings)
+ );
+ }
+ } catch (final SQLException ex) {
+ throw new IllegalStateException("Failed to find vulnerability report", ex);
+ }
+ }
+
+ /**
+ * Find all cached reports for a given repository, ordered by severity descending.
+ * @param repoName Repository name
+ * @return List of per-artifact reports (may be empty)
+ */
+ public List<VulnerabilityReport> findByRepo(final String repoName) {
+ final String sql = String.join(" ",
+ "SELECT artifact_path, scanner, scanned_at, cve_id, severity,",
+ " package_name, installed_version, fixed_version, title",
+ "FROM vulnerability_findings",
+ "WHERE repo_name = ?",
+ "ORDER BY artifact_path, scanned_at DESC"
+ );
+ // Use LinkedHashMap to preserve artifact_path insertion order while grouping
+ final Map<String, List<Object[]>> grouped = new LinkedHashMap<>();
+ try (Connection conn = this.dataSource.getConnection();
+ PreparedStatement ps = conn.prepareStatement(sql)) {
+ ps.setString(1, repoName);
+ try (ResultSet rs = ps.executeQuery()) {
+ while (rs.next()) {
+ final String artifactPath = rs.getString("artifact_path");
+ grouped.computeIfAbsent(artifactPath, k -> new ArrayList<>())
+ .add(new Object[]{
+ rs.getTimestamp("scanned_at").toInstant(),
+ rs.getString("scanner"),
+ rs.getString("cve_id"),
+ rs.getString("severity"),
+ rs.getString("package_name"),
+ rs.getString("installed_version"),
+ rs.getString("fixed_version"),
+ rs.getString("title")
+ });
+ }
+ }
+ } catch (final SQLException ex) {
+ throw new IllegalStateException("Failed to list vulnerability reports for repo", ex);
+ }
+ final List<VulnerabilityReport> reports = new ArrayList<>(grouped.size());
+ for (final Map.Entry<String, List<Object[]>> entry : grouped.entrySet()) {
+ final String artifactPath = entry.getKey();
+ final List