"""Prowler security scanner API endpoints."""
from uuid import UUID

from fastapi import APIRouter, Query

from app.core.audit_middleware import log_audit
from app.core.dependencies import DB, ComplianceUser, AnyInternalUser, VerifiedOrgId
# Fix: hoisted from a function-local import inside get_scan_detail (PEP 8:
# imports belong at the top of the file; the local import hid the dependency).
from app.core.exceptions import NotFoundError
from app.schemas.prowler import (
    ProwlerScanTrigger,
    ProwlerScanResultResponse,
    ProwlerCompliancePosture,
    ProwlerFindingSummary,
)
from app.services import prowler_service

router = APIRouter(
    prefix="/organizations/{org_id}/prowler",
    tags=["prowler"],
)


@router.post("/scan", response_model=dict, status_code=201)
async def trigger_scan(
    org_id: VerifiedOrgId, data: ProwlerScanTrigger, db: DB, current_user: ComplianceUser
):
    """Trigger a Prowler security scan and write an audit-log entry.

    Returns a minimal job descriptor (id / status / collector type) so the
    client can poll the results endpoints.
    """
    job = await prowler_service.trigger_scan(db, org_id, data)
    await log_audit(db, current_user, "trigger_prowler_scan", "prowler", str(job.id), org_id)
    return {
        "job_id": str(job.id),
        "status": job.status,
        "collector_type": job.collector_type,
    }


@router.get("/results", response_model=dict)
async def list_scan_results(
    org_id: VerifiedOrgId, db: DB, current_user: AnyInternalUser,
    severity: str | None = Query(None),
    status: str | None = Query(None),
    service: str | None = Query(None),
    page: int = Query(1, ge=1),
    page_size: int = Query(50, ge=1, le=100),
):
    """List Prowler scan results with optional severity/status/service filters.

    Pagination is 1-based; total_pages is computed with ceiling division.
    """
    items, total = await prowler_service.list_scan_results(
        db, org_id, severity=severity, status=status, service=service,
        page=page, page_size=page_size,
    )
    return {
        "items": [item.model_dump() for item in items],
        "total": total,
        "page": page,
        "page_size": page_size,
        "total_pages": (total + page_size - 1) // page_size,
    }


@router.get("/results/{job_id}", response_model=ProwlerScanResultResponse)
async def get_scan_detail(
    org_id: VerifiedOrgId, job_id: UUID, db: DB, current_user: AnyInternalUser
):
    """Get detailed results for a specific Prowler scan.

    Raises:
        NotFoundError: if no scan with ``job_id`` exists for this org.
    """
    result = await prowler_service.get_scan_detail(db, org_id, job_id)
    if not result:
        raise NotFoundError(f"Prowler scan {job_id} not found")
    return result


@router.get("/compliance-posture", response_model=ProwlerCompliancePosture)
async def get_compliance_posture(
    org_id: VerifiedOrgId, db: DB, current_user: AnyInternalUser
):
    """Get aggregate compliance posture from Prowler scans."""
    return await prowler_service.get_compliance_posture(db, org_id)


@router.get("/findings-summary", response_model=ProwlerFindingSummary)
async def get_findings_summary(
    org_id: VerifiedOrgId, db: DB, current_user: AnyInternalUser
):
    """Get summary of findings from the most recent Prowler scan."""
    return await prowler_service.get_findings_summary(db, org_id)
"""Prowler security scanner collectors for cloud security posture assessment.

Each collector attempts to run the Prowler CLI tool. If Prowler is not
installed or the scan fails, the collector falls back to mock data so that
the application works without live cloud connectivity.
"""
import asyncio
import json
import logging
import os
from datetime import datetime, timezone
from typing import Any

from app.collectors.base import BaseCollector, register_collector
from app.config import get_settings

logger = logging.getLogger(__name__)


async def _run_prowler_scan(
    cloud_provider: str,
    credentials: dict | None,
    scan_scope: dict | None = None,
    output_dir: str | None = None,
) -> list[dict]:
    """Run the prowler CLI as an async subprocess and parse JSON output.

    Args:
        cloud_provider: Prowler provider argument (e.g. ``"aws"``).
        credentials: Optional AWS credentials, exported to the child process
            environment (never placed on the command line).
        scan_scope: Optional dict with ``services`` and/or
            ``compliance_framework`` keys to narrow the scan.
        output_dir: Where Prowler writes its report; defaults to settings.

    Raises:
        asyncio.TimeoutError: if the scan exceeds PROWLER_TIMEOUT_SECONDS.
        RuntimeError: if Prowler exits with an unexpected return code.
        FileNotFoundError: if the expected JSON report file is missing.
    """
    settings = get_settings()
    output_dir = output_dir or settings.PROWLER_OUTPUT_DIR
    os.makedirs(output_dir, exist_ok=True)

    cmd = ["prowler", cloud_provider, "-M", "json", "-o", output_dir, "-F", "prowler-output"]

    if scan_scope:
        if scan_scope.get("services"):
            cmd.extend(["-s", ",".join(scan_scope["services"])])
        if scan_scope.get("compliance_framework"):
            cmd.extend(["--compliance", scan_scope["compliance_framework"]])

    env = os.environ.copy()
    if credentials:
        if credentials.get("aws_access_key_id"):
            env["AWS_ACCESS_KEY_ID"] = credentials["aws_access_key_id"]
        if credentials.get("aws_secret_access_key"):
            env["AWS_SECRET_ACCESS_KEY"] = credentials["aws_secret_access_key"]
        if credentials.get("aws_session_token"):
            env["AWS_SESSION_TOKEN"] = credentials["aws_session_token"]
        if credentials.get("aws_region"):
            env["AWS_DEFAULT_REGION"] = credentials["aws_region"]

    proc = await asyncio.create_subprocess_exec(
        *cmd,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
        env=env,
    )
    try:
        _, stderr = await asyncio.wait_for(
            proc.communicate(), timeout=settings.PROWLER_TIMEOUT_SECONDS
        )
    except asyncio.TimeoutError:
        # Fix: a timeout previously abandoned the still-running Prowler
        # process. Kill and reap it before propagating the same exception so
        # callers see unchanged behavior but no subprocess is leaked.
        proc.kill()
        await proc.wait()
        raise

    if proc.returncode not in (0, 3):  # 3 = findings found (non-zero but expected)
        raise RuntimeError(f"Prowler exited with code {proc.returncode}: {stderr.decode()[:500]}")

    output_file = os.path.join(output_dir, "prowler-output.json")
    if not os.path.exists(output_file):
        raise FileNotFoundError(f"Prowler output not found at {output_file}")

    with open(output_file) as f:
        return json.load(f)


def _normalize_findings(raw_findings: list[dict]) -> list[dict]:
    """Normalize Prowler JSON output keys to a consistent format.

    Accepts either Prowler's CamelCase keys or already-snake_case keys and
    emits a flat snake_case dict per finding.
    """
    normalized = []
    for finding in raw_findings:
        normalized.append({
            "check_id": finding.get("CheckID") or finding.get("check_id", ""),
            "check_title": finding.get("CheckTitle") or finding.get("check_title", ""),
            "status": finding.get("Status") or finding.get("status", ""),
            "severity": finding.get("Severity") or finding.get("severity", ""),
            "service": finding.get("ServiceName") or finding.get("service", ""),
            "region": finding.get("Region") or finding.get("region", ""),
            "resource_id": finding.get("ResourceId") or finding.get("resource_id", ""),
            "resource_arn": finding.get("ResourceArn") or finding.get("resource_arn", ""),
            "status_extended": finding.get("StatusExtended") or finding.get("status_extended", ""),
            "risk": finding.get("Risk") or finding.get("risk", ""),
            # Remediation text is nested in Prowler's native output; fall back
            # to a flat "remediation" key otherwise.
            "remediation": finding.get("Remediation", {}).get("Recommendation", {}).get("Text", "")
            if isinstance(finding.get("Remediation"), dict)
            else finding.get("remediation", ""),
            "compliance": finding.get("Compliance", {}) if isinstance(finding.get("Compliance"), dict) else {},
        })
    return normalized


def _compute_summary_stats(findings: list[dict]) -> dict:
    """Aggregate summary statistics from normalized findings.

    ``by_severity`` and ``by_service`` count FAIL findings only; ``pass_rate``
    is a percentage rounded to one decimal (0.0 for an empty scan).
    """
    total = len(findings)
    passed = sum(1 for f in findings if f["status"].upper() == "PASS")
    failed = sum(1 for f in findings if f["status"].upper() == "FAIL")
    pass_rate = round((passed / total * 100), 1) if total > 0 else 0.0

    by_severity: dict[str, int] = {}
    by_service: dict[str, int] = {}
    for f in findings:
        sev = f.get("severity", "unknown").lower()
        svc = f.get("service", "unknown")
        if f["status"].upper() == "FAIL":
            by_severity[sev] = by_severity.get(sev, 0) + 1
            by_service[svc] = by_service.get(svc, 0) + 1

    return {
        "total": total,
        "passed": passed,
        "failed": failed,
        "pass_rate": pass_rate,
        "by_severity": by_severity,
        "by_service": by_service,
    }


def _mock_finding(
    check_id: str,
    title: str,
    status: str,
    severity: str,
    service: str,
    resource_id: str,
    arn: str,
    extended: str,
    risk: str = "",
    remediation: str = "",
    compliance: dict | None = None,
) -> dict:
    """Build one mock finding dict in the normalized finding shape.

    Factored out of the three collectors' mock responses, which previously
    repeated the same 12-key literal; output dicts are identical to the
    originals (region is always us-east-1 in mock data).
    """
    return {
        "check_id": check_id,
        "check_title": title,
        "status": status,
        "severity": severity,
        "service": service,
        "region": "us-east-1",
        "resource_id": resource_id,
        "resource_arn": arn,
        "status_extended": extended,
        "risk": risk,
        "remediation": remediation,
        "compliance": compliance or {},
    }


# ---------------------------------------------------------------------------
# Full AWS Scan
# ---------------------------------------------------------------------------

@register_collector("prowler_aws_full_scan")
class ProwlerAwsFullScan(BaseCollector):
    """Full AWS posture scan: runs every Prowler check, mock fallback on error."""

    async def collect(self, config: dict, credentials: dict | None = None) -> dict[str, Any]:
        """Run a full Prowler AWS scan; fall back to mock data on any failure."""
        try:
            raw = await _run_prowler_scan("aws", credentials)
            findings = _normalize_findings(raw)
            summary = _compute_summary_stats(findings)
            logger.info("Prowler full AWS scan completed (%d findings)", len(findings))
            return {
                "status": "success",
                "summary": f"Prowler full AWS scan: {summary['total']} checks, {summary['pass_rate']}% pass rate",
                "data": {
                    "collected_at": datetime.now(timezone.utc).isoformat(),
                    "scan_type": "full",
                    "cloud_provider": "aws",
                    "findings": findings,
                    "summary_stats": summary,
                },
            }
        except Exception as exc:
            logger.warning("Prowler full AWS scan failed, using mock data: %s", exc)
            return self._mock_response()

    @staticmethod
    def _mock_response() -> dict[str, Any]:
        """Return a representative mock full-scan result (15 findings)."""
        findings = [
            _mock_finding("iam_root_mfa_enabled", "Ensure MFA is enabled for the root account", "FAIL", "critical", "IAM", "root", "arn:aws:iam::123456789012:root", "Root account does not have MFA enabled", "Root account compromise", "Enable MFA on the root account", {"CIS": ["1.5"], "SOC2": ["CC6.1"]}),
            _mock_finding("iam_password_policy_uppercase", "Ensure IAM password policy requires uppercase", "PASS", "medium", "IAM", "password_policy", "", "Password policy requires uppercase letters", "", "", {"CIS": ["1.8"]}),
            _mock_finding("s3_bucket_public_access", "Ensure S3 buckets block public access", "FAIL", "high", "S3", "my-public-bucket", "arn:aws:s3:::my-public-bucket", "Bucket my-public-bucket has public access enabled", "Data exposure", "Enable S3 Block Public Access", {"CIS": ["2.1.2"], "PCI": ["2.2"]}),
            _mock_finding("s3_bucket_encryption", "Ensure S3 bucket encryption is enabled", "PASS", "medium", "S3", "encrypted-bucket", "arn:aws:s3:::encrypted-bucket", "Bucket encryption is enabled with AES-256", "", "", {"CIS": ["2.1.1"]}),
            _mock_finding("cloudtrail_multi_region", "Ensure CloudTrail is enabled in all regions", "PASS", "high", "CloudTrail", "org-trail", "arn:aws:cloudtrail:us-east-1:123456789012:trail/org-trail", "Multi-region trail is active and logging", "", "", {"CIS": ["3.1"], "SOC2": ["CC7.2"]}),
            _mock_finding("cloudtrail_log_file_validation", "Ensure CloudTrail log file validation is enabled", "PASS", "medium", "CloudTrail", "org-trail", "arn:aws:cloudtrail:us-east-1:123456789012:trail/org-trail", "Log file validation is enabled", "", "", {"CIS": ["3.2"]}),
            _mock_finding("ec2_security_group_open_ssh", "Ensure no security groups allow SSH from 0.0.0.0/0", "FAIL", "high", "EC2", "sg-0abc123def", "arn:aws:ec2:us-east-1:123456789012:security-group/sg-0abc123def", "Security group sg-0abc123def allows SSH from 0.0.0.0/0", "Unauthorized SSH access", "Restrict SSH access to known IP ranges", {"CIS": ["5.2"], "PCI": ["1.3.4"]}),
            _mock_finding("ec2_ebs_encryption", "Ensure EBS volume encryption is enabled by default", "PASS", "medium", "EC2", "ebs-default-encryption", "", "EBS default encryption is enabled", "", "", {"CIS": ["2.2.1"]}),
            _mock_finding("rds_instance_encryption", "Ensure RDS instances have encryption enabled", "PASS", "high", "RDS", "prod-db", "arn:aws:rds:us-east-1:123456789012:db:prod-db", "RDS instance prod-db is encrypted", "", "", {"CIS": ["2.3.1"]}),
            _mock_finding("rds_instance_public_access", "Ensure RDS instances are not publicly accessible", "FAIL", "critical", "RDS", "staging-db", "arn:aws:rds:us-east-1:123456789012:db:staging-db", "RDS instance staging-db is publicly accessible", "Database exposure", "Disable public accessibility on the RDS instance", {"CIS": ["2.3.2"], "HIPAA": ["164.312(e)(1)"]}),
            _mock_finding("iam_user_console_access_mfa", "Ensure MFA is enabled for all IAM users with console access", "FAIL", "high", "IAM", "dev-user-legacy", "arn:aws:iam::123456789012:user/dev-user-legacy", "User dev-user-legacy has console access without MFA", "Account compromise", "Enable MFA for the user", {"CIS": ["1.10"], "SOC2": ["CC6.1"]}),
            _mock_finding("s3_bucket_versioning", "Ensure S3 bucket versioning is enabled", "PASS", "low", "S3", "versioned-bucket", "arn:aws:s3:::versioned-bucket", "Bucket versioning is enabled", "", "", {}),
            _mock_finding("cloudtrail_kms_encryption", "Ensure CloudTrail logs are encrypted with KMS", "PASS", "medium", "CloudTrail", "org-trail", "arn:aws:cloudtrail:us-east-1:123456789012:trail/org-trail", "CloudTrail logs are encrypted with KMS", "", "", {"CIS": ["3.7"]}),
            _mock_finding("ec2_instance_imdsv2", "Ensure EC2 instances use IMDSv2", "FAIL", "medium", "EC2", "i-0abc123def456", "arn:aws:ec2:us-east-1:123456789012:instance/i-0abc123def456", "Instance i-0abc123def456 does not enforce IMDSv2", "SSRF attacks can extract credentials", "Set HttpTokens to required on the instance", {"CIS": ["5.6"]}),
            _mock_finding("iam_access_key_rotation", "Ensure access keys are rotated within 90 days", "PASS", "medium", "IAM", "admin-user", "arn:aws:iam::123456789012:user/admin-user", "Access key was rotated 45 days ago", "", "", {"CIS": ["1.14"]}),
        ]
        summary = _compute_summary_stats(findings)
        return {
            "status": "success",
            "summary": f"Prowler full AWS scan: {summary['total']} checks, {summary['pass_rate']}% pass rate (mock)",
            "data": {
                "collected_at": datetime.now(timezone.utc).isoformat(),
                "scan_type": "full",
                "cloud_provider": "aws",
                "findings": findings,
                "summary_stats": summary,
            },
        }


# ---------------------------------------------------------------------------
# Service-scoped AWS Scan
# ---------------------------------------------------------------------------

@register_collector("prowler_aws_service_scan")
class ProwlerAwsServiceScan(BaseCollector):
    """AWS scan limited to the services listed in config (default iam, s3)."""

    async def collect(self, config: dict, credentials: dict | None = None) -> dict[str, Any]:
        """Run a service-scoped Prowler scan; fall back to mock data on failure."""
        services = config.get("services", ["iam", "s3"])
        try:
            raw = await _run_prowler_scan("aws", credentials, scan_scope={"services": services})
            findings = _normalize_findings(raw)
            summary = _compute_summary_stats(findings)
            logger.info("Prowler service scan (%s) completed (%d findings)", services, len(findings))
            return {
                "status": "success",
                "summary": f"Prowler service scan ({', '.join(services)}): {summary['total']} checks, {summary['pass_rate']}% pass rate",
                "data": {
                    "collected_at": datetime.now(timezone.utc).isoformat(),
                    "scan_type": "service",
                    "cloud_provider": "aws",
                    "services_scanned": services,
                    "findings": findings,
                    "summary_stats": summary,
                },
            }
        except Exception as exc:
            logger.warning("Prowler service scan failed, using mock data: %s", exc)
            return self._mock_response(services)

    @staticmethod
    def _mock_response(services: list[str] | None = None) -> dict[str, Any]:
        """Return a representative mock service-scan result (8 IAM/S3 findings)."""
        services = services or ["iam", "s3"]
        findings = [
            _mock_finding("iam_root_mfa_enabled", "Ensure MFA is enabled for the root account", "FAIL", "critical", "IAM", "root", "arn:aws:iam::123456789012:root", "Root account does not have MFA enabled", "Root account compromise", "Enable MFA on the root account", {"CIS": ["1.5"]}),
            _mock_finding("iam_password_policy_uppercase", "Ensure IAM password policy requires uppercase", "PASS", "medium", "IAM", "password_policy", "", "Password policy requires uppercase", "", "", {"CIS": ["1.8"]}),
            _mock_finding("s3_bucket_public_access", "Ensure S3 buckets block public access", "FAIL", "high", "S3", "my-public-bucket", "arn:aws:s3:::my-public-bucket", "Bucket has public access", "Data exposure", "Enable Block Public Access", {"CIS": ["2.1.2"]}),
            _mock_finding("s3_bucket_encryption", "Ensure S3 bucket encryption is enabled", "PASS", "medium", "S3", "encrypted-bucket", "arn:aws:s3:::encrypted-bucket", "Encryption enabled", "", "", {"CIS": ["2.1.1"]}),
            _mock_finding("iam_user_console_access_mfa", "Ensure MFA for console users", "FAIL", "high", "IAM", "dev-user", "arn:aws:iam::123456789012:user/dev-user", "User lacks MFA", "Account compromise", "Enable MFA", {"CIS": ["1.10"]}),
            _mock_finding("s3_bucket_versioning", "Ensure S3 bucket versioning is enabled", "PASS", "low", "S3", "versioned-bucket", "arn:aws:s3:::versioned-bucket", "Versioning enabled", "", "", {}),
            _mock_finding("iam_access_key_rotation", "Ensure access keys rotated within 90 days", "PASS", "medium", "IAM", "admin-user", "arn:aws:iam::123456789012:user/admin-user", "Rotated 30 days ago", "", "", {"CIS": ["1.14"]}),
            _mock_finding("s3_bucket_logging", "Ensure S3 bucket logging is enabled", "FAIL", "medium", "S3", "unlogged-bucket", "arn:aws:s3:::unlogged-bucket", "Logging not enabled", "Audit trail gap", "Enable server access logging", {"CIS": ["2.6"]}),
        ]
        summary = _compute_summary_stats(findings)
        return {
            "status": "success",
            "summary": f"Prowler service scan ({', '.join(services)}): {summary['total']} checks, {summary['pass_rate']}% pass rate (mock)",
            "data": {
                "collected_at": datetime.now(timezone.utc).isoformat(),
                "scan_type": "service",
                "cloud_provider": "aws",
                "services_scanned": services,
                "findings": findings,
                "summary_stats": summary,
            },
        }


# ---------------------------------------------------------------------------
# Compliance Framework Scan
# ---------------------------------------------------------------------------

@register_collector("prowler_aws_compliance_scan")
class ProwlerAwsComplianceScan(BaseCollector):
    """AWS scan scoped to one compliance framework (default cis_1.5_aws)."""

    async def collect(self, config: dict, credentials: dict | None = None) -> dict[str, Any]:
        """Run a compliance-framework Prowler scan; mock fallback on failure."""
        framework = config.get("compliance_framework", "cis_1.5_aws")
        try:
            raw = await _run_prowler_scan("aws", credentials, scan_scope={"compliance_framework": framework})
            findings = _normalize_findings(raw)
            summary = _compute_summary_stats(findings)
            logger.info("Prowler compliance scan (%s) completed (%d findings)", framework, len(findings))
            return {
                "status": "success",
                "summary": f"Prowler compliance scan ({framework}): {summary['total']} checks, {summary['pass_rate']}% pass rate",
                "data": {
                    "collected_at": datetime.now(timezone.utc).isoformat(),
                    "scan_type": "compliance",
                    "cloud_provider": "aws",
                    "compliance_framework": framework,
                    "findings": findings,
                    "summary_stats": summary,
                },
            }
        except Exception as exc:
            logger.warning("Prowler compliance scan failed, using mock data: %s", exc)
            return self._mock_response(framework)

    @staticmethod
    def _mock_response(framework: str = "cis_1.5_aws") -> dict[str, Any]:
        """Return a representative mock compliance-scan result (10 findings)."""
        findings = [
            _mock_finding("iam_root_mfa_enabled", "Ensure MFA is enabled for the root account", "FAIL", "critical", "IAM", "root", "arn:aws:iam::123456789012:root", "Root account MFA not enabled", "Root compromise", "Enable root MFA", {"CIS": ["1.5"]}),
            _mock_finding("iam_password_policy_length", "Ensure password policy requires minimum length of 14", "PASS", "medium", "IAM", "password_policy", "", "Min length is 14", "", "", {"CIS": ["1.9"]}),
            _mock_finding("cloudtrail_multi_region", "Ensure CloudTrail is enabled in all regions", "PASS", "high", "CloudTrail", "org-trail", "arn:aws:cloudtrail:us-east-1:123456789012:trail/org-trail", "Multi-region enabled", "", "", {"CIS": ["3.1"]}),
            _mock_finding("cloudtrail_log_file_validation", "Ensure CloudTrail log file validation is enabled", "PASS", "medium", "CloudTrail", "org-trail", "arn:aws:cloudtrail:us-east-1:123456789012:trail/org-trail", "Validation enabled", "", "", {"CIS": ["3.2"]}),
            _mock_finding("s3_bucket_public_access", "Ensure S3 buckets block public access", "FAIL", "high", "S3", "public-bucket", "arn:aws:s3:::public-bucket", "Public access not blocked", "Data leak", "Enable Block Public Access", {"CIS": ["2.1.2"]}),
            _mock_finding("ec2_security_group_open_ssh", "Ensure no SGs allow SSH from 0.0.0.0/0", "FAIL", "high", "EC2", "sg-0abc123", "arn:aws:ec2:us-east-1:123456789012:security-group/sg-0abc123", "SSH open to world", "Unauthorized access", "Restrict SSH to known IPs", {"CIS": ["5.2"]}),
            _mock_finding("rds_instance_encryption", "Ensure RDS encryption is enabled", "PASS", "high", "RDS", "prod-db", "arn:aws:rds:us-east-1:123456789012:db:prod-db", "Encrypted", "", "", {"CIS": ["2.3.1"]}),
            _mock_finding("ec2_ebs_encryption", "Ensure EBS default encryption is enabled", "PASS", "medium", "EC2", "ebs-default", "", "Default encryption enabled", "", "", {"CIS": ["2.2.1"]}),
            _mock_finding("iam_user_console_access_mfa", "Ensure MFA for console users", "FAIL", "high", "IAM", "legacy-user", "arn:aws:iam::123456789012:user/legacy-user", "Console access without MFA", "Account takeover", "Enable MFA", {"CIS": ["1.10"]}),
            _mock_finding("cloudtrail_kms_encryption", "Ensure CloudTrail encrypted with KMS", "PASS", "medium", "CloudTrail", "org-trail", "arn:aws:cloudtrail:us-east-1:123456789012:trail/org-trail", "KMS encryption enabled", "", "", {"CIS": ["3.7"]}),
        ]
        summary = _compute_summary_stats(findings)
        return {
            "status": "success",
            "summary": f"Prowler compliance scan ({framework}): {summary['total']} checks, {summary['pass_rate']}% pass rate (mock)",
            "data": {
                "collected_at": datetime.now(timezone.utc).isoformat(),
                "scan_type": "compliance",
                "cloud_provider": "aws",
                "compliance_framework": framework,
                "findings": findings,
                "summary_stats": summary,
            },
        }
"""Pydantic schemas for Prowler security scanner integration."""
from pydantic import BaseModel, Field


class ProwlerScanTrigger(BaseModel):
    """Request body for POST /prowler/scan."""

    # Integration whose stored credentials/config are used for the scan.
    integration_id: str
    scan_type: str = Field(default="full", description="full | service | compliance")
    services: list[str] | None = Field(default=None, description="Services to scan (for service scan type)")
    compliance_framework: str | None = Field(default=None, description="Framework ID (for compliance scan type)")


class ProwlerFinding(BaseModel):
    """One normalized Prowler check result (all fields default to empty)."""

    check_id: str = ""
    check_title: str = ""
    # "PASS" / "FAIL" as reported by Prowler.
    status: str = ""
    severity: str = ""
    service: str = ""
    region: str = ""
    resource_id: str = ""
    resource_arn: str = ""
    status_extended: str = ""
    risk: str = ""
    remediation: str = ""
    # Mapping of framework name -> list of control IDs, e.g. {"CIS": ["1.5"]}.
    compliance: dict = Field(default_factory=dict)


class ProwlerScanResultResponse(BaseModel):
    """Detail payload for a single Prowler scan job."""

    job_id: str
    status: str
    scan_type: str | None = None
    cloud_provider: str | None = None
    total_findings: int = 0
    passed: int = 0
    failed: int = 0
    pass_rate: float = 0.0
    # ISO-8601 timestamp string, presumably UTC — TODO confirm against producer.
    created_at: str | None = None
    findings: list[ProwlerFinding] = Field(default_factory=list)


class ComplianceFrameworkPosture(BaseModel):
    """Pass/fail rollup for one compliance framework."""

    framework: str
    total_checks: int = 0
    passed: int = 0
    failed: int = 0
    pass_rate: float = 0.0


class ServicePosture(BaseModel):
    """Pass/fail rollup for one cloud service."""

    service: str
    total_checks: int = 0
    passed: int = 0
    failed: int = 0
    pass_rate: float = 0.0


class ProwlerCompliancePosture(BaseModel):
    """Aggregate posture across frameworks and services (GET /compliance-posture)."""

    frameworks: list[ComplianceFrameworkPosture] = Field(default_factory=list)
    services: list[ServicePosture] = Field(default_factory=list)
    overall_pass_rate: float = 0.0
    total_scans: int = 0
    last_scan_at: str | None = None


class ProwlerFindingSummary(BaseModel):
    """Summary of the most recent scan (GET /findings-summary)."""

    total: int = 0
    passed: int = 0
    failed: int = 0
    pass_rate: float = 0.0
    # Counts of FAIL findings keyed by lowercase severity / service name.
    by_severity: dict[str, int] = Field(default_factory=dict)
    by_service: dict[str, int] = Field(default_factory=dict)
    critical_count: int = 0
    high_count: int = 0
    last_scan_at: str | None = None
+ severity_order = {"critical": 4, "high": 3, "medium": 2, "low": 1} + min_severity = severity_order.get(severity_threshold, 2) + + from app.models.collection_job import CollectionJob + scan_result = await db.execute( + select(CollectionJob).where( + CollectionJob.org_id == org_id, + CollectionJob.collector_type.like("prowler_%"), + CollectionJob.status == "completed", + ).order_by(CollectionJob.created_at.desc()).limit(1) + ) + latest_scan = scan_result.scalar_one_or_none() + + if latest_scan and latest_scan.result_data: + data = latest_scan.result_data.get("data", {}) + findings = data.get("findings", []) + critical_findings = [ + f for f in findings + if f.get("status", "").upper() == "FAIL" + and severity_order.get(f.get("severity", "").lower(), 0) >= min_severity + ] + if critical_findings: + alert = MonitorAlert( + org_id=org_id, + rule_id=rule.id, + severity="high", + title=f"Prowler scan: {len(critical_findings)} findings at {severity_threshold}+ severity", + details={ + "finding_count": len(critical_findings), + "scan_job_id": str(latest_scan.id), + "top_findings": [ + {"check_id": f.get("check_id"), "severity": f.get("severity"), "service": f.get("service")} + for f in critical_findings[:5] + ], + }, + triggered_at=now, + ) + db.add(alert) + alerts_created.append(alert) + rule.last_result = "fail" + else: + rule.last_result = "pass" + else: + rule.last_result = "pass" + else: rule.last_result = "pass" diff --git a/backend/app/services/prowler_service.py b/backend/app/services/prowler_service.py new file mode 100644 index 0000000..4e99226 --- /dev/null +++ b/backend/app/services/prowler_service.py @@ -0,0 +1,268 @@ +"""Business logic for Prowler security scanner operations.""" +from uuid import UUID + +from sqlalchemy import select, func +from sqlalchemy.ext.asyncio import AsyncSession + +from app.models.collection_job import CollectionJob +from app.models.integration import Integration +from app.schemas.integration import CollectionTrigger +from 
"""Business logic for Prowler security scanner operations."""
from uuid import UUID

from sqlalchemy import select, func
from sqlalchemy.ext.asyncio import AsyncSession

from app.models.collection_job import CollectionJob
from app.models.integration import Integration
from app.schemas.integration import CollectionTrigger
from app.schemas.prowler import (
    ProwlerScanTrigger,
    ProwlerFinding,
    ProwlerScanResultResponse,
    ProwlerCompliancePosture,
    ProwlerFindingSummary,
    ComplianceFrameworkPosture,
    ServicePosture,
)
from app.services import collection_service


# API-level scan_type -> registered collector name. Unknown types fall back
# to the full scan (see trigger_scan).
SCAN_TYPE_TO_COLLECTOR = {
    "full": "prowler_aws_full_scan",
    "service": "prowler_aws_service_scan",
    "compliance": "prowler_aws_compliance_scan",
}


def _prowler_job_conditions(org_id: UUID) -> tuple:
    """WHERE conditions selecting this org's Prowler collection jobs."""
    return (
        CollectionJob.org_id == org_id,
        CollectionJob.collector_type.like("prowler_%"),
    )


def _scan_payload(job: CollectionJob) -> tuple[dict, dict]:
    """Return (inner result data, summary_stats) from a job's result_data."""
    inner = (job.result_data or {}).get("data", {})
    return inner, inner.get("summary_stats", {})


def _filter_findings(
    findings: list[dict],
    severity: str | None,
    status: str | None,
    service: str | None,
) -> list[dict]:
    """Apply the optional case-insensitive finding filters."""
    if severity:
        findings = [f for f in findings if f.get("severity", "").lower() == severity.lower()]
    if status:
        findings = [f for f in findings if f.get("status", "").upper() == status.upper()]
    if service:
        findings = [f for f in findings if f.get("service", "").lower() == service.lower()]
    return findings


def _job_to_response(job: CollectionJob, findings: list[dict]) -> ProwlerScanResultResponse:
    """Build the API response model for one scan job and a findings list."""
    inner, stats = _scan_payload(job)
    return ProwlerScanResultResponse(
        job_id=str(job.id),
        status=job.status,
        scan_type=inner.get("scan_type"),
        cloud_provider=inner.get("cloud_provider"),
        total_findings=stats.get("total", 0),
        passed=stats.get("passed", 0),
        failed=stats.get("failed", 0),
        pass_rate=stats.get("pass_rate", 0.0),
        created_at=job.created_at.isoformat() if job.created_at else None,
        findings=[ProwlerFinding(**f) for f in findings],
    )


async def trigger_scan(
    db: AsyncSession, org_id: UUID, data: ProwlerScanTrigger
) -> CollectionJob:
    """Trigger a Prowler scan via the collection service.

    Scan parameters (service list / compliance framework) are persisted onto
    the integration's config so the collector can read them, then the generic
    collection pipeline is invoked with the mapped collector type.
    """
    collector_type = SCAN_TYPE_TO_COLLECTOR.get(data.scan_type, "prowler_aws_full_scan")
    integration_id = UUID(data.integration_id)

    result = await db.execute(
        select(Integration).where(
            Integration.id == integration_id, Integration.org_id == org_id
        )
    )
    integration = result.scalar_one_or_none()
    if integration:
        # Copy-and-reassign so the ORM reliably detects the JSON column change.
        config = dict(integration.config or {})
        if data.services:
            config["services"] = data.services
        if data.compliance_framework:
            config["compliance_framework"] = data.compliance_framework
        integration.config = config
        await db.flush()

    trigger = CollectionTrigger(collector_type=collector_type)
    return await collection_service.trigger_collection(db, org_id, integration_id, trigger)


async def list_scan_results(
    db: AsyncSession,
    org_id: UUID,
    severity: str | None = None,
    status: str | None = None,
    service: str | None = None,
    page: int = 1,
    page_size: int = 50,
) -> tuple[list[ProwlerScanResultResponse], int]:
    """List Prowler scan results (collection jobs with prowler_ collector types).

    Pagination and the `total` count apply to scan *jobs*; the
    severity/status/service filters trim each job's findings list, not which
    jobs are returned.
    """
    conditions = _prowler_job_conditions(org_id)

    total = (
        await db.execute(select(func.count()).select_from(CollectionJob).where(*conditions))
    ).scalar() or 0

    # Order before paginating so pages are deterministic (newest first).
    q = (
        select(CollectionJob)
        .where(*conditions)
        .order_by(CollectionJob.created_at.desc())
        .offset((page - 1) * page_size)
        .limit(page_size)
    )
    jobs = list((await db.execute(q)).scalars().all())

    responses = []
    for job in jobs:
        inner, _ = _scan_payload(job)
        findings = _filter_findings(inner.get("findings", []), severity, status, service)
        responses.append(_job_to_response(job, findings))

    return responses, total


async def get_scan_detail(
    db: AsyncSession, org_id: UUID, job_id: UUID
) -> ProwlerScanResultResponse | None:
    """Get detailed results for a specific Prowler scan job, or None if absent."""
    result = await db.execute(
        select(CollectionJob).where(
            CollectionJob.id == job_id, *_prowler_job_conditions(org_id)
        )
    )
    job = result.scalar_one_or_none()
    if not job:
        return None

    inner, _ = _scan_payload(job)
    return _job_to_response(job, inner.get("findings", []))


async def get_compliance_posture(
    db: AsyncSession, org_id: UUID
) -> ProwlerCompliancePosture:
    """Aggregate compliance posture from completed Prowler scans.

    Framework/service breakdowns come from the most recent completed scan;
    `total_scans` counts up to the 20 most recent completed scans.
    """
    result = await db.execute(
        select(CollectionJob)
        .where(*_prowler_job_conditions(org_id), CollectionJob.status == "completed")
        .order_by(CollectionJob.created_at.desc())
        .limit(20)
    )
    jobs = list(result.scalars().all())
    if not jobs:
        return ProwlerCompliancePosture()

    latest = jobs[0]
    inner, _ = _scan_payload(latest)
    findings = inner.get("findings", [])

    framework_map: dict[str, dict] = {}
    service_map: dict[str, dict] = {}

    def _tally(bucket: dict[str, dict], key: str, is_pass: bool) -> None:
        # Increment pass/fail counters for `key` in the given bucket.
        entry = bucket.setdefault(key, {"total": 0, "passed": 0, "failed": 0})
        entry["total"] += 1
        entry["passed" if is_pass else "failed"] += 1

    for f in findings:
        is_pass = f.get("status", "").upper() == "PASS"
        _tally(service_map, f.get("service", "Unknown"), is_pass)
        # Only the framework names matter here, so iterate keys directly.
        for fw in f.get("compliance", {}):
            _tally(framework_map, fw, is_pass)

    def _rate(v: dict) -> float:
        # Pass rate as a percentage rounded to one decimal; 0.0 for empty buckets.
        return round(v["passed"] / v["total"] * 100, 1) if v["total"] > 0 else 0.0

    frameworks = [
        ComplianceFrameworkPosture(
            framework=fw,
            total_checks=v["total"],
            passed=v["passed"],
            failed=v["failed"],
            pass_rate=_rate(v),
        )
        for fw, v in framework_map.items()
    ]
    services = [
        ServicePosture(
            service=svc,
            total_checks=v["total"],
            passed=v["passed"],
            failed=v["failed"],
            pass_rate=_rate(v),
        )
        for svc, v in service_map.items()
    ]

    total_checks = len(findings)
    total_passed = sum(1 for f in findings if f.get("status", "").upper() == "PASS")
    overall = round(total_passed / total_checks * 100, 1) if total_checks > 0 else 0.0

    return ProwlerCompliancePosture(
        frameworks=frameworks,
        services=services,
        overall_pass_rate=overall,
        total_scans=len(jobs),
        last_scan_at=latest.created_at.isoformat() if latest.created_at else None,
    )


async def get_findings_summary(
    db: AsyncSession, org_id: UUID
) -> ProwlerFindingSummary:
    """Summary stats from the most recent completed Prowler scan."""
    result = await db.execute(
        select(CollectionJob)
        .where(*_prowler_job_conditions(org_id), CollectionJob.status == "completed")
        .order_by(CollectionJob.created_at.desc())
        .limit(1)
    )
    job = result.scalar_one_or_none()
    if not job:
        return ProwlerFindingSummary()

    _, stats = _scan_payload(job)
    by_severity = stats.get("by_severity", {})

    return ProwlerFindingSummary(
        total=stats.get("total", 0),
        passed=stats.get("passed", 0),
        failed=stats.get("failed", 0),
        pass_rate=stats.get("pass_rate", 0.0),
        by_severity=by_severity,
        by_service=stats.get("by_service", {}),
        critical_count=by_severity.get("critical", 0),
        high_count=by_severity.get("high", 0),
        last_scan_at=job.created_at.isoformat() if job.created_at else None,
    )
"""API tests for the Prowler security scanner endpoints."""
import uuid

import pytest
from httpx import AsyncClient


@pytest.fixture()
async def prowler_env(client: AsyncClient):
    """Create a test org with a Prowler integration; return (org_id, integration_id).

    Replaces the previous module-level globals mutated by an autouse fixture —
    each test now receives the ids explicitly.
    """
    resp = await client.post(
        "/api/v1/organizations",
        json={"name": "Prowler Test Org", "slug": "prowler-test-org"},
    )
    org_id = resp.json()["id"]

    resp = await client.post(
        f"/api/v1/organizations/{org_id}/integrations",
        json={
            "provider": "prowler",
            "name": "AWS Prowler Scanner",
            "config": {"region": "us-east-1"},
        },
    )
    return org_id, resp.json()["id"]


async def _run_scan(client: AsyncClient, org_id: str, integration_id: str, **extra):
    """Trigger a Prowler scan (full by default) and return the raw response."""
    payload = {"integration_id": integration_id, "scan_type": "full", **extra}
    return await client.post(
        f"/api/v1/organizations/{org_id}/prowler/scan", json=payload
    )


@pytest.mark.asyncio
async def test_prowler_in_providers_list(client: AsyncClient, prowler_env):
    """Prowler should appear in the providers list."""
    org_id, _ = prowler_env
    resp = await client.get(f"/api/v1/organizations/{org_id}/integrations/providers")
    assert resp.status_code == 200
    providers = resp.json()
    assert "prowler" in [p["provider"] for p in providers]

    prowler = next(p for p in providers if p["provider"] == "prowler")
    assert "Prowler" in prowler["name"]
    assert len(prowler["collector_types"]) == 3
    assert "prowler_aws_full_scan" in prowler["collector_types"]
    assert "prowler_aws_service_scan" in prowler["collector_types"]
    assert "prowler_aws_compliance_scan" in prowler["collector_types"]


@pytest.mark.asyncio
async def test_trigger_full_scan(client: AsyncClient, prowler_env):
    """Trigger a full Prowler scan (falls back to mock)."""
    org_id, integration_id = prowler_env
    resp = await _run_scan(client, org_id, integration_id)
    assert resp.status_code == 201
    data = resp.json()
    assert "job_id" in data
    assert data["status"] == "completed"
    assert data["collector_type"] == "prowler_aws_full_scan"


@pytest.mark.asyncio
async def test_trigger_service_scan(client: AsyncClient, prowler_env):
    """Trigger a service-scoped Prowler scan."""
    org_id, integration_id = prowler_env
    resp = await _run_scan(
        client, org_id, integration_id,
        scan_type="service", services=["iam", "s3"],
    )
    assert resp.status_code == 201
    assert resp.json()["collector_type"] == "prowler_aws_service_scan"


@pytest.mark.asyncio
async def test_trigger_compliance_scan(client: AsyncClient, prowler_env):
    """Trigger a compliance-framework Prowler scan."""
    org_id, integration_id = prowler_env
    resp = await _run_scan(
        client, org_id, integration_id,
        scan_type="compliance", compliance_framework="cis_1.5_aws",
    )
    assert resp.status_code == 201
    assert resp.json()["collector_type"] == "prowler_aws_compliance_scan"


@pytest.mark.asyncio
async def test_list_scan_results(client: AsyncClient, prowler_env):
    """List Prowler scan results after triggering a scan."""
    org_id, integration_id = prowler_env
    await _run_scan(client, org_id, integration_id)

    resp = await client.get(f"/api/v1/organizations/{org_id}/prowler/results")
    assert resp.status_code == 200
    data = resp.json()
    assert "items" in data
    assert data["total"] >= 1
    assert len(data["items"]) >= 1

    item = data["items"][0]
    assert "job_id" in item
    assert item["status"] == "completed"
    assert item["total_findings"] > 0


@pytest.mark.asyncio
async def test_get_scan_detail(client: AsyncClient, prowler_env):
    """Get detailed results for a specific Prowler scan."""
    org_id, integration_id = prowler_env
    trigger_resp = await _run_scan(client, org_id, integration_id)
    job_id = trigger_resp.json()["job_id"]

    resp = await client.get(
        f"/api/v1/organizations/{org_id}/prowler/results/{job_id}"
    )
    assert resp.status_code == 200
    data = resp.json()
    assert data["job_id"] == job_id
    assert data["status"] == "completed"
    assert len(data["findings"]) > 0

    # Verify finding structure.
    finding = data["findings"][0]
    assert "check_id" in finding
    assert "status" in finding
    assert "severity" in finding
    assert "service" in finding


@pytest.mark.asyncio
async def test_compliance_posture(client: AsyncClient, prowler_env):
    """Get compliance posture after running scans."""
    org_id, integration_id = prowler_env
    await _run_scan(client, org_id, integration_id)

    resp = await client.get(
        f"/api/v1/organizations/{org_id}/prowler/compliance-posture"
    )
    assert resp.status_code == 200
    data = resp.json()
    assert "frameworks" in data
    assert "services" in data
    assert "overall_pass_rate" in data
    assert data["total_scans"] >= 1
    assert data["overall_pass_rate"] > 0


@pytest.mark.asyncio
async def test_findings_summary(client: AsyncClient, prowler_env):
    """Get findings summary after running a scan."""
    org_id, integration_id = prowler_env
    await _run_scan(client, org_id, integration_id)

    resp = await client.get(
        f"/api/v1/organizations/{org_id}/prowler/findings-summary"
    )
    assert resp.status_code == 200
    data = resp.json()
    assert data["total"] > 0
    assert data["passed"] > 0
    assert data["failed"] > 0
    assert data["pass_rate"] > 0
    assert "by_severity" in data
    assert "by_service" in data


@pytest.mark.asyncio
async def test_collector_mock_fallback():
    """Unit test that collector classes produce valid mock data."""
    from app.collectors.prowler_collectors import (
        ProwlerAwsFullScan,
        ProwlerAwsServiceScan,
        ProwlerAwsComplianceScan,
    )

    # Full scan mock.
    result = await ProwlerAwsFullScan().collect({})
    assert result["status"] == "success"
    assert len(result["data"]["findings"]) == 15
    assert result["data"]["summary_stats"]["total"] == 15

    # Service scan mock.
    result = await ProwlerAwsServiceScan().collect({"services": ["iam", "s3"]})
    assert result["status"] == "success"
    assert len(result["data"]["findings"]) == 8

    # Compliance scan mock.
    result = await ProwlerAwsComplianceScan().collect(
        {"compliance_framework": "cis_1.5_aws"}
    )
    assert result["status"] == "success"
    assert len(result["data"]["findings"]) == 10


@pytest.mark.asyncio
async def test_collector_via_integration_api(client: AsyncClient, prowler_env):
    """Trigger collection via the standard integrations API."""
    org_id, integration_id = prowler_env
    resp = await client.post(
        f"/api/v1/organizations/{org_id}/integrations/{integration_id}/collect",
        json={"collector_type": "prowler_aws_full_scan"},
    )
    assert resp.status_code == 200
    data = resp.json()
    assert data["status"] == "completed"
    assert data["collector_type"] == "prowler_aws_full_scan"
    assert data["result_data"]["status"] == "success"


@pytest.mark.asyncio
async def test_findings_summary_empty(client: AsyncClient):
    """Findings summary returns zeros when no scans exist."""
    # Fresh org with no scans.
    org_resp = await client.post(
        "/api/v1/organizations",
        json={"name": "Empty Prowler Org", "slug": "empty-prowler-org"},
    )
    empty_org_id = org_resp.json()["id"]

    resp = await client.get(
        f"/api/v1/organizations/{empty_org_id}/prowler/findings-summary"
    )
    assert resp.status_code == 200
    data = resp.json()
    assert data["total"] == 0
    assert data["pass_rate"] == 0


@pytest.mark.asyncio
async def test_scan_detail_not_found(client: AsyncClient, prowler_env):
    """Non-existent scan returns 404."""
    org_id, _ = prowler_env
    fake_id = str(uuid.uuid4())
    resp = await client.get(
        f"/api/v1/organizations/{org_id}/prowler/results/{fake_id}"
    )
    assert resp.status_code == 404
str(uuid.uuid4()) + resp = await client.get( + f"/api/v1/organizations/{TEST_ORG_ID}/prowler/results/{fake_id}" + ) + assert resp.status_code == 404 diff --git a/frontend/src/app/(dashboard)/integrations/[id]/page.tsx b/frontend/src/app/(dashboard)/integrations/[id]/page.tsx index 6278d84..36600fd 100644 --- a/frontend/src/app/(dashboard)/integrations/[id]/page.tsx +++ b/frontend/src/app/(dashboard)/integrations/[id]/page.tsx @@ -34,6 +34,11 @@ const PROVIDER_COLLECTORS: Record = { okta: [ { type: "okta_mfa_enrollment", label: "MFA Enrollment" }, ], + prowler: [ + { type: "prowler_aws_full_scan", label: "Full AWS Security Scan" }, + { type: "prowler_aws_service_scan", label: "Service-Specific Scan" }, + { type: "prowler_aws_compliance_scan", label: "Compliance Framework Scan" }, + ], }; const statusVariant: Record = { diff --git a/frontend/src/app/(dashboard)/integrations/page.tsx b/frontend/src/app/(dashboard)/integrations/page.tsx index 3cf7207..80a17bd 100644 --- a/frontend/src/app/(dashboard)/integrations/page.tsx +++ b/frontend/src/app/(dashboard)/integrations/page.tsx @@ -40,6 +40,11 @@ const CREDENTIAL_FIELDS: Record = { + critical: "bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-100", + high: "bg-orange-100 text-orange-800 dark:bg-orange-900 dark:text-orange-100", + medium: "bg-yellow-100 text-yellow-800 dark:bg-yellow-900 dark:text-yellow-100", + low: "bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-100", +}; + +export default function ProwlerScanDetailPage() { + const params = useParams(); + const orgId = useOrgId(); + const jobId = params.jobId as string; + + const { data: scan, isLoading } = useProwlerScanDetail(orgId, jobId); + + if (isLoading) { + return ( +
+ + + +
+ ); + } + + if (!scan) { + return ( +
+ + + Back to Security Scanner + +

Scan not found.

+
+ ); + } + + return ( +
+ + + Back to Security Scanner + + + {/* Header */} +
+ +

+ {scan.scan_type === "full" + ? "Full AWS Scan" + : scan.scan_type === "service" + ? "Service Scan" + : "Compliance Scan"} +

+ + {scan.status} + +
+ + {scan.created_at && ( +

+ {new Date(scan.created_at).toLocaleString()} · {scan.cloud_provider?.toUpperCase()} +

+ )} + + {/* Summary Stats */} +
+ + +
{scan.total_findings}
+
Total Checks
+
+
+ + +
{scan.passed}
+
Passed
+
+
+ + +
{scan.failed}
+
Failed
+
+
+ + +
{scan.pass_rate}%
+
Pass Rate
+
+
+
+ + {/* Findings List */} + + + Findings ({scan.findings?.length ?? 0}) + All security checks from this scan + + + {scan.findings && scan.findings.length > 0 ? ( + scan.findings.map((finding: any, idx: number) => ( +
+ {finding.status === "PASS" ? ( + + ) : ( + + )} +
+
{finding.check_title}
+
+ {finding.check_id} +
+
+ {finding.service} · {finding.region} · {finding.resource_id} +
+ {finding.status_extended && ( +
+ {finding.status_extended} +
+ )} + {finding.remediation && finding.status === "FAIL" && ( +
+ Remediation: {finding.remediation} +
+ )} +
+
+ + {finding.severity} + + + {finding.status} + +
+
+ )) + ) : ( +

+ No findings in this scan. +

+ )} +
+
+
+ ); +} diff --git a/frontend/src/app/(dashboard)/prowler/page.tsx b/frontend/src/app/(dashboard)/prowler/page.tsx new file mode 100644 index 0000000..96a47a6 --- /dev/null +++ b/frontend/src/app/(dashboard)/prowler/page.tsx @@ -0,0 +1,491 @@ +"use client"; + +import { useState } from "react"; +import Link from "next/link"; +import { Card, CardContent, CardHeader, CardTitle, CardDescription } from "@/components/ui/card"; +import { Badge } from "@/components/ui/badge"; +import { Button } from "@/components/ui/button"; +import { Skeleton } from "@/components/ui/skeleton"; +import { + useProwlerFindingsSummary, + useProwlerResults, + useProwlerCompliancePosture, + useProwlerTriggerScan, + useIntegrations, +} from "@/hooks/use-api"; +import { + ShieldCheck, + Play, + Loader2, + AlertTriangle, + CheckCircle, + XCircle, + BarChart3, + X, +} from "lucide-react"; +import { useOrgId } from "@/hooks/use-org-id"; + +type TabValue = "findings" | "scans" | "compliance"; + +const SEVERITY_FILTERS: { label: string; value: string | undefined }[] = [ + { label: "All", value: undefined }, + { label: "Critical", value: "critical" }, + { label: "High", value: "high" }, + { label: "Medium", value: "medium" }, + { label: "Low", value: "low" }, +]; + +const STATUS_FILTERS: { label: string; value: string | undefined }[] = [ + { label: "All", value: undefined }, + { label: "Pass", value: "PASS" }, + { label: "Fail", value: "FAIL" }, +]; + +const severityColor: Record = { + critical: "bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-100", + high: "bg-orange-100 text-orange-800 dark:bg-orange-900 dark:text-orange-100", + medium: "bg-yellow-100 text-yellow-800 dark:bg-yellow-900 dark:text-yellow-100", + low: "bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-100", +}; + +export default function ProwlerPage() { + const orgId = useOrgId(); + const [activeTab, setActiveTab] = useState("findings"); + const [severityFilter, setSeverityFilter] = useState(undefined); + const 
[statusFilter, setStatusFilter] = useState(undefined); + const [showTrigger, setShowTrigger] = useState(false); + const [scanType, setScanType] = useState("full"); + const [scanIntegration, setScanIntegration] = useState(""); + const [scanServices, setScanServices] = useState(""); + const [scanFramework, setScanFramework] = useState("cis_1.5_aws"); + + const { data: summary, isLoading: summaryLoading } = useProwlerFindingsSummary(orgId); + const { data: resultsData, isLoading: resultsLoading } = useProwlerResults(orgId, { + severity: severityFilter, + status: statusFilter, + }); + const { data: posture, isLoading: postureLoading } = useProwlerCompliancePosture(orgId); + const { data: integrationsData } = useIntegrations(orgId); + const triggerScan = useProwlerTriggerScan(orgId); + + const prowlerIntegrations = (integrationsData?.items || []).filter( + (i: any) => i.provider === "prowler" + ); + const results = resultsData?.items || []; + + function handleTriggerScan() { + if (!scanIntegration) return; + triggerScan.mutate( + { + integration_id: scanIntegration, + scan_type: scanType, + services: scanType === "service" ? scanServices.split(",").map((s: string) => s.trim()).filter(Boolean) : undefined, + compliance_framework: scanType === "compliance" ? scanFramework : undefined, + }, + { + onSuccess: () => { + setShowTrigger(false); + }, + } + ); + } + + return ( +
+
+
+

+ + Security Scanner +

+

+ Cloud security posture assessment powered by Prowler +

+
+ +
+ + {/* Trigger Scan Modal */} + {showTrigger && ( + + +
+ Trigger Security Scan + +
+
+ +
+ + +
+ +
+ + +
+ + {scanType === "service" && ( +
+ + setScanServices(e.target.value)} + /> +
+ )} + + {scanType === "compliance" && ( +
+ + +
+ )} + +
+ + +
+
+
+ )} + + {/* Stats Cards */} + {summaryLoading ? ( +
+ {[1, 2, 3, 4].map((i) => ( + + ))} +
+ ) : summary ? ( +
+ + +
+ {summary.pass_rate ?? 0}% +
+
Overall Pass Rate
+
+
+ + +
{summary.total ?? 0}
+
Total Checks
+
+
+ + +
{summary.failed ?? 0}
+
Failed Findings
+
+
+ + +
+ {(summary.critical_count ?? 0) + (summary.high_count ?? 0)} +
+
Critical/High
+
+
+
+ ) : null} + + {/* Tab Buttons */} +
+ + + +
+ + {/* Findings Tab */} + {activeTab === "findings" && ( +
+
+ Severity: + {SEVERITY_FILTERS.map((f) => ( + + ))} + + Status: + {STATUS_FILTERS.map((f) => ( + + ))} +
+ + {resultsLoading ? ( +
+ {[1, 2, 3].map((i) => ( + + ))} +
+ ) : results.length > 0 ? ( +
+ {results.flatMap((scan: any) => + (scan.findings || []).map((finding: any, idx: number) => ( + + + {finding.status === "PASS" ? ( + + ) : ( + + )} +
+
{finding.check_title}
+
+ {finding.service} · {finding.region} · {finding.resource_id} +
+ {finding.status_extended && ( +
+ {finding.status_extended} +
+ )} +
+ + {finding.severity} + + + {finding.status} + +
+
+ )) + )} +
+ ) : ( + + + +

No findings

+

+ Run a security scan to see findings here. +

+
+
+ )} +
+ )} + + {/* Scans Tab */} + {activeTab === "scans" && ( +
+ {resultsLoading ? ( +
+ {[1, 2, 3].map((i) => ( + + ))} +
+ ) : results.length > 0 ? ( + results.map((scan: any) => ( + + + + +
+
+ {scan.scan_type === "full" + ? "Full AWS Scan" + : scan.scan_type === "service" + ? "Service Scan" + : "Compliance Scan"} +
+
+ {scan.created_at && new Date(scan.created_at).toLocaleString()} + {" "}· {scan.cloud_provider?.toUpperCase()} +
+
+
+
{scan.passed} passed
+
{scan.failed} failed
+
+ = 80 ? "success" : scan.pass_rate >= 50 ? "warning" : "destructive"}> + {scan.pass_rate}% + + + {scan.status} + +
+
+ + )) + ) : ( + + + +

No scans yet

+

+ Trigger a security scan to get started. +

+
+
+ )} +
+ )} + + {/* Compliance Tab */} + {activeTab === "compliance" && ( +
+ {postureLoading ? ( +
+ {[1, 2, 3].map((i) => ( + + ))} +
+ ) : posture && posture.frameworks?.length > 0 ? ( + <> +
+

Compliance Frameworks

+
+ {posture.frameworks.map((fw: any) => ( + + + {fw.framework} + + {fw.total_checks} checks · {fw.passed} passed · {fw.failed} failed + + + +
+
+
+
+ {fw.pass_rate}% +
+ + + ))} +
+
+ +
+

By Service

+
+ {posture.services.map((svc: any) => ( + + + {svc.service} + + {svc.total_checks} checks · {svc.passed} passed · {svc.failed} failed + + + +
+
+
+
+ {svc.pass_rate}% +
+ + + ))} +
+
+ + ) : ( + + + +

No compliance data

+

+ Run a security scan to see compliance posture. +

+
+
+ )} +
+ )} +
+ ); +} diff --git a/frontend/src/components/layout/app-sidebar.tsx b/frontend/src/components/layout/app-sidebar.tsx index b4f996d..41cdbe1 100644 --- a/frontend/src/components/layout/app-sidebar.tsx +++ b/frontend/src/components/layout/app-sidebar.tsx @@ -30,6 +30,7 @@ import { Users, GitBranch, Search, + ShieldCheck, } from "lucide-react"; // Role constants matching backend @@ -62,6 +63,7 @@ const navItems: NavItem[] = [ { href: "/incidents", label: "Incidents", icon: AlertCircle, section: "Operations", allowedRoles: COMPLIANCE_ROLES }, { href: "/vendors", label: "Vendor Risk", icon: Building2, section: "Operations", allowedRoles: COMPLIANCE_ROLES }, { href: "/monitoring", label: "Monitoring", icon: Activity, section: "Operations", allowedRoles: COMPLIANCE_ROLES }, + { href: "/prowler", label: "Security Scanner", icon: ShieldCheck, section: "Operations", allowedRoles: COMPLIANCE_ROLES }, // People { href: "/training", label: "Training", icon: GraduationCap, section: "People" }, { href: "/access-reviews", label: "Access Reviews", icon: UserCheck, section: "People", allowedRoles: COMPLIANCE_ROLES }, diff --git a/frontend/src/hooks/use-api.ts b/frontend/src/hooks/use-api.ts index 5550297..b2ff023 100644 --- a/frontend/src/hooks/use-api.ts +++ b/frontend/src/hooks/use-api.ts @@ -1009,6 +1009,63 @@ export function useUpdateAccessReviewEntry(orgId: string, campaignId: string) { }); } +// ===== Prowler Security Scanner ===== + +export function useProwlerTriggerScan(orgId: string) { + const qc = useQueryClient(); + return useMutation({ + mutationFn: (data: { integration_id: string; scan_type: string; services?: string[]; compliance_framework?: string }) => + api.post(`/organizations/${orgId}/prowler/scan`, data), + onSuccess: () => { + qc.invalidateQueries({ queryKey: ["prowler-results", orgId] }); + qc.invalidateQueries({ queryKey: ["prowler-findings-summary", orgId] }); + qc.invalidateQueries({ queryKey: ["prowler-compliance-posture", orgId] }); + }, + }); +} + +export 
function useProwlerResults(orgId: string, params?: { severity?: string; status?: string; service?: string; page?: number }) { + const searchParams = new URLSearchParams(); + if (params?.severity) searchParams.set("severity", params.severity); + if (params?.status) searchParams.set("status", params.status); + if (params?.service) searchParams.set("service", params.service); + if (params?.page) searchParams.set("page", String(params.page)); + const qs = searchParams.toString(); + + return useQuery({ + queryKey: ["prowler-results", orgId, params], + queryFn: () => + api.get( + `/organizations/${orgId}/prowler/results${qs ? `?${qs}` : ""}` + ), + enabled: !!orgId, + }); +} + +export function useProwlerScanDetail(orgId: string, jobId: string) { + return useQuery({ + queryKey: ["prowler-scan-detail", orgId, jobId], + queryFn: () => api.get(`/organizations/${orgId}/prowler/results/${jobId}`), + enabled: !!orgId && !!jobId, + }); +} + +export function useProwlerCompliancePosture(orgId: string) { + return useQuery({ + queryKey: ["prowler-compliance-posture", orgId], + queryFn: () => api.get(`/organizations/${orgId}/prowler/compliance-posture`), + enabled: !!orgId, + }); +} + +export function useProwlerFindingsSummary(orgId: string) { + return useQuery({ + queryKey: ["prowler-findings-summary", orgId], + queryFn: () => api.get(`/organizations/${orgId}/prowler/findings-summary`), + enabled: !!orgId, + }); +} + // ===== Monitoring ===== export function useMonitorRules(orgId: string, params?: { check_type?: string; is_active?: string; page?: number }) {