From 5b5a6ae9704e1b7f4733b3708d71efe60e5e4343 Mon Sep 17 00:00:00 2001 From: Matias Alvarez Sabate Date: Mon, 29 Dec 2025 15:22:53 -0300 Subject: [PATCH 1/6] Add Dockerfile with Graphviz support --- .dockerignore | 20 ++++++++++++++++++++ Dockerfile | 12 ++++++++++++ 2 files changed, 32 insertions(+) create mode 100644 .dockerignore create mode 100644 Dockerfile diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..cb32c4c --- /dev/null +++ b/.dockerignore @@ -0,0 +1,20 @@ +#virtual environment +env/ +venv/ +.venv/ + +# Python +__pycache__/ +*.pyc + +# Git +.git +.gitignore + +# Outputs locales +results/ +output_*.csv + +# IDE +.vscode/ +.idea/ diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..32f7b75 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,12 @@ +FROM python:3.10-slim + +RUN apt-get update \ + && apt-get install -y --no-install-recommends graphviz \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /app +COPY . . + +RUN pip install --no-cache-dir requests graphviz reportlab + +CMD ["python", "pyfrc2g.py"] From 4ed76467af200e9466eccffe0f82adf2eafdd622 Mon Sep 17 00:00:00 2001 From: Matias Alvarez Sabate Date: Mon, 29 Dec 2025 15:33:13 -0300 Subject: [PATCH 2/6] Read pfSense API config from environment variables --- modules/config.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/modules/config.py b/modules/config.py index 45bdbc7..33cdfc9 100644 --- a/modules/config.py +++ b/modules/config.py @@ -1,3 +1,4 @@ +import os """ Configuration module for PyFRC2G """ @@ -6,8 +7,8 @@ GATEWAY_TYPE = "pfsense" # pfSense Configuration -PFS_BASE_URL = "https://" -PFS_TOKEN = "" +PFS_BASE_URL = os.environ.get("PFS_BASE_URL") +PFS_TOKEN = os.environ.get("PFS_TOKEN") # OPNSense Configuration OPNS_BASE_URL = "https://" From 30f82e5103a9452e3d99912d932b2889fb81cd09 Mon Sep 17 00:00:00 2001 From: Matias Alvarez Sabate Date: Mon, 29 Dec 2025 19:56:58 -0300 Subject: [PATCH 3/6] Fix long filenames in per-interface 
graphs --- modules/graph_generator.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/modules/graph_generator.py b/modules/graph_generator.py index 874b741..f1b1262 100644 --- a/modules/graph_generator.py +++ b/modules/graph_generator.py @@ -6,6 +6,7 @@ import glob import csv import logging +import hashlib from collections import OrderedDict from graphviz import Digraph from modules.utils import normalize_ports, safe_filename, map_value, format_alias_label @@ -40,6 +41,12 @@ def generate_by_interface(self, csv_path, output_dir): continue interface_safe = safe_filename(interface_name) + if len(interface_safe) > 80: + interface_safe = ( + interface_safe[:60] + + "_" + + hashlib.md5(interface_safe.encode()).hexdigest()[:8] + ) logging.info(f"Processing interface: {interface_name} ({len(rules)} rules)") # Extract host from output directory path (results/host/) From c6639e86455df676a8c2b421ea1a60de1a97c5f5 Mon Sep 17 00:00:00 2001 From: Matias Alvarez Sabate Date: Mon, 29 Dec 2025 19:58:15 -0300 Subject: [PATCH 4/6] Ignore local results output in git --- .gitignore | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 .gitignore diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..e70abc7 --- /dev/null +++ b/.gitignore @@ -0,0 +1,20 @@ +# Virtual environment +env/ +venv/ +.venv/ + +# Python +__pycache__/ +*.pyc + +# Git +.git + + +# Local outputs +results/ +output_*.csv + +# IDE +.vscode/ +.idea/ From 0531c2fa647fc3a06ca91cb740304200b04c4bd3 Mon Sep 17 00:00:00 2001 From: Matias Alvarez Sabate Date: Wed, 31 Dec 2025 11:50:07 -0300 Subject: [PATCH 5/6] Secure Docker runtime and move runtime artifacts to results directory --- Dockerfile | 15 +++++++++-- md5sum.txt | 0 modules/config.py | 7 ++++-- modules/main.py | 64 +++++++++++++++++++++++------------------------ 4 files changed, 50 insertions(+), 36 deletions(-) delete mode 100644 md5sum.txt diff --git 
a/Dockerfile b/Dockerfile index 32f7b75..b0f2a05 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,12 +1,23 @@ FROM python:3.10-slim +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PYTHONUNBUFFERED=1 + RUN apt-get update \ && apt-get install -y --no-install-recommends graphviz \ && rm -rf /var/lib/apt/lists/* WORKDIR /app -COPY . . -RUN pip install --no-cache-dir requests graphviz reportlab +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +RUN addgroup --system appgroup \ + && adduser --system --ingroup appgroup --shell /usr/sbin/nologin appuser + +COPY . . +RUN mkdir -p /app/results \ + && chown -R appuser:appgroup /app +USER appuser CMD ["python", "pyfrc2g.py"] diff --git a/md5sum.txt b/md5sum.txt deleted file mode 100644 index e69de29..0000000 diff --git a/modules/config.py b/modules/config.py index 33cdfc9..9362b51 100644 --- a/modules/config.py +++ b/modules/config.py @@ -80,9 +80,12 @@ def __init__(self): if self.gateway_name is None: self.gateway_name = firewall_host + #self.graph_output_dir = f"results/{self.gateway_name}" + #self.csv_file = f"output_{self.gateway_name}.csv" self.graph_output_dir = f"results/{self.gateway_name}" - self.csv_file = f"output_{self.gateway_name}.csv" - + os.makedirs(self.graph_output_dir, exist_ok=True) + self.csv_file = os.path.join(self.graph_output_dir, f"output_{self.gateway_name}.csv") + self.md5_file = os.path.join(self.graph_output_dir, "md5sum.txt") # CISO Assistant Configuration self.ciso_url = CISO_URL self.ciso_token = CISO_TOKEN diff --git a/modules/main.py b/modules/main.py index a981cf6..8a48ac9 100644 --- a/modules/main.py +++ b/modules/main.py @@ -24,36 +24,36 @@ def main(): format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S' ) - + config = Config() api_client = APIClient(config) graph_generator = GraphGenerator(config) ciso_client = CISOCClient(config) - + logging.debug(f"Configuration loaded: gateway_type={config.gateway_type}, 
gateway_name={config.gateway_name}") if config.gateway_type.lower() == "pfsense": logging.debug(f"pfSense URL: {config.pfs_url}, Base URL: {config.pfs_base_url}") elif config.gateway_type.lower() == "opnsense": logging.debug(f"OPNSense Base URL: {config.opns_base_url}, Rules URL: {config.opns_url}") logging.debug(f"OPNSense Interfaces: {config.interfaces}") - + logging.info(f"Starting rule extraction for {config.gateway_type}") - + # Fetch aliases from API logging.info("Fetching aliases from API...") logging.debug("Calling fetch_aliases()...") api_client.fetch_aliases() logging.debug(f"Aliases loaded: {len(api_client.interface_map)} interfaces, {len(api_client.net_map)} networks, {len(api_client.port_map)} ports") - + # Extract rules with open(config.csv_file, "w", newline="", encoding="utf-8") as f: writer = csv.DictWriter(f, fieldnames=config.csv_fieldnames) writer.writeheader() - + if config.gateway_type.lower() == "pfsense": logging.debug("Fetching pfSense rules...") entries = api_client.fetch_rules() - + if entries: logging.info(f"Retrieved {len(entries)} rules from pfSense") logging.debug(f"First rule sample: {entries[0] if entries else 'N/A'}") @@ -71,33 +71,33 @@ def main(): }) else: logging.warning("No firewall rules retrieved from pfSense") - + elif config.gateway_type.lower() == "opnsense": logging.debug("Fetching OPNSense rules...") entries = api_client.fetch_rules() - + if not entries: logging.error("No rules retrieved from OPNSense") return - + logging.debug(f"Retrieved {len(entries)} rules from OPNSense") if entries: logging.debug(f"First rule sample: {entries[0] if entries else 'N/A'}") - + # Write entries for entry in entries: - source_val = (entry.get('source', {}).get('network') or - entry.get('source', {}).get('address') or - entry.get('source_net') or + source_val = (entry.get('source', {}).get('network') or + entry.get('source', {}).get('address') or + entry.get('source_net') or entry.get('source', {}).get('any')) - destination_val = 
(entry.get('destination', {}).get('network') or - entry.get('destination', {}).get('address') or - entry.get('destination', {}).get('any') or + destination_val = (entry.get('destination', {}).get('network') or + entry.get('destination', {}).get('address') or + entry.get('destination', {}).get('any') or entry.get("destination_net")) - port_dest_val = (entry.get('destination', {}).get('port') or + port_dest_val = (entry.get('destination', {}).get('port') or entry.get("destination_port")) entry_interface = entry.get("interface") - + writer.writerow({ "SOURCE": map_value(source_val, "source", config.any_value), "GATEWAY": f"{config.gateway_name}/{map_value(entry_interface, 'interface', config.any_value)}" if entry_interface else f"{config.gateway_name}/Floating-rules", @@ -112,38 +112,38 @@ def main(): else: logging.error(f"Unknown gateway type: {config.gateway_type}. Use 'pfsense' or 'opnsense'.") return - + logging.info(f"✓ CSV file generated: {config.csv_file}") - + # Check for changes using MD5 prev_md5 = "" - if os.path.exists("md5sum.txt"): - with open("md5sum.txt", "r") as f: + if os.path.exists(config.md5_file): + with open(config.md5_file, "r") as f: prev_md5 = f.readline().strip() - + actual_md5 = calculate_md5(config.csv_file) logging.debug(f"MD5 comparison: previous={prev_md5[:8]}..., current={actual_md5[:8]}...") - + if prev_md5 != actual_md5: - with open("md5sum.txt", "w") as f: + with open(config.md5_file, "w") as f: f.write(f"{actual_md5}\n") logging.info("Changes detected, generating graphs...") - + # Create global CSV file (copy of all rules) os.makedirs(config.graph_output_dir, exist_ok=True) host_name = os.path.basename(config.graph_output_dir) if os.path.basename(config.graph_output_dir) else "gateway" global_csv = os.path.join(config.graph_output_dir, f"{host_name}_ALL_flows.csv") shutil.copy2(config.csv_file, global_csv) logging.info(f"✓ Global CSV created: {global_csv}") - + # Generate global file (all interfaces together) 
logging.info("Generating global graph (all interfaces combined)...") graph_generator.generate_graphs(config.csv_file, config.graph_output_dir) - + # Generate per-interface files (separate graphs for each interface) logging.info("Generating per-interface graphs (separate files for each interface)...") graph_generator.generate_by_interface(config.csv_file, config.graph_output_dir) - + # Cleanup PNG files (after PDFs are generated) try: png_files = glob.glob(os.path.join(config.graph_output_dir, "*.png")) @@ -155,7 +155,7 @@ def main(): logging.info(f"✓ Cleaned up {len(png_files)} temporary PNG file(s)") except Exception as e: logging.warning(f"Could not delete some PNG files: {e}") - + # Upload to CISO Assistant if configured if ciso_client.enabled: logging.info("Uploading PDFs to CISO Assistant...") @@ -167,7 +167,7 @@ def main(): logging.warning(f"⚠ Failed to upload {stats['failed']} PDF(s) to CISO Assistant") else: logging.info("No rules created or modified") - + # Cleanup CSV if os.path.exists(config.csv_file): os.remove(config.csv_file) From ec9978a51a8e4392c0debe817ef7cc8eb54ba29c Mon Sep 17 00:00:00 2001 From: Matias Alvarez Sabate Date: Wed, 31 Dec 2025 15:39:53 -0300 Subject: [PATCH 6/6] feat: expose generated results via nginx --- Dockerfile | 17 +++++++++++++++-- docker-start.sh | 6 ++++++ nginx.conf | 39 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 60 insertions(+), 2 deletions(-) create mode 100644 docker-start.sh create mode 100644 nginx.conf diff --git a/Dockerfile b/Dockerfile index b0f2a05..0d90a5a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,7 +4,9 @@ ENV PYTHONDONTWRITEBYTECODE=1 ENV PYTHONUNBUFFERED=1 RUN apt-get update \ - && apt-get install -y --no-install-recommends graphviz \ + && apt-get install -y --no-install-recommends \ + graphviz \ + nginx \ && rm -rf /var/lib/apt/lists/* WORKDIR /app @@ -16,8 +18,19 @@ RUN addgroup --system appgroup \ && adduser --system --ingroup appgroup --shell /usr/sbin/nologin appuser COPY . . 
+ RUN mkdir -p /app/results \ && chown -R appuser:appgroup /app + +# nginx config +COPY nginx.conf /etc/nginx/nginx.conf + +# startup script +COPY docker-start.sh /docker-start.sh +RUN chmod +x /docker-start.sh + USER appuser -CMD ["python", "pyfrc2g.py"] +EXPOSE 8080 + +CMD ["/docker-start.sh"] diff --git a/docker-start.sh b/docker-start.sh new file mode 100644 index 0000000..ab9cc8c --- /dev/null +++ b/docker-start.sh @@ -0,0 +1,6 @@ +#!/bin/sh +set -e +echo "▶ Running PyFRC2G..." +python pyfrc2g.py +echo "▶ Starting nginx..." +exec nginx -g "daemon off;" diff --git a/nginx.conf b/nginx.conf new file mode 100644 index 0000000..2c53960 --- /dev/null +++ b/nginx.conf @@ -0,0 +1,39 @@ +worker_processes 1; +pid /tmp/nginx.pid; + +events { + worker_connections 1024; +} + +http { + include mime.types; + default_type application/octet-stream; + charset utf-8; + autoindex_format html; + sendfile on; + + # send logs to docker stdout/stderr + access_log /dev/stdout; + error_log /dev/stderr warn; + + # writable temp paths (nginx runs as non-root) + client_body_temp_path /tmp/client_body; + proxy_temp_path /tmp/proxy; + fastcgi_temp_path /tmp/fastcgi; + uwsgi_temp_path /tmp/uwsgi; + scgi_temp_path /tmp/scgi; + + server { + listen 8080; + server_name _; + + root /app/results; + index index.html; + + location / { + autoindex on; + autoindex_exact_size off; + autoindex_localtime on; + } + } }