diff --git a/.gitignore b/.gitignore index 2226100..6f60b61 100644 --- a/.gitignore +++ b/.gitignore @@ -184,4 +184,32 @@ ctre_sim/ compile_commands.json # Version file -src/main/java/frc/robot/generic/BuildConstants.java \ No newline at end of file +src/main/java/frc/robot/generic/BuildConstants.java + +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +env/ +venv/ +ENV/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg + +# Tuner logs +tuner_logs/ \ No newline at end of file diff --git a/bayesopt/config/COEFFICIENT_TUNING.py b/bayesopt/config/COEFFICIENT_TUNING.py new file mode 100644 index 0000000..e73013b --- /dev/null +++ b/bayesopt/config/COEFFICIENT_TUNING.py @@ -0,0 +1,201 @@ +""" +COEFFICIENT TUNING CONFIGURATION +================================= +This file controls WHAT gets tuned, HOW MUCH it can change, and IN WHAT ORDER. +Edit this file to customize the optimization behavior. + +NO CODE CHANGES NEEDED - just modify the values below. +""" + +# ============================================================ +# TUNING ORDER - What gets optimized and in what sequence +# ============================================================ +# The optimizer tunes ONE coefficient at a time in this order. +# Put most impactful coefficients first for fastest improvement. +# Comment out (add # at start) any coefficient you DON'T want to tune. + +TUNING_ORDER = [ + "kDragCoefficient", # Air resistance (MOST IMPACT - tune this first!) 
+ "kVelocityIterationCount", # Solver accuracy vs CPU load + "kAngleIterationCount", # Solver accuracy vs CPU load + "kVelocityTolerance", # How precise velocity calculation needs to be + "kAngleTolerance", # How precise angle calculation needs to be + "kLaunchHeight", # Physical measurement (tune last - rarely changes) + # "kAirDensity", # Commented out - air density is constant (1.225) +] + +# ============================================================ +# COEFFICIENT DEFINITIONS +# ============================================================ +# For each coefficient, you can control: +# - enabled: True = tune this, False = skip it +# - default_value: Starting value +# - min_value: Lowest allowed value (SAFETY LIMIT) +# - max_value: Highest allowed value (SAFETY LIMIT) +# - initial_step_size: How big the first changes are +# - step_decay_rate: How quickly to reduce step size (0.9 = shrink 10% per iteration) +# - is_integer: True = round to whole numbers, False = allow decimals +# - nt_key: NetworkTables path (don't change unless robot code changes) + +# TODO: Update tuning parameters before testing on robot + +COEFFICIENTS = { + "kDragCoefficient": { + "enabled": True, # ← CHANGE THIS to enable/disable tuning + "default_value": 0.003, # Starting guess + "min_value": 0.001, # ← SAFETY: Can't go below this + "max_value": 0.006, # ← SAFETY: Can't go above this + "initial_step_size": 0.001, # ← HOW MUCH: Start with big changes + "step_decay_rate": 0.9, # Gradually make smaller changes + "is_integer": False, # Allow decimals like 0.0035 + "nt_key": "/Tuning/FiringSolver/DragCoefficient", + "description": "Air resistance coefficient - affects trajectory curvature" + }, + + "kAirDensity": { + "enabled": False, # ← DISABLED: Air density is constant in FiringSolutionSolver + "default_value": 1.225, + "min_value": 1.10, + "max_value": 1.30, + "initial_step_size": 0.05, + "step_decay_rate": 0.9, + "is_integer": False, + "nt_key": "/Tuning/FiringSolver/AirDensity", + 
"description": "Air density (kg/m³) - typically constant at 1.225" + }, + + "kVelocityIterationCount": { + "enabled": True, # ← CHANGE THIS to enable/disable tuning + "default_value": 20, + "min_value": 10, + "max_value": 30, # ← REDUCED from 50 to prevent RoboRIO CPU overload + "initial_step_size": 5, # ← HOW MUCH: Try steps of 5 iterations + "step_decay_rate": 0.85, + "is_integer": True, # Must be whole number (10, 11, 12, not 10.5) + "nt_key": "/Tuning/FiringSolver/VelocityIterations", + "description": "Solver iterations for velocity - more = accurate but slower" + }, + + "kAngleIterationCount": { + "enabled": True, # ← CHANGE THIS to enable/disable tuning + "default_value": 20, + "min_value": 10, + "max_value": 30, # ← REDUCED from 50 to prevent RoboRIO CPU overload + "initial_step_size": 5, # ← HOW MUCH: Try steps of 5 iterations + "step_decay_rate": 0.85, + "is_integer": True, # Must be whole number + "nt_key": "/Tuning/FiringSolver/AngleIterations", + "description": "Solver iterations for angle - more = accurate but slower" + }, + + "kVelocityTolerance": { + "enabled": True, # ← CHANGE THIS to enable/disable tuning + "default_value": 0.01, + "min_value": 0.005, # ← SAFETY: Too tight = solver struggles + "max_value": 0.05, # ← SAFETY: Too loose = inaccurate + "initial_step_size": 0.005, # ← HOW MUCH: Start with 0.005 m/s changes + "step_decay_rate": 0.9, + "is_integer": False, + "nt_key": "/Tuning/FiringSolver/VelocityTolerance", + "description": "Velocity convergence tolerance (m/s) - smaller = more precise" + }, + + "kAngleTolerance": { + "enabled": True, # ← CHANGE THIS to enable/disable tuning + "default_value": 0.0001, + "min_value": 0.00001, # ← SAFETY: Too tight = solver struggles + "max_value": 0.001, # ← SAFETY: Too loose = inaccurate + "initial_step_size": 0.0001, # ← HOW MUCH: Start with 0.0001 rad changes + "step_decay_rate": 0.9, + "is_integer": False, + "nt_key": "/Tuning/FiringSolver/AngleTolerance", + "description": "Angle convergence 
tolerance (rad) - smaller = more precise" + }, + + "kLaunchHeight": { + "enabled": True, # ← CHANGE THIS to enable/disable tuning + "default_value": 0.8, + "min_value": 0.75, # ← SAFETY: Physical robot constraint + "max_value": 0.85, # ← SAFETY: Physical robot constraint + "initial_step_size": 0.02, # ← HOW MUCH: Start with 2cm changes + "step_decay_rate": 0.9, + "is_integer": False, + "nt_key": "/Tuning/FiringSolver/LaunchHeight", + "description": "Launch height above ground (m) - physical measurement" + }, +} + +# ============================================================ +# OPTIMIZATION SETTINGS +# ============================================================ +# How the Bayesian optimizer behaves + +# How many random points to try before starting intelligent optimization +N_INITIAL_POINTS = 5 # More = better exploration, slower convergence + +# Maximum iterations per coefficient before moving to next one +N_CALLS_PER_COEFFICIENT = 20 # More = more thorough, takes longer + +# ============================================================ +# ROBORIO PROTECTION SETTINGS +# ============================================================ +# Prevent overloading the robot's onboard computer + +# Maximum coefficient updates per second (prevents NT spam) +MAX_WRITE_RATE_HZ = 5.0 # SAFETY: Don't flood the RoboRIO with updates + +# Maximum shot data reads per second (prevents NT spam) +MAX_READ_RATE_HZ = 20.0 # SAFETY: Don't overload NT with read requests + +# Batch multiple writes together (reduces NT traffic) +BATCH_WRITES = True # True = more efficient, False = immediate writes + +# ============================================================ +# PHYSICAL ROBOT LIMITS (SAFETY) +# ============================================================ +# Hardcoded limits based on robot physical capabilities +# Reject any shot data outside these bounds (likely sensor errors) + +PHYSICAL_MAX_VELOCITY_MPS = 30.0 # Maximum physically possible exit velocity +PHYSICAL_MIN_VELOCITY_MPS = 5.0 # 
Minimum physically possible exit velocity +PHYSICAL_MAX_ANGLE_RAD = 1.57 # ~90 degrees (straight up) +PHYSICAL_MIN_ANGLE_RAD = 0.17 # ~10 degrees (very low angle) +PHYSICAL_MAX_DISTANCE_M = 10.0 # Maximum field distance for shots +PHYSICAL_MIN_DISTANCE_M = 1.0 # Minimum shot distance + +# ============================================================ +# HOW TO USE THIS FILE +# ============================================================ +""" +QUICK MODIFICATIONS: + +1. DISABLE A COEFFICIENT: + Set enabled = False in that coefficient's section + +2. CHANGE TUNING ORDER: + Rearrange or comment out items in TUNING_ORDER list + +3. ADJUST HOW MUCH IT CHANGES: + Modify initial_step_size (bigger = more aggressive) + +4. TIGHTEN SAFETY LIMITS: + Reduce the range between min_value and max_value + +5. PROTECT ROBORIO: + Lower MAX_WRITE_RATE_HZ or MAX_READ_RATE_HZ + +EXAMPLES: + +# Only tune drag coefficient: +TUNING_ORDER = ["kDragCoefficient"] + +# Make drag tuning more aggressive: +"initial_step_size": 0.002, # Change from 0.001 + +# Tighten drag safety range: +"min_value": 0.002, # Change from 0.001 +"max_value": 0.004, # Change from 0.006 + +# Reduce RoboRIO load: +MAX_WRITE_RATE_HZ = 2.0 # Change from 5.0 +""" diff --git a/bayesopt/config/TUNER_TOGGLES.ini b/bayesopt/config/TUNER_TOGGLES.ini new file mode 100644 index 0000000..0df34cf --- /dev/null +++ b/bayesopt/config/TUNER_TOGGLES.ini @@ -0,0 +1,38 @@ +# ============================================================ +# TUNER CONTROL - THREE MAIN TOGGLES +# ============================================================ +# This file controls the THREE main tuner settings +# NO DIGGING IN CODE REQUIRED - just edit these values +# ============================================================ + +[main_controls] +# Toggle 1: Turn the entire tuner on or off +# True = Tuner runs automatically in background +# False = Tuner is completely disabled +tuner_enabled = False + +# Toggle 2: Block shooting until shot is logged (hit or miss 
button pressed) +# True = Robot cannot shoot again until driver logs previous shot result +# False = Robot can shoot freely (may miss logging some shots) +# RECOMMENDED: False for normal play, True for intensive tuning sessions +require_shot_logged = False + +# Toggle 3: Block shooting until tuner finishes calculations and updates coefficients +# True = Robot waits for optimizer before allowing next shot +# False = Shooting and optimization happen independently +# RECOMMENDED: False for normal play, True for sequential testing +require_coefficients_updated = False + +# ============================================================ +# Team Configuration +# ============================================================ +[team] +# Your FRC team number (used to calculate robot IP address) +team_number = 5892 + +# ============================================================ +# THAT'S IT! +# ============================================================ +# These are the only settings you need to change regularly. +# For coefficient tuning configuration, see COEFFICIENT_TUNING.py +# ============================================================ diff --git a/bayesopt/docs/CONTRIBUTIONS.md b/bayesopt/docs/CONTRIBUTIONS.md new file mode 100644 index 0000000..cbcbe5d --- /dev/null +++ b/bayesopt/docs/CONTRIBUTIONS.md @@ -0,0 +1,69 @@ +# Contributing — SideKick / bayesopt (short) + +Thanks for contributing. Be clear, small, and test your changes. + +Quick workflow +1. Fork, clone, create a branch (e.g. feat/xxx or fix/yyy). +2. Make focused commits, run tests/linters locally. +3. Push branch and open a PR against main (link issue if present). +4. Address reviews; rebase/squash if requested. + +Etiquette (short) +- Be respectful and constructive. +- Ask questions instead of assuming. +- Explain why a change is needed and how you validated it. + +Commit message (required) +Format: +(scope): short imperative summary + +Body: +- State value change clearly: "from X → Y on Z" and why. 
+- Add testing/compatibility notes as needed. + +Types: feat, fix, docs, style, refactor, perf, test, chore + +Example: +- feat(optimizer): change default learning_rate from 0.01 → 0.001 + Body: "Changed Adam default LR 0.01 → 0.001 to improve noisy-data stability. Added unit test." + +## Examples + +Good commit + PR example (clear, concrete) + +- Commit title: + feat(optimizer): reduce default Adam learning_rate 0.01 → 0.001 + +- Commit body: + - What changed value-wise: changed Adam (adaptive moment estimation) default learning_rate in bayesopt/optimizer.py from 0.01 → 0.001. + - Why: reduced overshooting and improved stability on noisy datasets (see issue #42). + - Tests: updated tests/test_optimizer.py::test_default_lr to assert new default. + - Files changed: bayesopt/optimizer.py, tests/test_optimizer.py, docs/usage.md (if applicable). + +- PR description (short & required): + - Summary: Reduced Adam default LR 0.01 → 0.001 to improve noisy-data stability. + - Details: + - What changed: Adam.default_lr 0.01 → 0.001 in bayesopt/optimizer.py + - Why: training on noisy datasets showed unstable convergence; lowering LR reduced oscillation. + - How validated: ran unit tests and a small example run below. + - Validation steps (maintainers/reviewers): + 1. Run unit test: pytest -k test_default_lr + 2. Run a quick example: python examples/adam_demo.py --max-evals 10 + 3. Confirm no regression in CI and review updated test assertions. + - Related: Closes #42 + +Bad commit/PR (what to avoid) +- Title: "fix stuff" +- Body: no before/after values, no tests, no explanation. + +PR checklist +- Branch uses naming convention. +- Tests added/updated where applicable. +- Linter run. +- Docs updated for public API changes. +- PR description states: what changed, from what → to what, why, and how to validate. + +Issues +Provide summary, steps to reproduce, environment, and logs or minimal examples. + +That's it — short, clear, and testable contributions are preferred. 
diff --git a/bayesopt/docs/MASTERDOC.md b/bayesopt/docs/MASTERDOC.md new file mode 100644 index 0000000..1b4aa6d --- /dev/null +++ b/bayesopt/docs/MASTERDOC.md @@ -0,0 +1,132 @@ +# Bayesian Optimization Tuner — Master Developer Document + +**THIS IS NOT A HOW TO OPERATE GUIDE. THIS IS A MASTERDOC.** +**IT CONTAINS LOGIC AND EXPLANATIONS NOT NECESSARY FOR OPERATION.** +**THE OPERATION GUIDE IS START.md.** +**THIS IS NOT REQUIRED READING.** + +Why I made this +- Single-source, practical reference for maintainers. Explains architecture, runtime flow, interactions, and exactly where to change things. + +Quick summary +- Runs on a Driver Station or dev host, reads shot feedback via NetworkTables (NT), and uses a per-coefficient Bayesian optimizer to propose safe coefficient updates. Tunes one coefficient at a time in a configured priority order. Safety: clamping, rate-limits, match-mode disable, invalid-data handling. + +Bayesian optimization — quick primer +- Bayesian optimization is a sample-efficient method for tuning expensive or noisy black‑box functions: it builds a probabilistic surrogate (commonly a Gaussian Process) of the objective and uses an acquisition function to pick the next promising point, balancing exploration vs. exploitation. +- In this project the tuner treats robot "shot" performance as the objective: the optimizer proposes safe coefficient changes, receives shot scores as feedback, and updates its surrogate to suggest improved coefficients over time. 
+ +architecture (compact flow) +- Components: + - Launcher: scripts/{RUN_TUNER.sh,RUN_TUNER.bat,tuner_daemon.py} + - Coordinator: tuner/tuner.py + - NT layer: tuner/nt_interface.py + - Optimizer: tuner/optimizer.py + - Config: bayesopt/config/* + tuner/config.py + - Logging: tuner/logger.py + - Tests: tuner/tests/* + +Flow (simple diagram): + +[Launcher] + | + v +[Coordinator — tuner/tuner.py] + |--> [nt_interface.py] (NetworkTables reads/writes) + |--> [optimizer.py] (suggest_next_value, report_result) + `--> [logger.py] (logging & CSV) + +Data path: +nt_interface.read_shot_data() --> Coordinator --> optimizer.suggest_next_value() +optimizer.report_result(...) --> Coordinator --> nt_interface.write_coefficient(...) +logger.log_shot() <--- Coordinator / optimizer / nt_interface + +Detailed data flow (step-by-step) +1. Launcher starts the daemon (tuner_daemon.py) which constructs TunerConfig and starts Coordinator. +2. Coordinator loop (tuner.py) polls nt_interface.read_shot_data() at TUNER_UPDATE_RATE_HZ. +3. Each ShotData is validated (physically plausible fields). Invalid samples increment a counter; pause when threshold exceeded. +4. Valid samples are sent to the active CoefficientTuner (optimizer.py) which maintains a GP model and suggests_next_value(). +5. Coordinator clamps the suggestion to COEFFICIENT_TUNING bounds and enforces min_write_interval per-coefficient. +6. Coordinator calls nt_interface.write_coefficient(name, value). +7. Shot outcomes are reported back to optimizer.report_result(value, score) to update the surrogate. +8. logger.log_shot() and log_event() record inputs, suggestions, results, and state changes. + +Where logic lives (file map + responsibilities) +- bayesopt/config/COEFFICIENT_TUNING.py + - Define each coefficient: nt_path/key, min, max, initial, step, safe_delta, convergence params. + - Set TUNING_ORDER (array of coefficient names). + - Change here to add coefficients or modify ranges. 
+- bayesopt/config/TUNER_TOGGLES.ini + - Global on/off toggles and runtime defaults. +- bayesopt/scripts/RUN_TUNER.sh, RUN_TUNER.bat, tuner_daemon.py + - Process/daemon start-up logic. Modify for autostart/system integration. +- bayesopt/tuner/config.py + - Loads and validates effective runtime configuration (typed fields). Change to add runtime flags or new config validation. +- bayesopt/tuner/nt_interface.py + - Encapsulates all NetworkTables reads/writes and connection handling: + - connect(), read_shot_data(), write_coefficient(name, value), is_match_mode(). + - To change NT key names or add new NT telemetry, update here and COEFFICIENT_TUNING.py. +- bayesopt/tuner/optimizer.py + - BayesianOptimizer / CoefficientTuner classes: + - suggest_next_value(), report_result(value, score), is_converged(). + - Swap surrogate model here while keeping the public interface. +- bayesopt/tuner/tuner.py + - Coordinator: scheduling, safety checks (match mode, invalid-data), enforcing clamping/rate-limits, sequencing TUNING_ORDER. + - Primary place to adjust orchestration logic. +- bayesopt/tuner/logger.py + - CSV and daemon logging. Single place to add/remove fields or change formats. +- bayesopt/tuner/tests/* + - Unit tests that mock nt_interface and validate config, optimizer, and coordinator behavior. +- bayesopt/tuner/requirements.txt + - Add runtime dependencies (skopt, numpy, etc). + +Interaction matrix (who calls whom) +- tuner_daemon.py -> tuner.Tuner (start/stop) +- tuner.Tuner -> nt_interface, optimizer, logger, config +- optimizer -> logger (for debug events) +- nt_interface -> logger (connection events) +- Tests -> mock nt_interface to exercise tuner and optimizer + +Change guidance (what to edit for common tasks) +- Add/modify coefficient: + 1) Edit COEFFICIENT_TUNING.py: add CoefficientConfig and update TUNING_ORDER. + 2) Update unit tests in tuner/tests/ to cover clamping and optimizer behavior. + 3) If NT key names changed, modify nt_interface.py accordingly. 
+- Change optimizer algorithm: + - Implement new model in optimizer.py keeping suggest_next_value/report_result/is_converged API and update tests. +- Add runtime toggle: + - Add the toggle to TUNER_TOGGLES.ini and reflect default/validation in tuner/config.py. +- Change logging fields: + - Update logger._initialize_csv_log() header and logger.log_shot(); update tests to validate columns. +- Integrate with different data source: + - Replace nt_interface.py implementation but keep the same external methods used by tuner.py. + +Safety & operational rules (explicit) +- Match-mode disable: if nt_interface.is_match_mode() is True, tuner must not write coefficients. +- Clamping: enforce [min,max] from COEFFICIENT_TUNING for every write. +- Rate-limiting: enforce min_write_interval per-coefficient; configurable in tuner/config.py. +- Invalid data: define max_bad_samples; on exceed, pause tuning until sufficient good samples appear. +- Graceful shutdown: tuner.stop() flushes logs and closes NT connection. + +Flowchart for state transitions (ASCII) + +[IDLE] --start--> [TUNING_LOOP] +[TUNING_LOOP] --match_mode_on--> [DISABLED] +[TUNING_LOOP] --invalid_data--> [PAUSED] +[PAUSED] --good_data_restored--> [TUNING_LOOP] +[TUNING_LOOP] --converged_current_coeff--> [NEXT_COEFFICIENT or COMPLETE] +[ANY] --stop/exit--> [SHUTDOWN] + +Testing strategy +- Unit tests must run offline and mock NT: + - Mock nt_interface.read_shot_data() to feed deterministic synthetic data. + - Assert clamping: suggested values outside bounds are clamped before nt_interface.write_coefficient() is called. + - Assert rate-limiting: repeated suggestions within the min interval do not trigger writes. + - Optimizer tests: feed synthetic rewards and assert the optimizer model updates and converges. +- run_tests.py executes all unit tests (tuner/tests/*). + +Debugging checklist +- Is NT connected? Check nt_interface logs and connection state. +- Is tuner disabled due to match-mode? 
nt_interface.is_match_mode() +- Are writes being throttled? Check timestamps in logs and min_write_interval. +- Is optimizer receiving results? Check logger entries for report_result events. +- Use DEBUG logging: setup_logging(config, log_level=logging.DEBUG) diff --git a/bayesopt/docs/START.md b/bayesopt/docs/START.md new file mode 100644 index 0000000..2505a0d --- /dev/null +++ b/bayesopt/docs/START.md @@ -0,0 +1,13 @@ +# Turn on + +- Install dependencies once per machine: + + pip install -r bayesopt/tuner/requirements.txt + +- Make sure tuner and preferred settings are turned on: + + This is in the filepath to the toggles: + + bayesopt/config/TUNER_TOGGLES.ini + +- Run robot as per usual diff --git a/bayesopt/docs/TROUBLESHOOTING.md b/bayesopt/docs/TROUBLESHOOTING.md new file mode 100644 index 0000000..e2d8e42 --- /dev/null +++ b/bayesopt/docs/TROUBLESHOOTING.md @@ -0,0 +1,52 @@ +# Troubleshooting — Why it's not turning on + +Why it's not turning on — checklist and fixes +1. TUNER_ENABLED is false + - Fix: set TUNER_ENABLED = True in TUNER_TOGGLES.ini or TunerConfig before starting. + +2. Missing/incorrect NT server IP or team number + - Symptom: "NT connect failed" in daemon log or no NT connection. + - Fix: set correct NT_SERVER_IP or TEAM_NUMBER in config and restart. + +3. Dependencies missing + - Symptom: ImportError or ModuleNotFoundError on startup. + - Fix: pip install -r bayesopt/tuner/requirements.txt + +4. Running in match mode (FMS detected) + - Symptom: Tuner auto-disables immediately; logs show match-mode detected. + - Fix: For tuning, ensure FMS is not connected or use a test environment. + +5. NT keys mismatch with robot + - Symptom: No shot data; read_shot_data returns None or invalid values. + - Fix: Confirm robot publishes the same NetworkTables keys (see COEFFICIENT_TUNING.py nt_key fields). + +6. Permissions or missing log directory + - Symptom: File write errors when creating tuner_logs. 
+ - Fix: Ensure LOG_DIRECTORY exists and is writable; adjust path in config. + +7. Invalid shot data / too noisy + - Symptom: optimizer receives invalid readings, tuning pauses. + - Fix: Verify robot's shot logger (LogHit/LogMiss buttons), add sensors needed, or increase MIN_VALID_SHOTS_BEFORE_UPDATE. + +8. Coefficients clamped to same value (no change) + - Symptom: Suggested values repeatedly clamped to min/max. + - Fix: Check COEFFICIENT_TUNING ranges; widen sensible ranges or correct default value. + +9. Optimizer not converging + - Symptom: Iterations run but no progress. + - Fixes: + - Increase N_CALLS_PER_COEFFICIENT + - Increase N_INITIAL_POINTS + - Verify shot quality and increase MIN_VALID_SHOTS_BEFORE_UPDATE + - Review initial_step_size and step_decay_rate + +10. Port or firewall issues blocking NT + - Symptom: NT connection stalls, partial writes. + - Fix: Ensure network allows UDP/TCP used by NetworkTables between DS PC and robot. + +If still failing +- Inspect daemon log for the first ERROR/TRACE message and follow that code path. +- Run unit tests to ensure core modules load correctly. +- If NT issues persist, isolate with a local NT server emulator. +- Use logs and CSVs to identify whether the issue is NT, data quality, or optimizer-related. +- Ask either Ruthie or Michael diff --git a/bayesopt/scripts/RUN_TUNER.bat b/bayesopt/scripts/RUN_TUNER.bat new file mode 100644 index 0000000..24bef31 --- /dev/null +++ b/bayesopt/scripts/RUN_TUNER.bat @@ -0,0 +1,16 @@ +@echo off +REM ============================================================ +REM FRC SHOOTER TUNER - AUTO-START DAEMON +REM Runs in background, drivers do nothing! 
+REM ============================================================ + +REM Change to the directory where this script is located +cd /d "%~dp0" + +REM Run silently in background (no window) +start /B pythonw tuner_daemon.py + +REM Optional: Show a quick message +echo FRC Tuner daemon started in background +timeout /t 2 /nobreak >nul + diff --git a/bayesopt/scripts/RUN_TUNER.sh b/bayesopt/scripts/RUN_TUNER.sh new file mode 100755 index 0000000..9b21dea --- /dev/null +++ b/bayesopt/scripts/RUN_TUNER.sh @@ -0,0 +1,15 @@ +#!/bin/bash +# ============================================================ +# FRC SHOOTER TUNER - AUTO-START DAEMON +# Runs in background, drivers do nothing! +# ============================================================ + +# Get the directory where this script is located +SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +cd "$SCRIPT_DIR" + +# Run daemon in background +nohup python3 tuner_daemon.py > /dev/null 2>&1 & + +# Optional: Show a quick message +echo "FRC Tuner daemon started in background" diff --git a/bayesopt/scripts/tuner_daemon.py b/bayesopt/scripts/tuner_daemon.py new file mode 100644 index 0000000..e770a66 --- /dev/null +++ b/bayesopt/scripts/tuner_daemon.py @@ -0,0 +1,166 @@ +#!/usr/bin/env python3 +""" +AUTO-START TUNER DAEMON + +This script runs automatically in the background when the Driver Station starts. +Programmers set TUNER_ENABLED in config, drivers do NOTHING. 
+ +To set up auto-start: + Windows: Add to Startup folder or Task Scheduler + Mac: Add to Login Items + Linux: Add to systemd or cron @reboot +""" + +import sys +import os +import time +import configparser + +# Add parent directory (bayesopt) to path to import tuner module +script_dir = os.path.dirname(os.path.abspath(__file__)) +bayesopt_dir = os.path.dirname(script_dir) +sys.path.insert(0, bayesopt_dir) + +from tuner import BayesianTunerCoordinator, TunerConfig, setup_logging +import logging + + +def load_config_from_file(): + """Load configuration from tuner_config.ini file.""" + config_file = os.path.join(os.path.dirname(__file__), 'tuner_config.ini') + + # Defaults if no config file + if not os.path.exists(config_file): + return { + 'enabled': False, + 'team_number': 0, + 'require_shot_logged': False, + 'require_coefficients_updated': False + } + + try: + parser = configparser.ConfigParser() + parser.read(config_file) + + enabled = parser.getboolean('tuner', 'enabled', fallback=False) + team_number = parser.getint('tuner', 'team_number', fallback=0) + + # Read shooting interlock settings + require_shot_logged = parser.getboolean('shooting_interlocks', 'require_shot_logged', fallback=False) + require_coefficients_updated = parser.getboolean('shooting_interlocks', 'require_coefficients_updated', fallback=False) + + return { + 'enabled': enabled, + 'team_number': team_number, + 'require_shot_logged': require_shot_logged, + 'require_coefficients_updated': require_coefficients_updated, + } + except Exception as e: + logging.error(f"Error loading config: {e}") + return { + 'enabled': False, + 'team_number': 0, + 'require_shot_logged': False, + 'require_coefficients_updated': False + } + + +def main(): + """Main daemon loop.""" + + # Silent logging to file only + log_file = os.path.join(os.path.dirname(__file__), 'tuner_logs', 'tuner_daemon.log') + os.makedirs(os.path.dirname(log_file), exist_ok=True) + + logging.basicConfig( + level=logging.INFO, + 
format='%(asctime)s - %(message)s', + handlers=[ + logging.FileHandler(log_file), + ] + ) + + logger = logging.getLogger(__name__) + logger.info("=" * 60) + logger.info("Tuner Daemon Started") + logger.info("=" * 60) + + # Load configuration + settings = load_config_from_file() + + if not settings['enabled']: + logger.info("Tuner is DISABLED in config - daemon will not start tuner") + logger.info("Programmers: Set enabled = True in tuner_config.ini") + # Daemon stays running but doesn't do anything + # This way it's always ready if programmers enable it + while True: + time.sleep(60) # Check every minute if config changed + new_settings = load_config_from_file() + if new_settings['enabled']: + settings = new_settings + break + + logger.info(f"Tuner ENABLED - Team {settings['team_number']}") + logger.info(f"Interlock settings: require_shot_logged={settings['require_shot_logged']}, " + f"require_coefficients_updated={settings['require_coefficients_updated']}") + + # Calculate robot IP + team_number = settings['team_number'] + if team_number > 0: + team_str = str(team_number).zfill(4) + server_ip = f"10.{team_str[:2]}.{team_str[2:]}.2" + else: + # Try common addresses + server_ip = None # Auto-detect + + logger.info(f"Target robot IP: {server_ip or 'auto-detect'}") + + # Create tuner config with interlock settings + config = TunerConfig() + config.TUNER_ENABLED = True + config.REQUIRE_SHOT_LOGGED = settings['require_shot_logged'] + config.REQUIRE_COEFFICIENTS_UPDATED = settings['require_coefficients_updated'] + if server_ip: + config.NT_SERVER_IP = server_ip + + # Start tuner coordinator + try: + logger.info("Starting tuner coordinator...") + coordinator = BayesianTunerCoordinator(config) + coordinator.start(server_ip=server_ip) + + logger.info("Tuner running in background") + logger.info("Drivers don't need to do anything!") + + # Keep running until interrupted + while True: + time.sleep(10) + + # Check if we should reload config + new_settings = 
load_config_from_file() + if not new_settings['enabled'] and settings['enabled']: + logger.info("Tuner disabled in config - stopping") + coordinator.stop() + settings = new_settings + # Wait for re-enable + while True: + time.sleep(60) + newer_settings = load_config_from_file() + if newer_settings['enabled']: + logger.info("Tuner re-enabled - restarting") + coordinator.start(server_ip=server_ip) + settings = newer_settings + break + + except KeyboardInterrupt: + logger.info("Daemon stopped by signal") + if 'coordinator' in locals(): + coordinator.stop() + except Exception as e: + logger.error(f"Daemon error: {e}", exc_info=True) + if 'coordinator' in locals(): + coordinator.stop() + + +if __name__ == "__main__": + main() diff --git a/bayesopt/tuner/__init__.py b/bayesopt/tuner/__init__.py new file mode 100644 index 0000000..6f1d8e8 --- /dev/null +++ b/bayesopt/tuner/__init__.py @@ -0,0 +1,36 @@ +""" +FRC Shooter Bayesian Tuner + +A Driver Station-only Bayesian optimization tuner for the FiringSolutionSolver. +Automatically tunes shooting coefficients based on shot hit/miss feedback. 
+ +Usage: + from tuner import run_tuner + + # Run with default config + run_tuner() + + # Or with custom server IP + run_tuner(server_ip="10.12.34.2") +""" + +__version__ = "1.0.0" + +from .config import TunerConfig, CoefficientConfig +from .tuner import BayesianTunerCoordinator, run_tuner +from .nt_interface import NetworkTablesInterface, ShotData +from .optimizer import BayesianOptimizer, CoefficientTuner +from .logger import TunerLogger, setup_logging + +__all__ = [ + 'TunerConfig', + 'CoefficientConfig', + 'BayesianTunerCoordinator', + 'run_tuner', + 'NetworkTablesInterface', + 'ShotData', + 'BayesianOptimizer', + 'CoefficientTuner', + 'TunerLogger', + 'setup_logging', +] diff --git a/bayesopt/tuner/config.py b/bayesopt/tuner/config.py new file mode 100644 index 0000000..f3c59b3 --- /dev/null +++ b/bayesopt/tuner/config.py @@ -0,0 +1,226 @@ +""" +Configuration module for the FRC Shooter Bayesian Tuner. + +This module loads configuration from two simple files: +1. TUNER_TOGGLES.ini - Three main on/off switches +2. COEFFICIENT_TUNING.py - What to tune, how much, in what order + +NO NEED TO EDIT THIS FILE - edit the files above instead! +""" + +from dataclasses import dataclass +from typing import Dict, List +import os +import configparser +import importlib.util + + +@dataclass +class CoefficientConfig: + """Configuration for a single tunable coefficient.""" + + name: str + default_value: float + min_value: float + max_value: float + initial_step_size: float + step_decay_rate: float + is_integer: bool + enabled: bool + nt_key: str # NetworkTables key path + + def clamp(self, value: float) -> float: + """Clamp value to valid range.""" + clamped = max(self.min_value, min(self.max_value, value)) + if self.is_integer: + clamped = round(clamped) + return clamped + + +class TunerConfig: + """ + Global configuration for the Bayesian tuner system. 
+ + Loads settings from: + - TUNER_TOGGLES.ini (main on/off switches) + - COEFFICIENT_TUNING.py (coefficient definitions and tuning order) + """ + + def __init__(self): + """Initialize configuration by loading from files.""" + # Load toggle settings from TUNER_TOGGLES.ini + self._load_toggles() + + # Load coefficient configuration from COEFFICIENT_TUNING.py + self._load_coefficient_config() + + # Initialize other settings + self._initialize_constants() + + def _load_toggles(self): + """Load the three main toggles from TUNER_TOGGLES.ini""" + # Find the toggles file (in ../config/ relative to tuner module) + module_dir = os.path.dirname(os.path.abspath(__file__)) + parent_dir = os.path.dirname(module_dir) + toggles_file = os.path.join(parent_dir, "config", "TUNER_TOGGLES.ini") + + config = configparser.ConfigParser() + config.read(toggles_file) + + # Load main controls + self.TUNER_ENABLED = config.getboolean('main_controls', 'tuner_enabled', fallback=True) + self.REQUIRE_SHOT_LOGGED = config.getboolean('main_controls', 'require_shot_logged', fallback=False) + self.REQUIRE_COEFFICIENTS_UPDATED = config.getboolean('main_controls', 'require_coefficients_updated', fallback=False) + + # Load team number and calculate robot IP + team_number = config.getint('team', 'team_number', fallback=5892) + self.NT_SERVER_IP = f"10.{team_number // 100}.{team_number % 100}.2" + + def _load_coefficient_config(self): + """Load coefficient definitions from COEFFICIENT_TUNING.py""" + # Find the coefficient config file (in ../config/ relative to tuner module) + module_dir = os.path.dirname(os.path.abspath(__file__)) + parent_dir = os.path.dirname(module_dir) + coeff_file = os.path.join(parent_dir, "config", "COEFFICIENT_TUNING.py") + + # Load as module + spec = importlib.util.spec_from_file_location("coeff_config", coeff_file) + coeff_module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(coeff_module) + + # Load tuning order + self.TUNING_ORDER = 
coeff_module.TUNING_ORDER + + # Convert coefficient dicts to CoefficientConfig objects + self.COEFFICIENTS = {} + for name, cfg in coeff_module.COEFFICIENTS.items(): + self.COEFFICIENTS[name] = CoefficientConfig( + name=name, + default_value=cfg['default_value'], + min_value=cfg['min_value'], + max_value=cfg['max_value'], + initial_step_size=cfg['initial_step_size'], + step_decay_rate=cfg['step_decay_rate'], + is_integer=cfg['is_integer'], + enabled=cfg['enabled'], + nt_key=cfg['nt_key'], + ) + + # Load optimization settings + self.N_INITIAL_POINTS = coeff_module.N_INITIAL_POINTS + self.N_CALLS_PER_COEFFICIENT = coeff_module.N_CALLS_PER_COEFFICIENT + + # Load RoboRIO protection settings + self.MAX_NT_WRITE_RATE_HZ = coeff_module.MAX_WRITE_RATE_HZ + self.MAX_NT_READ_RATE_HZ = coeff_module.MAX_READ_RATE_HZ + self.NT_BATCH_WRITES = coeff_module.BATCH_WRITES + + # Load physical limits + self.PHYSICAL_MAX_VELOCITY_MPS = coeff_module.PHYSICAL_MAX_VELOCITY_MPS + self.PHYSICAL_MIN_VELOCITY_MPS = coeff_module.PHYSICAL_MIN_VELOCITY_MPS + self.PHYSICAL_MAX_ANGLE_RAD = coeff_module.PHYSICAL_MAX_ANGLE_RAD + self.PHYSICAL_MIN_ANGLE_RAD = coeff_module.PHYSICAL_MIN_ANGLE_RAD + self.PHYSICAL_MAX_DISTANCE_M = coeff_module.PHYSICAL_MAX_DISTANCE_M + self.PHYSICAL_MIN_DISTANCE_M = coeff_module.PHYSICAL_MIN_DISTANCE_M + + def _initialize_constants(self): + """Initialize constants that don't come from config files.""" + # NetworkTables configuration + self.NT_TIMEOUT_SECONDS = 5.0 + self.NT_RECONNECT_DELAY_SECONDS = 2.0 + + # NetworkTables keys for shot data + self.NT_SHOT_DATA_TABLE = "/FiringSolver" + self.NT_SHOT_HIT_KEY = "/FiringSolver/Hit" + self.NT_SHOT_DISTANCE_KEY = "/FiringSolver/Distance" + self.NT_SHOT_ANGLE_KEY = "/FiringSolver/Solution/pitchRadians" + self.NT_SHOT_VELOCITY_KEY = "/FiringSolver/Solution/exitVelocity" + self.NT_TUNER_STATUS_KEY = "/FiringSolver/TunerStatus" + + # Match mode detection key + self.NT_MATCH_MODE_KEY = "/FMSInfo/FMSControlData" + + # Bayesian 
optimization settings + self.ACQUISITION_FUNCTION = "EI" # Expected Improvement + + # Safety and validation + self.MIN_VALID_SHOTS_BEFORE_UPDATE = 3 + self.MAX_CONSECUTIVE_INVALID_SHOTS = 5 + self.ABNORMAL_READING_THRESHOLD = 3.0 # Standard deviations + + # Logging configuration + self.LOG_DIRECTORY = "./tuner_logs" + self.LOG_FILENAME_PREFIX = "bayesian_tuner" + self.LOG_TO_CONSOLE = True + + # Threading configuration + self.TUNER_UPDATE_RATE_HZ = 10.0 # How often to check for new data + self.GRACEFUL_SHUTDOWN_TIMEOUT_SECONDS = 5.0 + + # Step size decay configuration + self.STEP_SIZE_DECAY_ENABLED = True + self.MIN_STEP_SIZE_RATIO = 0.1 # Minimum step size as ratio of initial + + def get_enabled_coefficients_in_order(self) -> List[CoefficientConfig]: + """Get list of enabled coefficients in tuning order.""" + return [ + self.COEFFICIENTS[name] + for name in self.TUNING_ORDER + if name in self.COEFFICIENTS and self.COEFFICIENTS[name].enabled + ] + + def validate_config(self) -> List[str]: + """ + Validate configuration and return list of warnings. 
+ + Returns: + List of warning messages (empty if no issues) + """ + warnings = [] + + # Check that enabled coefficients are in tuning order + enabled_coeffs = [name for name, cfg in self.COEFFICIENTS.items() if cfg.enabled] + for name in enabled_coeffs: + if name not in self.TUNING_ORDER: + warnings.append(f"Enabled coefficient '{name}' not in TUNING_ORDER") + + # Check for coefficients in tuning order that don't exist + for name in self.TUNING_ORDER: + if name not in self.COEFFICIENTS: + warnings.append(f"Coefficient '{name}' in TUNING_ORDER but not defined") + + # Validate coefficient configurations + for name, coeff in self.COEFFICIENTS.items(): + if coeff.min_value >= coeff.max_value: + warnings.append(f"{name}: min_value must be < max_value") + + if coeff.default_value < coeff.min_value or coeff.default_value > coeff.max_value: + warnings.append(f"{name}: default_value outside valid range") + + if coeff.initial_step_size <= 0: + warnings.append(f"{name}: initial_step_size must be positive") + + if not 0 < coeff.step_decay_rate <= 1.0: + warnings.append(f"{name}: step_decay_rate must be in (0, 1]") + + # Validate physical limits make sense + if self.PHYSICAL_MIN_VELOCITY_MPS >= self.PHYSICAL_MAX_VELOCITY_MPS: + warnings.append("PHYSICAL_MIN_VELOCITY_MPS >= PHYSICAL_MAX_VELOCITY_MPS") + + if self.PHYSICAL_MIN_ANGLE_RAD >= self.PHYSICAL_MAX_ANGLE_RAD: + warnings.append("PHYSICAL_MIN_ANGLE_RAD >= PHYSICAL_MAX_ANGLE_RAD") + + if self.PHYSICAL_MIN_DISTANCE_M >= self.PHYSICAL_MAX_DISTANCE_M: + warnings.append("PHYSICAL_MIN_DISTANCE_M >= PHYSICAL_MAX_DISTANCE_M") + + # Validate system parameters + if self.N_INITIAL_POINTS < 1: + warnings.append("N_INITIAL_POINTS must be >= 1") + + if self.N_CALLS_PER_COEFFICIENT < self.N_INITIAL_POINTS: + warnings.append("N_CALLS_PER_COEFFICIENT must be >= N_INITIAL_POINTS") + + if self.TUNER_UPDATE_RATE_HZ <= 0: + warnings.append("TUNER_UPDATE_RATE_HZ must be positive") + + return warnings diff --git a/bayesopt/tuner/logger.py 
b/bayesopt/tuner/logger.py new file mode 100644 index 0000000..76c6108 --- /dev/null +++ b/bayesopt/tuner/logger.py @@ -0,0 +1,268 @@ +""" +Data logging module for the Bayesian Tuner. + +This module handles logging of all tuning data to CSV files for offline analysis. +Logs shot data, coefficient values, step sizes, and NT connection status. +""" + +import os +import csv +import logging +from datetime import datetime +from typing import Dict, Optional +from pathlib import Path + + +logger = logging.getLogger(__name__) + + +class TunerLogger: + """ + CSV logger for tuner data. + + Logs every shot with coefficient values, step sizes, hit/miss results, + and system status. + """ + + def __init__(self, config): + """ + Initialize tuner logger. + + Args: + config: TunerConfig object + """ + self.config = config + self.log_directory = Path(config.LOG_DIRECTORY) + self.csv_file = None + self.csv_writer = None + self.session_start_time = datetime.now() + + # Create log directory if it doesn't exist + self.log_directory.mkdir(parents=True, exist_ok=True) + + # Initialize CSV log file + self._initialize_csv_log() + + logger.info(f"Logger initialized, writing to {self.csv_file}") + + def _initialize_csv_log(self): + """Create and initialize CSV log file.""" + # Generate filename with timestamp + timestamp = self.session_start_time.strftime("%Y%m%d_%H%M%S") + filename = f"{self.config.LOG_FILENAME_PREFIX}_{timestamp}.csv" + self.csv_file = self.log_directory / filename + + # Create CSV file with headers + try: + file_handle = open(self.csv_file, 'w', newline='') + self.csv_writer = csv.writer(file_handle) + + # Write header row - captures ALL robot state at shot time + headers = [ + 'timestamp', + 'session_time_s', + 'coefficient_name', + 'coefficient_value', + 'step_size', + 'iteration', + 'shot_hit', + 'shot_distance', + 'shot_angle_rad', + 'shot_velocity_mps', + 'shot_yaw_rad', + 'target_height_m', + 'launch_height_m', + 'drag_coefficient_used', + 'air_density_used', + 
'projectile_mass_kg', + 'projectile_area_m2', + 'nt_connected', + 'match_mode', + 'tuner_status', + 'all_coefficients', + ] + self.csv_writer.writerow(headers) + + # Store file handle for later closing + self._file_handle = file_handle + + logger.info(f"Created CSV log: {self.csv_file}") + + except Exception as e: + logger.error(f"Failed to create CSV log: {e}") + self.csv_writer = None + + def log_shot( + self, + coefficient_name: str, + coefficient_value: float, + step_size: float, + iteration: int, + shot_data, + nt_connected: bool, + match_mode: bool, + tuner_status: str, + all_coefficient_values: Dict[str, float] + ): + """ + Log a shot to the CSV file. + + Args: + coefficient_name: Name of coefficient being tuned + coefficient_value: Current value of the coefficient + step_size: Current step size + iteration: Current iteration number + shot_data: ShotData object + nt_connected: Whether NT is connected + match_mode: Whether robot is in match mode + tuner_status: Current tuner status string + all_coefficient_values: Dict of all coefficient values + """ + if not self.csv_writer: + logger.warning("CSV writer not initialized, cannot log") + return + + try: + current_time = datetime.now() + session_time = (current_time - self.session_start_time).total_seconds() + + # Format all coefficients as JSON-like string + coeff_str = "; ".join([f"{k}={v:.6f}" for k, v in all_coefficient_values.items()]) + + # Create row with ALL captured data + row = [ + current_time.isoformat(), + f"{session_time:.3f}", + coefficient_name, + f"{coefficient_value:.6f}", + f"{step_size:.6f}", + iteration, + shot_data.hit if shot_data else '', + f"{shot_data.distance:.3f}" if shot_data and shot_data.distance else '', + f"{shot_data.angle:.6f}" if shot_data and shot_data.angle else '', + f"{shot_data.velocity:.3f}" if shot_data and shot_data.velocity else '', + f"{shot_data.yaw:.6f}" if shot_data and hasattr(shot_data, 'yaw') else '', + f"{shot_data.target_height:.3f}" if shot_data and 
hasattr(shot_data, 'target_height') else '', + f"{shot_data.launch_height:.3f}" if shot_data and hasattr(shot_data, 'launch_height') else '', + f"{shot_data.drag_coefficient:.6f}" if shot_data and hasattr(shot_data, 'drag_coefficient') else '', + f"{shot_data.air_density:.6f}" if shot_data and hasattr(shot_data, 'air_density') else '', + f"{shot_data.projectile_mass:.6f}" if shot_data and hasattr(shot_data, 'projectile_mass') else '', + f"{shot_data.projectile_area:.6f}" if shot_data and hasattr(shot_data, 'projectile_area') else '', + nt_connected, + match_mode, + tuner_status, + coeff_str, + ] + + self.csv_writer.writerow(row) + self._file_handle.flush() # Ensure data is written immediately + + logger.debug(f"Logged shot: {coefficient_name}={coefficient_value:.6f}, hit={shot_data.hit if shot_data else 'N/A'}") + + except Exception as e: + logger.error(f"Error logging shot: {e}") + + def log_event(self, event_type: str, message: str, data: Optional[Dict] = None): + """ + Log a system event. + + Args: + event_type: Type of event (e.g., 'START', 'STOP', 'ERROR') + message: Event message + data: Optional additional data + """ + if not self.csv_writer: + return + + try: + current_time = datetime.now() + session_time = (current_time - self.session_start_time).total_seconds() + + # Log as special row with event info + row = [ + current_time.isoformat(), + f"{session_time:.3f}", + f"EVENT_{event_type}", + '', # coefficient_value + '', # step_size + '', # iteration + '', # shot_hit + '', # shot_distance + '', # shot_angle + '', # shot_velocity + '', # nt_connected + '', # match_mode + message, + str(data) if data else '', + ] + + self.csv_writer.writerow(row) + self._file_handle.flush() + + logger.info(f"Logged event: {event_type} - {message}") + + except Exception as e: + logger.error(f"Error logging event: {e}") + + def log_statistics(self, statistics: Dict): + """ + Log optimization statistics. 
+ + Args: + statistics: Dict with optimization statistics + """ + self.log_event('STATISTICS', 'Optimization statistics', statistics) + + def close(self): + """Close the log file.""" + try: + if hasattr(self, '_file_handle') and self._file_handle: + self._file_handle.close() + logger.info(f"Closed log file: {self.csv_file}") + except Exception as e: + logger.error(f"Error closing log file: {e}") + + def get_log_file_path(self) -> Optional[Path]: + """ + Get path to current log file. + + Returns: + Path to log file or None if not initialized + """ + return self.csv_file + + def __enter__(self): + """Context manager entry.""" + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + """Context manager exit.""" + self.close() + + +def setup_logging(config, log_level=logging.INFO): + """ + Setup logging configuration for the tuner. + + Args: + config: TunerConfig object + log_level: Logging level (default: INFO) + """ + # Configure root logger + logging.basicConfig( + level=log_level, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', + datefmt='%Y-%m-%d %H:%M:%S' + ) + + # Add console handler if configured + if config.LOG_TO_CONSOLE: + console_handler = logging.StreamHandler() + console_handler.setLevel(log_level) + formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') + console_handler.setFormatter(formatter) + + root_logger = logging.getLogger() + root_logger.addHandler(console_handler) + + logger.info("Logging configured") diff --git a/bayesopt/tuner/nt_interface.py b/bayesopt/tuner/nt_interface.py new file mode 100644 index 0000000..310d1b1 --- /dev/null +++ b/bayesopt/tuner/nt_interface.py @@ -0,0 +1,461 @@ +""" +NetworkTables interface module for the Bayesian Tuner. 
+ +This module handles all NetworkTables communication including: +- Reading shot data and match mode status +- Writing updated coefficient values +- Connection management and error handling +- Status feedback to drivers +""" + +import time +from typing import Optional, Dict, Any +from dataclasses import dataclass +import logging + +try: + from networktables import NetworkTables +except ImportError: + # Provide a mock for testing without pynetworktables + class NetworkTables: + @staticmethod + def initialize(server=None): + pass + + @staticmethod + def isConnected(): + return False + + @staticmethod + def getTable(name): + return None + + +logger = logging.getLogger(__name__) + + +@dataclass +class ShotData: + """Container for shot data from NetworkTables.""" + + hit: bool + distance: float + angle: float + velocity: float + timestamp: float + + # Additional data captured at shot time + yaw: float = 0.0 # Turret yaw angle + target_height: float = 0.0 # Target height used + launch_height: float = 0.0 # Launch height used + + # Current coefficient values at time of shot + drag_coefficient: float = 0.0 + air_density: float = 0.0 + projectile_mass: float = 0.0 + projectile_area: float = 0.0 + + def is_valid(self, config) -> bool: + """ + Check if shot data is valid and within physical limits. 
+ + Args: + config: TunerConfig with physical limit constants + + Returns: + True if shot data is valid and physically reasonable + """ + return ( + isinstance(self.hit, bool) + and isinstance(self.distance, (int, float)) + and isinstance(self.angle, (int, float)) + and isinstance(self.velocity, (int, float)) + # Distance bounds check (field geometry) + and config.PHYSICAL_MIN_DISTANCE_M <= self.distance <= config.PHYSICAL_MAX_DISTANCE_M + # Velocity bounds check (motor/mechanism physical limits) + and config.PHYSICAL_MIN_VELOCITY_MPS <= self.velocity <= config.PHYSICAL_MAX_VELOCITY_MPS + # Angle bounds check (mechanism physical limits) + and config.PHYSICAL_MIN_ANGLE_RAD <= self.angle <= config.PHYSICAL_MAX_ANGLE_RAD + ) + +class NetworkTablesInterface: + """Interface for NetworkTables communication with RoboRIO protection.""" + + def __init__(self, config): + """ + Initialize NetworkTables interface with rate limiting. + + Args: + config: TunerConfig instance with NT settings and rate limits + """ + self.config = config + self.connected = False + self.last_connection_attempt = 0.0 + self.shot_data_listeners = [] + + # Rate limiting to prevent RoboRIO overload + self.last_write_time = 0.0 + self.min_write_interval = 1.0 / config.MAX_NT_WRITE_RATE_HZ + self.last_read_time = 0.0 + self.min_read_interval = 1.0 / config.MAX_NT_READ_RATE_HZ + self.pending_writes = {} # For batching writes if enabled + + # Tables + self.root_table = None + self.tuning_table = None + self.firing_solver_table = None + + # Last shot data + self.last_shot_timestamp = 0.0 + self.last_shot_data: Optional[ShotData] = None + + logger.info("NetworkTables interface initialized with rate limiting") + logger.info(f"Write rate limit: {config.MAX_NT_WRITE_RATE_HZ} Hz, " + f"Read rate limit: {config.MAX_NT_READ_RATE_HZ} Hz") + + def connect(self, server_ip: Optional[str] = None) -> bool: + """ + Connect to NetworkTables server. + + Args: + server_ip: IP address of robot/server. 
If None, uses config default. + + Returns: + True if connected successfully, False otherwise + """ + current_time = time.time() + + # Throttle connection attempts + if current_time - self.last_connection_attempt < self.config.NT_RECONNECT_DELAY_SECONDS: + return self.connected + + self.last_connection_attempt = current_time + + try: + if server_ip is None: + server_ip = self.config.NT_SERVER_IP + + logger.info(f"Attempting to connect to NetworkTables at {server_ip}") + NetworkTables.initialize(server=server_ip) + + # Wait for connection + timeout = self.config.NT_TIMEOUT_SECONDS + start_time = time.time() + + while not NetworkTables.isConnected(): + if time.time() - start_time > timeout: + logger.warning(f"Connection timeout after {timeout}s") + return False + time.sleep(0.1) + + # Get tables + self.root_table = NetworkTables.getTable("") + self.tuning_table = NetworkTables.getTable("/Tuning") + self.firing_solver_table = NetworkTables.getTable(self.config.NT_SHOT_DATA_TABLE) + + self.connected = True + logger.info("Connected to NetworkTables successfully") + return True + + except Exception as e: + logger.error(f"Failed to connect to NetworkTables: {e}") + self.connected = False + return False + + def is_connected(self) -> bool: + """Check if connected to NetworkTables.""" + try: + self.connected = NetworkTables.isConnected() + except Exception as e: + logger.error(f"Error checking connection status: {e}") + self.connected = False + + return self.connected + + def disconnect(self): + """Disconnect from NetworkTables.""" + try: + # NetworkTables doesn't have an explicit disconnect in pynetworktables + self.connected = False + logger.info("Disconnected from NetworkTables") + except Exception as e: + logger.error(f"Error during disconnect: {e}") + + def read_coefficient(self, nt_key: str, default_value: float) -> float: + """ + Read a coefficient value from NetworkTables. 
+ + Args: + nt_key: NetworkTables key path + default_value: Default value if key doesn't exist + + Returns: + Current coefficient value + """ + if not self.is_connected(): + logger.warning(f"Not connected, returning default for {nt_key}") + return default_value + + try: + value = self.tuning_table.getNumber(nt_key, default_value) + return value + except Exception as e: + logger.error(f"Error reading {nt_key}: {e}") + return default_value + + def write_coefficient(self, nt_key: str, value: float, force: bool = False) -> bool: + """ + Write a coefficient value to NetworkTables with rate limiting. + + Protects RoboRIO from being overloaded with too frequent updates. + + Args: + nt_key: NetworkTables key path + value: Coefficient value to write + force: If True, bypass rate limiting (use sparingly) + + Returns: + True if write succeeded, False otherwise + """ + if not self.is_connected(): + logger.warning(f"Not connected, cannot write {nt_key}") + return False + + # Rate limiting check (unless forced) + current_time = time.time() + if not force: + time_since_last_write = current_time - self.last_write_time + if time_since_last_write < self.min_write_interval: + # Too soon, queue for batching if enabled + if self.config.NT_BATCH_WRITES: + self.pending_writes[nt_key] = value + logger.debug(f"Queueing write for {nt_key} due to rate limit") + return False + else: + logger.debug(f"Skipping write for {nt_key} due to rate limit") + return False + + try: + self.tuning_table.putNumber(nt_key, value) + self.last_write_time = current_time + logger.info(f"Wrote {nt_key} = {value}") + return True + except Exception as e: + logger.error(f"Error writing {nt_key}: {e}") + return False + + def flush_pending_writes(self) -> int: + """ + Flush any pending batched writes to NetworkTables. 
+ + Returns: + Number of writes flushed + """ + if not self.pending_writes: + return 0 + + count = 0 + for nt_key, value in list(self.pending_writes.items()): + if self.write_coefficient(nt_key, value, force=True): + count += 1 + del self.pending_writes[nt_key] + + if count > 0: + logger.info(f"Flushed {count} batched writes to NetworkTables") + + return count + + def read_shot_data(self) -> Optional[ShotData]: + """ + Read the latest shot data from NetworkTables with rate limiting. + + Protects RoboRIO from excessive read requests. + Captures ALL robot state data at the moment of the shot including: + - Shot result (hit/miss) + - Calculated firing solution (distance, angle, velocity, yaw) + - Physical parameters (target height, launch height) + - Current coefficient values being used + + Returns: + ShotData object if new data available, None otherwise + """ + if not self.is_connected(): + return None + + # Rate limiting check + current_time = time.time() + time_since_last_read = current_time - self.last_read_time + if time_since_last_read < self.min_read_interval: + return None # Skip read to avoid overloading RoboRIO + + self.last_read_time = current_time + + try: + # Check if there's new shot data by monitoring timestamp + shot_timestamp = self.firing_solver_table.getNumber("ShotTimestamp", 0.0) + + # Only process if this is a new shot + if shot_timestamp <= self.last_shot_timestamp: + return None + + # Read shot result (hit or miss) + hit = self.firing_solver_table.getBoolean("Hit", False) + + # Read calculated firing solution data + distance = self.firing_solver_table.getNumber("Distance", 0.0) + + # Read from solution subtable + solution_table = self.firing_solver_table.getSubTable("Solution") + angle = solution_table.getNumber("pitchRadians", 0.0) + velocity = solution_table.getNumber("exitVelocity", 0.0) + yaw = solution_table.getNumber("yawRadians", 0.0) + + # Read physical parameters used in calculation + target_height = 
self.firing_solver_table.getNumber("TargetHeight", 0.0)
+            launch_height = self.firing_solver_table.getNumber("LaunchHeight", 0.0)
+
+            # Read current coefficient values AT TIME OF SHOT
+            drag_coeff = self.firing_solver_table.getNumber("DragCoefficient", 0.0)
+            air_density = self.firing_solver_table.getNumber("AirDensity", 1.225)
+            projectile_mass = self.firing_solver_table.getNumber("ProjectileMass", 0.0)
+            projectile_area = self.firing_solver_table.getNumber("ProjectileArea", 0.0)
+
+            # Create comprehensive shot data object
+            shot_data = ShotData(
+                hit=hit,
+                distance=distance,
+                angle=angle,
+                velocity=velocity,
+                timestamp=shot_timestamp,
+                yaw=yaw,
+                target_height=target_height,
+                launch_height=launch_height,
+                drag_coefficient=drag_coeff,
+                air_density=air_density,
+                projectile_mass=projectile_mass,
+                projectile_area=projectile_area,
+            )
+
+            # Update tracking
+            self.last_shot_timestamp = shot_timestamp
+            self.last_shot_data = shot_data
+
+            logger.info(f"New shot captured: hit={hit}, dist={distance:.2f}m, "
+                        f"angle={angle:.3f}rad, vel={velocity:.2f}m/s, "
+                        f"drag={drag_coeff:.6f}")
+
+            return shot_data
+
+        except Exception as e:
+            logger.error(f"Error reading shot data: {e}")
+            return None
+
+    def is_match_mode(self) -> bool:
+        """
+        Check if robot is in match mode (FMS attached).
+
+        Returns:
+            True if in match mode, False otherwise
+        """
+        if not self.is_connected():
+            return False
+
+        try:
+            # Check FMSInfo for FMS control data
+            fms_table = NetworkTables.getTable("/FMSInfo")
+
+            # If FMSControlData exists and is not 0, we're in a match
+            fms_control = fms_table.getNumber("FMSControlData", 0)
+            return fms_control != 0
+
+        except Exception as e:
+            logger.error(f"Error checking match mode: {e}")
+            return False
+
+    def write_status(self, status: str):
+        """
+        Write tuner status message to NetworkTables for driver feedback. 
+ + Args: + status: Status message string + """ + if not self.is_connected(): + return + + try: + self.firing_solver_table.putString("TunerStatus", status) + logger.debug(f"Status: {status}") + except Exception as e: + logger.error(f"Error writing status: {e}") + + def read_all_coefficients(self, coefficients: Dict[str, Any]) -> Dict[str, float]: + """ + Read all coefficient values from NetworkTables. + + Args: + coefficients: Dict of CoefficientConfig objects + + Returns: + Dict mapping coefficient names to current values + """ + values = {} + for name, coeff in coefficients.items(): + values[name] = self.read_coefficient(coeff.nt_key, coeff.default_value) + + return values + + def write_all_coefficients(self, coefficient_values: Dict[str, float]) -> bool: + """ + Write multiple coefficient values to NetworkTables. + + Args: + coefficient_values: Dict mapping coefficient names to values + + Returns: + True if all writes succeeded, False otherwise + """ + success = True + for name, value in coefficient_values.items(): + if name in self.config.COEFFICIENTS: + coeff = self.config.COEFFICIENTS[name] + if not self.write_coefficient(coeff.nt_key, value): + success = False + + return success + + def write_interlock_settings(self, require_shot_logged: bool, require_coefficients_updated: bool): + """ + Write shooting interlock settings to NetworkTables. 
+ + Args: + require_shot_logged: If True, robot must wait for shot to be logged + require_coefficients_updated: If True, robot must wait for coefficient update + """ + if not self.is_connected(): + return + + try: + interlock_table = NetworkTables.getTable("/FiringSolver/Interlock") + interlock_table.putBoolean("RequireShotLogged", require_shot_logged) + interlock_table.putBoolean("RequireCoefficientsUpdated", require_coefficients_updated) + + logger.info(f"Interlock settings: shot_logged={require_shot_logged}, coeff_updated={require_coefficients_updated}") + except Exception as e: + logger.error(f"Error writing interlock settings: {e}") + + def signal_coefficients_updated(self): + """ + Signal that coefficients have been updated (clears interlock). + + Sets the CoefficientsUpdated flag to true, allowing robot to shoot + if that interlock is enabled. + """ + if not self.is_connected(): + return + + try: + interlock_table = NetworkTables.getTable("/FiringSolver/Interlock") + interlock_table.putBoolean("CoefficientsUpdated", True) + logger.debug("Signaled coefficients updated") + except Exception as e: + logger.error(f"Error signaling coefficient update: {e}") diff --git a/bayesopt/tuner/optimizer.py b/bayesopt/tuner/optimizer.py new file mode 100644 index 0000000..a15ceec --- /dev/null +++ b/bayesopt/tuner/optimizer.py @@ -0,0 +1,395 @@ +""" +Bayesian optimizer module for coefficient tuning. + +This module implements Bayesian optimization using scikit-optimize to tune +shooting coefficients based on hit/miss feedback with adaptive step sizes. 
+""" + +import logging +from typing import List, Tuple, Optional, Dict +import numpy as np + +try: + from skopt import Optimizer + from skopt.space import Real, Integer +except ImportError: + # Provide mock for testing without scikit-optimize + class Optimizer: + def __init__(self, *args, **kwargs): + pass + + def ask(self): + return [0.5] + + def tell(self, x, y): + pass + + class Real: + def __init__(self, *args, **kwargs): + pass + + class Integer: + def __init__(self, *args, **kwargs): + pass + + +logger = logging.getLogger(__name__) + + +class BayesianOptimizer: + """ + Bayesian optimizer for a single coefficient. + + Uses Expected Improvement acquisition function and Gaussian Process + to efficiently explore the parameter space. + """ + + def __init__(self, coeff_config, tuner_config): + """ + Initialize optimizer for a specific coefficient. + + Args: + coeff_config: CoefficientConfig object + tuner_config: TunerConfig object + """ + self.coeff_config = coeff_config + self.tuner_config = tuner_config + + # Create search space + if coeff_config.is_integer: + self.search_space = [Integer( + int(coeff_config.min_value), + int(coeff_config.max_value), + name=coeff_config.name + )] + else: + self.search_space = [Real( + coeff_config.min_value, + coeff_config.max_value, + name=coeff_config.name + )] + + # Initialize optimizer + self.optimizer = Optimizer( + dimensions=self.search_space, + n_initial_points=tuner_config.N_INITIAL_POINTS, + acq_func=tuner_config.ACQUISITION_FUNCTION, + random_state=None, # Use random seed for exploration + ) + + # Tracking + self.iteration = 0 + self.current_step_size = coeff_config.initial_step_size + self.best_value = coeff_config.default_value + self.best_score = float('-inf') + self.evaluation_history = [] + + logger.info(f"Initialized optimizer for {coeff_config.name}") + + def suggest_next_value(self) -> float: + """ + Suggest next coefficient value to try. 
+ + Returns: + Suggested coefficient value + """ + try: + # Get next point from optimizer + suggested = self.optimizer.ask() + value = suggested[0] + + # Apply step size decay if enabled + if self.tuner_config.STEP_SIZE_DECAY_ENABLED and self.iteration > 0: + # Calculate decayed step size + decay_factor = self.coeff_config.step_decay_rate ** self.iteration + min_step = self.coeff_config.initial_step_size * self.tuner_config.MIN_STEP_SIZE_RATIO + self.current_step_size = max( + min_step, + self.coeff_config.initial_step_size * decay_factor + ) + + # Clamp to valid range + value = self.coeff_config.clamp(value) + + logger.info(f"Suggesting {self.coeff_config.name} = {value:.6f} (step size: {self.current_step_size:.6f})") + return value + + except Exception as e: + logger.error(f"Error suggesting next value: {e}") + return self.coeff_config.default_value + + def report_result(self, value: float, hit: bool, additional_data: Optional[Dict] = None): + """ + Report the result of testing a coefficient value. + + Args: + value: The coefficient value that was tested + hit: Whether the shot hit (True) or missed (False) + additional_data: Optional dict with distance, velocity, etc. 
+ """ + try: + # Convert hit/miss to optimization score (maximize hit rate) + # Score is 1.0 for hit, -1.0 for miss + score = 1.0 if hit else -1.0 + + # Add small bonus for being closer to target if distance data available + if additional_data and 'distance' in additional_data: + # Smaller distances are slightly better (secondary objective) + distance = additional_data.get('distance', 0) + if distance > 0: + distance_bonus = -0.01 / max(distance, 1.0) # Small negative bonus + score += distance_bonus + + # Tell optimizer the result + self.optimizer.tell([value], score) + + # Track best result + if score > self.best_score: + self.best_score = score + self.best_value = value + logger.info(f"New best for {self.coeff_config.name}: {value:.6f} (score: {score:.3f})") + + # Record in history + self.evaluation_history.append({ + 'iteration': self.iteration, + 'value': value, + 'hit': hit, + 'score': score, + 'step_size': self.current_step_size, + 'additional_data': additional_data or {} + }) + + self.iteration += 1 + + logger.debug(f"Reported result: {self.coeff_config.name}={value:.6f}, hit={hit}, score={score:.3f}") + + except Exception as e: + logger.error(f"Error reporting result: {e}") + + def is_converged(self) -> bool: + """ + Check if optimization has converged. 
+ + Returns: + True if converged or max iterations reached + """ + # Check if we've reached max iterations + if self.iteration >= self.tuner_config.N_CALLS_PER_COEFFICIENT: + logger.info(f"{self.coeff_config.name} reached max iterations ({self.iteration})") + return True + + # Check if step size is below minimum (indicates convergence) + min_step = self.coeff_config.initial_step_size * self.tuner_config.MIN_STEP_SIZE_RATIO + if self.current_step_size <= min_step * 1.1: # Small tolerance + logger.info(f"{self.coeff_config.name} converged (step size: {self.current_step_size:.6f})") + return True + + # Check if we have enough history to evaluate convergence + if len(self.evaluation_history) >= 5: + # Check variance in recent scores + recent_scores = [h['score'] for h in self.evaluation_history[-5:]] + variance = np.var(recent_scores) + + # If variance is very low, we've converged + if variance < 0.01: + logger.info(f"{self.coeff_config.name} converged (low variance: {variance:.6f})") + return True + + return False + + def get_best_value(self) -> float: + """ + Get the best coefficient value found so far. + + Returns: + Best coefficient value + """ + return self.best_value + + def get_statistics(self) -> Dict: + """ + Get optimization statistics. + + Returns: + Dict with statistics (iterations, best value, convergence, etc.) + """ + hit_rate = 0.0 + if self.evaluation_history: + hits = sum(1 for h in self.evaluation_history if h['hit']) + hit_rate = hits / len(self.evaluation_history) + + return { + 'coefficient_name': self.coeff_config.name, + 'iterations': self.iteration, + 'best_value': self.best_value, + 'best_score': self.best_score, + 'current_step_size': self.current_step_size, + 'hit_rate': hit_rate, + 'is_converged': self.is_converged(), + 'total_evaluations': len(self.evaluation_history), + } + + +class CoefficientTuner: + """ + Manages sequential tuning of multiple coefficients. 
class CoefficientTuner:
    """
    Drives sequential tuning of several coefficients.

    One coefficient is optimized at a time, following the configured
    priority order; once it converges the tuner advances to the next one.
    """

    def __init__(self, tuner_config):
        """
        Initialize coefficient tuner.

        Args:
            tuner_config: TunerConfig object
        """
        self.config = tuner_config
        self.coefficients = tuner_config.get_enabled_coefficients_in_order()

        self.current_index = 0
        self.current_optimizer: Optional[BayesianOptimizer] = None
        self.completed_coefficients = []

        # Shots accumulated for the value currently under test
        self.pending_shots = []
        self.consecutive_invalid_shots = 0

        logger.info(f"Initialized tuner for {len(self.coefficients)} coefficients")

        # Kick off the first coefficient immediately (if any are enabled)
        if self.coefficients:
            self._start_next_coefficient()

    def _start_next_coefficient(self):
        """Advance to (and begin optimizing) the next coefficient."""
        if self.current_index >= len(self.coefficients):
            logger.info("All coefficients tuned!")
            self.current_optimizer = None
            return

        coeff = self.coefficients[self.current_index]
        logger.info(f"Starting optimization for {coeff.name} ({self.current_index + 1}/{len(self.coefficients)})")

        self.current_optimizer = BayesianOptimizer(coeff, self.config)
        self.pending_shots = []

    def get_current_coefficient_name(self) -> Optional[str]:
        """Name of the coefficient currently being tuned, or None when idle."""
        opt = self.current_optimizer
        return opt.coeff_config.name if opt else None

    def suggest_coefficient_update(self) -> Optional[Tuple[str, float]]:
        """
        Propose the next coefficient value to test.

        Returns:
            Tuple of (coefficient_name, suggested_value) or None if done
        """
        opt = self.current_optimizer
        if opt is None:
            return None
        return (opt.coeff_config.name, opt.suggest_next_value())

    def record_shot(self, shot_data, coefficient_values: Dict[str, float]):
        """
        Record one shot outcome against the coefficient under test.

        Args:
            shot_data: ShotData object
            coefficient_values: Current values of all coefficients
        """
        if self.current_optimizer is None:
            logger.warning("No active optimizer to record shot")
            return

        # Reject malformed telemetry; too many rejects in a row aborts tuning
        if not shot_data.is_valid():
            self.consecutive_invalid_shots += 1
            logger.warning(f"Invalid shot data (consecutive: {self.consecutive_invalid_shots})")

            if self.consecutive_invalid_shots >= self.config.MAX_CONSECUTIVE_INVALID_SHOTS:
                logger.error("Too many consecutive invalid shots, stopping tuning")
                self.current_optimizer = None
            return

        self.consecutive_invalid_shots = 0

        # Look up the value actually in effect for the active coefficient
        name = self.current_optimizer.coeff_config.name
        tested_value = coefficient_values.get(name, self.current_optimizer.coeff_config.default_value)

        self.pending_shots.append({
            'shot_data': shot_data,
            'coefficient_value': tested_value,
        })

        # Only report to the optimizer once enough valid shots accumulated
        if len(self.pending_shots) >= self.config.MIN_VALID_SHOTS_BEFORE_UPDATE:
            self._process_pending_shots()

    def _process_pending_shots(self):
        """Aggregate accumulated shots and feed the result to the optimizer."""
        if not self.pending_shots or self.current_optimizer is None:
            return

        batch = self.pending_shots

        # Majority vote decides the batch's hit/miss outcome
        hit_count = sum(1 for entry in batch if entry['shot_data'].hit)
        majority_hit = hit_count > len(batch) / 2

        # Average the tested value and observed distance across the batch
        mean_value = np.mean([entry['coefficient_value'] for entry in batch])
        mean_distance = np.mean([entry['shot_data'].distance for entry in batch])

        extra = {
            'distance': mean_distance,
            'num_shots': len(batch),
            'hit_rate': hit_count / len(batch),
        }
        self.current_optimizer.report_result(mean_value, majority_hit, extra)

        self.pending_shots = []

        # Move on once the active coefficient has converged
        if self.current_optimizer.is_converged():
            stats = self.current_optimizer.get_statistics()
            logger.info(f"Coefficient {stats['coefficient_name']} converged: best={stats['best_value']:.6f}, hit_rate={stats['hit_rate']:.2%}")

            self.completed_coefficients.append(self.current_optimizer)
            self.current_index += 1
            self._start_next_coefficient()

    def is_complete(self) -> bool:
        """True once every enabled coefficient has been tuned."""
        return self.current_optimizer is None and self.current_index >= len(self.coefficients)

    def get_tuning_status(self) -> str:
        """
        Human-readable tuning status for dashboards and logs.

        Returns:
            Status string for display
        """
        if self.is_complete():
            return "Tuning complete"

        if self.current_optimizer is None:
            return "Tuner idle"

        opt = self.current_optimizer
        return f"Tuning {opt.coeff_config.name} (iter {opt.iteration}, step {opt.current_step_size:.6f})"
+""" + +import sys +import unittest +import os + +# Add parent directory to path +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + + +def run_tests(): + """Discover and run all tests.""" + # Discover tests in the tests directory + loader = unittest.TestLoader() + start_dir = os.path.join(os.path.dirname(__file__), 'tests') + suite = loader.discover(start_dir, pattern='test_*.py') + + # Run tests with verbose output + runner = unittest.TextTestRunner(verbosity=2) + result = runner.run(suite) + + # Return exit code based on results + return 0 if result.wasSuccessful() else 1 + + +if __name__ == '__main__': + sys.exit(run_tests()) diff --git a/bayesopt/tuner/tests/__init__.py b/bayesopt/tuner/tests/__init__.py new file mode 100644 index 0000000..a7e1a20 --- /dev/null +++ b/bayesopt/tuner/tests/__init__.py @@ -0,0 +1,3 @@ +""" +Test package for tuner. +""" diff --git a/bayesopt/tuner/tests/test_config.py b/bayesopt/tuner/tests/test_config.py new file mode 100644 index 0000000..cbc5405 --- /dev/null +++ b/bayesopt/tuner/tests/test_config.py @@ -0,0 +1,131 @@ +""" +Unit tests for the configuration module. 
+""" + +import unittest +from tuner.config import TunerConfig, CoefficientConfig + + +class TestCoefficientConfig(unittest.TestCase): + """Test CoefficientConfig class.""" + + def test_clamp_float(self): + """Test clamping for float coefficients.""" + config = CoefficientConfig( + name="test", + default_value=0.5, + min_value=0.0, + max_value=1.0, + initial_step_size=0.1, + step_decay_rate=0.9, + is_integer=False, + enabled=True, + nt_key="/test" + ) + + self.assertEqual(config.clamp(0.5), 0.5) + self.assertEqual(config.clamp(-0.5), 0.0) + self.assertEqual(config.clamp(1.5), 1.0) + + def test_clamp_integer(self): + """Test clamping for integer coefficients.""" + config = CoefficientConfig( + name="test", + default_value=20, + min_value=10, + max_value=50, + initial_step_size=5, + step_decay_rate=0.85, + is_integer=True, + enabled=True, + nt_key="/test" + ) + + self.assertEqual(config.clamp(25), 25) + self.assertEqual(config.clamp(25.6), 26) # Round + self.assertEqual(config.clamp(5), 10) + self.assertEqual(config.clamp(60), 50) + + +class TestTunerConfig(unittest.TestCase): + """Test TunerConfig class.""" + + def test_default_config(self): + """Test default configuration.""" + config = TunerConfig() + + self.assertTrue(config.TUNER_ENABLED) + self.assertGreater(len(config.COEFFICIENTS), 0) + self.assertGreater(len(config.TUNING_ORDER), 0) + + def test_get_enabled_coefficients_in_order(self): + """Test getting enabled coefficients in order.""" + config = TunerConfig() + + enabled = config.get_enabled_coefficients_in_order() + + # Should have some enabled coefficients + self.assertGreater(len(enabled), 0) + + # Should be in the correct order + names = [c.name for c in enabled] + expected_order = [n for n in config.TUNING_ORDER if n in names] + self.assertEqual(names, expected_order) + + def test_validate_config_valid(self): + """Test config validation with valid config.""" + config = TunerConfig() + warnings = config.validate_config() + + # Should have no warnings 
for default config + self.assertEqual(len(warnings), 0) + + def test_validate_config_invalid_range(self): + """Test config validation with invalid range.""" + # Test validation logic by checking a coefficient with swapped min/max + coeff = CoefficientConfig( + name="test_invalid", + default_value=0.5, + min_value=1.0, + max_value=0.0, # Invalid: max < min + initial_step_size=0.1, + step_decay_rate=0.9, + is_integer=False, + enabled=True, + nt_key="/test" + ) + + # Manually test the validation logic that would be applied + self.assertGreaterEqual(coeff.min_value, coeff.max_value, + "Test coefficient should have invalid range") + + # Also test that the default config is valid + config = TunerConfig() + warnings = config.validate_config() + # All default coefficients should be valid + for name, c in config.COEFFICIENTS.items(): + self.assertLess(c.min_value, c.max_value, + f"{name} has invalid range") + + def test_coefficient_definitions(self): + """Test that all required coefficients are defined.""" + config = TunerConfig() + + required_coefficients = [ + "kDragCoefficient", + "kVelocityIterationCount", + "kAngleIterationCount", + "kVelocityTolerance", + "kAngleTolerance", + "kLaunchHeight", + ] + + for name in required_coefficients: + self.assertIn(name, config.COEFFICIENTS) + coeff = config.COEFFICIENTS[name] + self.assertEqual(coeff.name, name) + self.assertIsNotNone(coeff.nt_key) + + +if __name__ == '__main__': + unittest.main() diff --git a/bayesopt/tuner/tests/test_logger.py b/bayesopt/tuner/tests/test_logger.py new file mode 100644 index 0000000..4cddb26 --- /dev/null +++ b/bayesopt/tuner/tests/test_logger.py @@ -0,0 +1,134 @@ +""" +Unit tests for the logger module. 
+""" + +import unittest +import tempfile +import shutil +from pathlib import Path +import csv +import time + +from tuner.config import TunerConfig +from tuner.logger import TunerLogger +from tuner.nt_interface import ShotData + + +class TestTunerLogger(unittest.TestCase): + """Test TunerLogger class.""" + + def setUp(self): + """Set up test fixtures.""" + # Create temporary directory for logs + self.temp_dir = tempfile.mkdtemp() + + self.config = TunerConfig() + self.config.LOG_DIRECTORY = self.temp_dir + + self.logger = TunerLogger(self.config) + + def tearDown(self): + """Clean up test fixtures.""" + self.logger.close() + shutil.rmtree(self.temp_dir) + + def test_initialization(self): + """Test logger initialization.""" + self.assertIsNotNone(self.logger.csv_file) + self.assertTrue(self.logger.csv_file.exists()) + + def test_log_file_creation(self): + """Test that log file is created with correct name.""" + log_file = self.logger.get_log_file_path() + + self.assertIsNotNone(log_file) + self.assertTrue(log_file.exists()) + self.assertTrue(str(log_file).endswith('.csv')) + + def test_log_shot(self): + """Test logging shot data.""" + shot_data = ShotData( + hit=True, + distance=5.0, + angle=0.5, + velocity=15.0, + timestamp=time.time() + ) + + coefficient_values = { + 'kDragCoefficient': 0.003, + 'kLaunchHeight': 0.8, + } + + self.logger.log_shot( + coefficient_name='kDragCoefficient', + coefficient_value=0.003, + step_size=0.001, + iteration=1, + shot_data=shot_data, + nt_connected=True, + match_mode=False, + tuner_status='Tuning', + all_coefficient_values=coefficient_values + ) + + # Verify file was written + self.logger.close() + + with open(self.logger.csv_file, 'r') as f: + reader = csv.reader(f) + rows = list(reader) + + # Should have header + 1 data row + self.assertEqual(len(rows), 2) + + # Check data row has correct number of columns + self.assertEqual(len(rows[1]), len(rows[0])) + + def test_log_event(self): + """Test logging events.""" + 
self.logger.log_event('TEST', 'Test event message') + + self.logger.close() + + with open(self.logger.csv_file, 'r') as f: + reader = csv.reader(f) + rows = list(reader) + + # Should have header + event row + self.assertGreaterEqual(len(rows), 2) + + def test_log_statistics(self): + """Test logging statistics.""" + stats = { + 'coefficient_name': 'kDragCoefficient', + 'iterations': 10, + 'best_value': 0.0035, + 'hit_rate': 0.75, + } + + self.logger.log_statistics(stats) + + self.logger.close() + + with open(self.logger.csv_file, 'r') as f: + content = f.read() + self.assertIn('STATISTICS', content) + + def test_context_manager(self): + """Test context manager usage.""" + temp_dir = tempfile.mkdtemp() + config = TunerConfig() + config.LOG_DIRECTORY = temp_dir + + with TunerLogger(config) as logger: + log_file = logger.get_log_file_path() + self.assertTrue(log_file.exists()) + + # File should be closed after context + # Clean up + shutil.rmtree(temp_dir) + + +if __name__ == '__main__': + unittest.main() diff --git a/bayesopt/tuner/tests/test_optimizer.py b/bayesopt/tuner/tests/test_optimizer.py new file mode 100644 index 0000000..16eebe4 --- /dev/null +++ b/bayesopt/tuner/tests/test_optimizer.py @@ -0,0 +1,239 @@ +""" +Unit tests for the optimizer module. 
+""" + +import unittest +from unittest.mock import Mock, patch +import numpy as np + +from tuner.config import TunerConfig, CoefficientConfig +from tuner.optimizer import BayesianOptimizer, CoefficientTuner +from tuner.nt_interface import ShotData + + +class TestBayesianOptimizer(unittest.TestCase): + """Test BayesianOptimizer class.""" + + def setUp(self): + """Set up test fixtures.""" + self.config = TunerConfig() + self.coeff_config = self.config.COEFFICIENTS["kDragCoefficient"] + self.optimizer = BayesianOptimizer(self.coeff_config, self.config) + + def test_initialization(self): + """Test optimizer initialization.""" + self.assertEqual(self.optimizer.iteration, 0) + self.assertEqual(self.optimizer.best_value, self.coeff_config.default_value) + self.assertEqual(self.optimizer.current_step_size, self.coeff_config.initial_step_size) + + def test_suggest_next_value(self): + """Test suggesting next value.""" + value = self.optimizer.suggest_next_value() + + # Should be within valid range + self.assertGreaterEqual(value, self.coeff_config.min_value) + self.assertLessEqual(value, self.coeff_config.max_value) + + def test_report_result(self): + """Test reporting results.""" + value = 0.003 + + # Report a hit + self.optimizer.report_result(value, hit=True) + + self.assertEqual(self.optimizer.iteration, 1) + self.assertEqual(len(self.optimizer.evaluation_history), 1) + self.assertEqual(self.optimizer.evaluation_history[0]['value'], value) + self.assertEqual(self.optimizer.evaluation_history[0]['hit'], True) + + def test_best_value_tracking(self): + """Test that best value is tracked correctly.""" + # Report a miss + self.optimizer.report_result(0.001, hit=False) + first_best = self.optimizer.best_value + + # Report a hit - should become new best + self.optimizer.report_result(0.003, hit=True) + + self.assertEqual(self.optimizer.best_value, 0.003) + self.assertGreater(self.optimizer.best_score, -1.0) + + def test_convergence_max_iterations(self): + """Test convergence 
based on max iterations.""" + # Run until max iterations + for i in range(self.config.N_CALLS_PER_COEFFICIENT): + value = self.optimizer.suggest_next_value() + self.optimizer.report_result(value, hit=True) + + self.assertTrue(self.optimizer.is_converged()) + + def test_step_size_decay(self): + """Test step size decay over iterations.""" + initial_step = self.optimizer.current_step_size + + # Run several iterations + for i in range(5): + value = self.optimizer.suggest_next_value() + self.optimizer.report_result(value, hit=True) + + # Step size should have decayed + if self.config.STEP_SIZE_DECAY_ENABLED: + self.assertLess(self.optimizer.current_step_size, initial_step) + + def test_integer_coefficient(self): + """Test integer coefficient handling.""" + int_coeff = self.config.COEFFICIENTS["kVelocityIterationCount"] + optimizer = BayesianOptimizer(int_coeff, self.config) + + value = optimizer.suggest_next_value() + + # Should be an integer + self.assertEqual(value, int(value)) + self.assertGreaterEqual(value, int_coeff.min_value) + self.assertLessEqual(value, int_coeff.max_value) + + def test_get_statistics(self): + """Test getting optimization statistics.""" + # Run a few iterations + for i in range(3): + value = self.optimizer.suggest_next_value() + self.optimizer.report_result(value, hit=(i % 2 == 0)) + + stats = self.optimizer.get_statistics() + + self.assertIn('coefficient_name', stats) + self.assertIn('iterations', stats) + self.assertIn('best_value', stats) + self.assertIn('hit_rate', stats) + self.assertEqual(stats['iterations'], 3) + + +class TestCoefficientTuner(unittest.TestCase): + """Test CoefficientTuner class.""" + + def setUp(self): + """Set up test fixtures.""" + self.config = TunerConfig() + self.tuner = CoefficientTuner(self.config) + + def test_initialization(self): + """Test tuner initialization.""" + self.assertEqual(self.tuner.current_index, 0) + self.assertIsNotNone(self.tuner.current_optimizer) + 
self.assertGreater(len(self.tuner.coefficients), 0) + + def test_get_current_coefficient_name(self): + """Test getting current coefficient name.""" + name = self.tuner.get_current_coefficient_name() + self.assertIsNotNone(name) + self.assertIn(name, self.config.COEFFICIENTS) + + def test_suggest_coefficient_update(self): + """Test suggesting coefficient updates.""" + suggestion = self.tuner.suggest_coefficient_update() + + self.assertIsNotNone(suggestion) + self.assertEqual(len(suggestion), 2) + + coeff_name, value = suggestion + self.assertIn(coeff_name, self.config.COEFFICIENTS) + + coeff = self.config.COEFFICIENTS[coeff_name] + self.assertGreaterEqual(value, coeff.min_value) + self.assertLessEqual(value, coeff.max_value) + + def test_record_shot_valid(self): + """Test recording valid shot data.""" + shot_data = ShotData( + hit=True, + distance=5.0, + angle=0.5, + velocity=15.0, + timestamp=1234567890.0 + ) + + coefficient_values = { + coeff.name: coeff.default_value + for coeff in self.config.COEFFICIENTS.values() + } + + self.tuner.record_shot(shot_data, coefficient_values) + + # Should have pending shots + self.assertGreater(len(self.tuner.pending_shots), 0) + + def test_record_shot_invalid(self): + """Test recording invalid shot data.""" + # Invalid shot data (negative distance) + shot_data = ShotData( + hit=True, + distance=-1.0, # Invalid + angle=0.5, + velocity=15.0, + timestamp=1234567890.0 + ) + + coefficient_values = {} + + initial_invalid_count = self.tuner.consecutive_invalid_shots + self.tuner.record_shot(shot_data, coefficient_values) + + # Should increment invalid counter + self.assertGreater(self.tuner.consecutive_invalid_shots, initial_invalid_count) + + def test_shot_accumulation(self): + """Test that shots are accumulated before processing.""" + coefficient_values = { + coeff.name: coeff.default_value + for coeff in self.config.COEFFICIENTS.values() + } + + # Add shots one by one + for i in range(self.config.MIN_VALID_SHOTS_BEFORE_UPDATE - 1): 
+ shot_data = ShotData( + hit=True, + distance=5.0, + angle=0.5, + velocity=15.0, + timestamp=1234567890.0 + i + ) + self.tuner.record_shot(shot_data, coefficient_values) + + # Should still have pending shots + self.assertEqual(len(self.tuner.pending_shots), + self.config.MIN_VALID_SHOTS_BEFORE_UPDATE - 1) + + # Add one more to trigger processing + shot_data = ShotData( + hit=True, + distance=5.0, + angle=0.5, + velocity=15.0, + timestamp=1234567890.0 + 10 + ) + self.tuner.record_shot(shot_data, coefficient_values) + + # Pending shots should be cleared after processing + self.assertEqual(len(self.tuner.pending_shots), 0) + + def test_is_complete(self): + """Test completion detection.""" + # Initially not complete + self.assertFalse(self.tuner.is_complete()) + + # Simulate completing all coefficients + self.tuner.current_optimizer = None + self.tuner.current_index = len(self.tuner.coefficients) + + self.assertTrue(self.tuner.is_complete()) + + def test_get_tuning_status(self): + """Test getting tuning status.""" + status = self.tuner.get_tuning_status() + + self.assertIsInstance(status, str) + self.assertGreater(len(status), 0) + + +if __name__ == '__main__': + unittest.main() diff --git a/bayesopt/tuner/tuner.py b/bayesopt/tuner/tuner.py new file mode 100644 index 0000000..bae9f8f --- /dev/null +++ b/bayesopt/tuner/tuner.py @@ -0,0 +1,334 @@ +""" +Main tuner coordinator module. + +This module coordinates all tuner components and runs the tuning loop +in a background thread with safe startup/shutdown. +""" + +import time +import threading +import logging +from typing import Optional, Dict + +from .config import TunerConfig +from .nt_interface import NetworkTablesInterface, ShotData +from .optimizer import CoefficientTuner +from .logger import TunerLogger, setup_logging + + +logger = logging.getLogger(__name__) + + +class BayesianTunerCoordinator: + """ + Main coordinator for the Bayesian tuner system. 
class BayesianTunerCoordinator:
    """
    Main coordinator for the Bayesian tuner system.

    Manages the tuning loop, coordinates between NT interface, optimizer,
    and logger, and handles safe startup/shutdown.
    """

    def __init__(self, config: Optional[TunerConfig] = None):
        """
        Initialize tuner coordinator.

        Args:
            config: TunerConfig object. If None, uses default config.
        """
        self.config = config or TunerConfig()

        # Surface (but do not fail on) configuration problems
        warnings = self.config.validate_config()
        if warnings:
            logger.warning(f"Configuration warnings: {warnings}")

        # Components
        self.nt_interface = NetworkTablesInterface(self.config)
        self.optimizer = CoefficientTuner(self.config)
        self.data_logger = TunerLogger(self.config)

        # State
        self.running = False
        self.thread: Optional[threading.Thread] = None
        self.last_shot_timestamp = 0.0

        # Last-known value of every tuned coefficient, keyed by name
        self.current_coefficient_values: Dict[str, float] = {}

        logger.info("Bayesian Tuner Coordinator initialized")

    def start(self, server_ip: Optional[str] = None):
        """
        Start the tuner in a background thread.

        Args:
            server_ip: Optional NT server IP. If None, uses config default.
        """
        if self.running:
            logger.warning("Tuner already running")
            return

        if not self.config.TUNER_ENABLED:
            logger.info("Tuner is disabled (TUNER_ENABLED = False)")
            return

        logger.info("Starting Bayesian Tuner...")
        self.data_logger.log_event('START', 'Tuner starting')

        # Connect to NetworkTables; without a connection there is nothing to tune
        if not self.nt_interface.connect(server_ip):
            logger.error("Failed to connect to NetworkTables, tuner not started")
            self.data_logger.log_event('ERROR', 'Failed to connect to NT')
            return

        # Read initial coefficient values
        self.current_coefficient_values = self.nt_interface.read_all_coefficients(
            self.config.COEFFICIENTS
        )
        logger.info(f"Initial coefficient values: {self.current_coefficient_values}")

        # Publish interlock settings to robot
        self.nt_interface.write_interlock_settings(
            self.config.REQUIRE_SHOT_LOGGED,
            self.config.REQUIRE_COEFFICIENTS_UPDATED
        )
        logger.info(f"Interlock settings published: shot_logged={self.config.REQUIRE_SHOT_LOGGED}, "
                    f"coeff_updated={self.config.REQUIRE_COEFFICIENTS_UPDATED}")

        # Start tuning thread (daemon so it never blocks process exit)
        self.running = True
        self.thread = threading.Thread(target=self._tuning_loop, daemon=True)
        self.thread.start()

        logger.info("Tuner started successfully")

    def stop(self):
        """Stop the tuner gracefully."""
        if not self.running:
            logger.info("Tuner not running")
            return

        logger.info("Stopping tuner...")
        self.data_logger.log_event('STOP', 'Tuner stopping')

        self.running = False

        # Wait for the loop thread to notice the flag and exit
        if self.thread:
            self.thread.join(timeout=self.config.GRACEFUL_SHUTDOWN_TIMEOUT_SECONDS)
            if self.thread.is_alive():
                logger.warning("Tuner thread did not stop gracefully")

        # Disconnect from NT
        self.nt_interface.disconnect()

        # Close logger
        self.data_logger.close()

        logger.info("Tuner stopped")

    def _tuning_loop(self):
        """Main tuning loop that runs in background thread."""
        logger.info("Tuning loop started")

        # FIX: guard against a zero/negative configured rate, which previously
        # raised ZeroDivisionError (or would spin without sleeping).
        rate_hz = self.config.TUNER_UPDATE_RATE_HZ
        update_period = 1.0 / rate_hz if rate_hz > 0 else 1.0

        while self.running:
            try:
                # Check for safety conditions
                if not self._check_safety_conditions():
                    logger.warning("Safety check failed, pausing tuning")
                    time.sleep(1.0)
                    continue

                # Check for new shot data
                shot_data = self.nt_interface.read_shot_data()
                if shot_data:
                    self._process_shot(shot_data)

                # Suggest next coefficient value if needed
                self._update_coefficients()

                # Update status
                self._update_status()

                # Sleep until next update
                time.sleep(update_period)

            except Exception as e:
                # Keep the loop alive; individual iteration failures are logged
                logger.error(f"Error in tuning loop: {e}", exc_info=True)
                time.sleep(1.0)

        logger.info("Tuning loop ended")

    def _check_safety_conditions(self) -> bool:
        """
        Check safety conditions before continuing tuning.

        Returns:
            True if safe to continue, False otherwise
        """
        # Check if tuner is enabled
        if not self.config.TUNER_ENABLED:
            return False

        # Check NT connection
        if not self.nt_interface.is_connected():
            logger.warning("NetworkTables disconnected")
            return False

        # Never tune during a match
        if self.nt_interface.is_match_mode():
            logger.warning("Match mode detected, pausing tuning")
            return False

        return True

    def _process_shot(self, shot_data: ShotData):
        """
        Process a new shot result: feed it to the optimizer and log it.

        Args:
            shot_data: ShotData object
        """
        logger.info(f"Processing shot: hit={shot_data.hit}, distance={shot_data.distance:.2f}m")

        # Record shot with optimizer (may advance to the next coefficient,
        # so read optimizer state afterwards)
        self.optimizer.record_shot(shot_data, self.current_coefficient_values)

        # Snapshot optimizer state for the CSV row (defaults when idle)
        coeff_name = self.optimizer.get_current_coefficient_name() or "None"
        current_optimizer = self.optimizer.current_optimizer

        coefficient_value = 0.0
        step_size = 0.0
        iteration = 0

        if current_optimizer:
            coeff_name = current_optimizer.coeff_config.name
            coefficient_value = self.current_coefficient_values.get(
                coeff_name,
                current_optimizer.coeff_config.default_value
            )
            step_size = current_optimizer.current_step_size
            iteration = current_optimizer.iteration

        self.data_logger.log_shot(
            coefficient_name=coeff_name,
            coefficient_value=coefficient_value,
            step_size=step_size,
            iteration=iteration,
            shot_data=shot_data,
            nt_connected=self.nt_interface.is_connected(),
            match_mode=self.nt_interface.is_match_mode(),
            tuner_status=self.optimizer.get_tuning_status(),
            all_coefficient_values=self.current_coefficient_values,
        )

    def _update_coefficients(self):
        """Push the optimizer's next suggested value out to the robot."""
        suggestion = self.optimizer.suggest_coefficient_update()

        if suggestion:
            coeff_name, new_value = suggestion

            # Update in NT
            coeff_config = self.config.COEFFICIENTS[coeff_name]
            success = self.nt_interface.write_coefficient(coeff_config.nt_key, new_value)

            if success:
                # Keep the local mirror in sync with what the robot sees
                self.current_coefficient_values[coeff_name] = new_value
                logger.info(f"Updated {coeff_name} = {new_value:.6f}")

                # Signal interlock system that coefficients are updated
                self.nt_interface.signal_coefficients_updated()
            else:
                logger.error(f"Failed to write {coeff_name} to NT")

    def _update_status(self):
        """Update tuner status in NetworkTables for driver feedback."""
        status = self.optimizer.get_tuning_status()

        # Append step size info while actively tuning
        if self.optimizer.current_optimizer:
            step_size = self.optimizer.current_optimizer.current_step_size
            status += f" | step: {step_size:.6f}"

        self.nt_interface.write_status(status)

    def get_status(self) -> Dict:
        """
        Get current tuner status.

        Returns:
            Dict with status information
        """
        status = {
            'running': self.running,
            'enabled': self.config.TUNER_ENABLED,
            'nt_connected': self.nt_interface.is_connected(),
            'match_mode': self.nt_interface.is_match_mode(),
            'tuning_status': self.optimizer.get_tuning_status(),
            'is_complete': self.optimizer.is_complete(),
            'current_coefficient': self.optimizer.get_current_coefficient_name(),
            'log_file': str(self.data_logger.get_log_file_path()),
        }

        if self.optimizer.current_optimizer:
            status['optimizer_stats'] = self.optimizer.current_optimizer.get_statistics()

        return status

    def __enter__(self):
        """Context manager entry: starts the tuner."""
        self.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit: stops the tuner."""
        self.stop()


def run_tuner(server_ip: Optional[str] = None, config: Optional[TunerConfig] = None):
    """
    Convenience entry point: run the tuner until tuning completes.

    Args:
        server_ip: Optional NT server IP
        config: Optional TunerConfig object
    """
    # Setup logging
    setup_logging(config or TunerConfig())

    logger.info("=" * 60)
    logger.info("FRC Shooter Bayesian Tuner")
    logger.info("=" * 60)

    # FIX: server_ip was previously ignored — the context manager called
    # start() with no arguments. Start explicitly so the caller's IP is honored.
    tuner = BayesianTunerCoordinator(config)
    tuner.start(server_ip)
    try:
        logger.info("Tuner running. Press Ctrl+C to stop.")

        # Keep running until all coefficients are tuned
        while not tuner.optimizer.is_complete():
            time.sleep(1.0)

            status = tuner.get_status()
            if not status['running']:
                # FIX: if start() failed (e.g. NT connection refused) the
                # tuner can never complete; bail out instead of spinning forever.
                logger.warning("Tuner is not running; exiting wait loop")
                break
            logger.info(f"Status: {status['tuning_status']}")

        logger.info("Tuning complete!")

        # Log final statistics for every coefficient that finished
        for optimizer in tuner.optimizer.completed_coefficients:
            stats = optimizer.get_statistics()
            tuner.data_logger.log_statistics(stats)
            logger.info(f"Final stats for {stats['coefficient_name']}: {stats}")

    except KeyboardInterrupt:
        logger.info("Interrupted by user")
    finally:
        tuner.stop()
- For front wheels: - Overrotated → pointing toward the robot’s right (your left) - Underrotated → pointing toward the robot’s left (your right) + Overrotated -> pointing toward the robot's right (your left) + Underrotated -> pointing toward the robot's left (your right) - For back wheels: - Overrotated → pointing toward the robot’s right (your left) - Underrotated → pointing toward the robot’s left (your right) + Overrotated -> pointing toward the robot's right (your left) + Underrotated -> pointing toward the robot's left (your right) */ public static final Rotation2d frontLeftZeroRotation = new Rotation2d(-0.7853181997882291).minus(new Rotation2d(Degrees.of(16 + 5))); diff --git a/src/main/java/frc/robot/generic/subsystems/drive/ModuleIOSpark.java b/src/main/java/frc/robot/generic/subsystems/drive/ModuleIOSpark.java index 1a7b320..b75e367 100644 --- a/src/main/java/frc/robot/generic/subsystems/drive/ModuleIOSpark.java +++ b/src/main/java/frc/robot/generic/subsystems/drive/ModuleIOSpark.java @@ -73,7 +73,7 @@ public class ModuleIOSpark implements ModuleIO { private final Debouncer driveConnectedDebounce = new Debouncer(0.5); private final Debouncer turnConnectedDebounce = new Debouncer(0.5); - // Cached cancoder status — updated only in resetToAbsolute() + // Cached cancoder status - updated only in resetToAbsolute() private boolean lastCancoderConnected = false; private final int module; diff --git a/src/main/java/frc/robot/generic/util/FiringSolutionSolver.java b/src/main/java/frc/robot/generic/util/FiringSolutionSolver.java new file mode 100644 index 0000000..71bfaf9 --- /dev/null +++ b/src/main/java/frc/robot/generic/util/FiringSolutionSolver.java @@ -0,0 +1,136 @@ +package frc.robot.generic.util; + +import edu.wpi.first.math.MathUtil; +import edu.wpi.first.math.geometry.Translation3d; +import org.littletonrobotics.junction.Logger; + +/** + * Computes projectile firing solutions for turreted shooters. Fully WPILib-compliant and + * AdvantageKit-logged. 
 * Supports tunable drag, mass, area, and iterative solver parameters. */
public final class FiringSolutionSolver {

  // --- Tunable constants (via AdvantageKit dashboard) ---
  // Drag coefficient (dimensionless)
  private static final LoggedTunableNumber kDragCoefficient =
      new LoggedTunableNumber("FiringSolver/DragCoefficient", 0.003);

  // Projectile cross-sectional area in square meters (m^2)
  private static final LoggedTunableNumber kProjectileArea =
      new LoggedTunableNumber("FiringSolver/ProjectileArea", 0.0015);

  // Projectile mass in kilograms (kg)
  private static final LoggedTunableNumber kProjectileMass =
      new LoggedTunableNumber("FiringSolver/ProjectileMass", 0.18);

  // Launcher height in meters (m)
  private static final LoggedTunableNumber kLaunchHeight =
      new LoggedTunableNumber("FiringSolver/LaunchHeight", 0.8);

  // Target height in meters (m)
  private static final LoggedTunableNumber kTargetHeight =
      new LoggedTunableNumber("FiringSolver/TargetHeight", 2.3);

  // Maximum exit velocity in meters per second (m/s)
  private static final LoggedTunableNumber kMaxExitVelocity =
      new LoggedTunableNumber("FiringSolver/MaxExitVelocity", 30.0);

  // --- Tunable iteration parameters ---
  // Number of velocity iterations (unitless)
  private static final LoggedTunableNumber kVelocityIterationCount =
      new LoggedTunableNumber("FiringSolver/VelocityIterations", 20);

  // Number of angle iterations (unitless)
  private static final LoggedTunableNumber kAngleIterationCount =
      new LoggedTunableNumber("FiringSolver/AngleIterations", 20);

  // Velocity convergence tolerance in meters per second (m/s)
  private static final LoggedTunableNumber kVelocityTolerance =
      new LoggedTunableNumber("FiringSolver/VelocityTolerance", 0.01);

  // Angle convergence tolerance in radians (rad)
  private static final LoggedTunableNumber kAngleTolerance =
      new LoggedTunableNumber("FiringSolver/AngleTolerance", 1e-4);

  // Standard gravity, m/s^2.
  private static final double GRAVITY = 9.80665;
  // Sea-level air density, kg/m^3.
  // off by ~0.075 due to humidity, not important enough to fix.
  private static final double AIR_DENSITY = 1.225;

  // Utility class - no instances.
  private FiringSolutionSolver() {}

  /**
   * Computes a firing solution (yaw, pitch, exit velocity) toward the given target position.
   *
   * @param targetPosition target position; its z component is combined with the tunable
   *     target/launch height offsets before solving
   * @param isFieldRelative NOTE(review): never read in this method - the target is used
   *     as-is. Confirm whether a robot-/field-frame conversion was intended here.
   * @return the clamped {@link FiringSolution}; also logged to "FiringSolver/Solution"
   */
  public static FiringSolution computeFiringSolution(
      Translation3d targetPosition, boolean isFieldRelative) {

    Translation3d relTarget = targetPosition;

    double dx = relTarget.getX();
    double dy = relTarget.getY();

    // use the real z difference then apply tunable offsets
    // TODO: Consider removing height offsets once launcher/target calibration is finalized.
    double dz = relTarget.getZ() + (kTargetHeight.get() - kLaunchHeight.get());

    double horizontalDistance = Math.hypot(dx, dy);
    double flatYaw = Math.atan2(dy, dx);

    double velocity = estimateExitVelocity(horizontalDistance, dz);
    double pitch = estimateLaunchAngle(horizontalDistance, dz, velocity);

    // Clamp pitch/yaw to safe turret limits.
    // (Math.atan2 already returns values in [-pi, pi], so the yaw clamp is defensive only.)
    pitch = MathUtil.clamp(pitch, 0.0, Math.PI / 2);
    flatYaw = MathUtil.clamp(flatYaw, -Math.PI, Math.PI);

    // Create solution once
    FiringSolution solution = new FiringSolution(flatYaw, pitch, velocity);

    // Record output
    Logger.recordOutput("FiringSolver/Solution", solution);

    // Return the same solution
    return solution;
  }

  /**
   * Iteratively estimates the exit velocity (m/s) for a shot covering {@code range} meters
   * horizontally with net height change {@code heightDiff} meters, using a crude
   * fixed-point correction with a gravity + constant-drag drop model.
   *
   * <p>NOTE(review): the correction direction looks suspect. Lowering v0 lengthens the rough
   * flight time (t = range / v0) and therefore INCREASES the estimated drop, so with a
   * positive error the update {@code v0 -= error * 0.5} pushes v0 further from balance until
   * the 2.0 m/s floor clamps it. It also compares estimated drop directly against the
   * required climb {@code heightDiff}. Confirm the intended model before tuning on-robot.
   */
  private static double estimateExitVelocity(double range, double heightDiff) {
    double v0 = 10.0; // initial guess, m/s
    int iterations = (int) kVelocityIterationCount.get();
    for (int i = 0; i < iterations; i++) {
      // Drag deceleration at the current speed: 0.5 * rho * Cd * A * v^2 / m.
      double dragAccel =
          0.5
              * AIR_DENSITY
              * kDragCoefficient.get()
              * kProjectileArea.get()
              * v0
              * v0
              / kProjectileMass.get();
      double t = range / v0; // rough time estimate
      double estDrop = 0.5 * GRAVITY * t * t + 0.5 * dragAccel * t * t;
      double error = estDrop - heightDiff;
      v0 -= error * 0.5;
      // Keep the estimate inside [2.0, kMaxExitVelocity].
      v0 = Math.max(2.0, Math.min(kMaxExitVelocity.get(), v0));
      if (Math.abs(error) < kVelocityTolerance.get()) break;
    }
    return v0;
  }

  /**
   * Newton-style iteration for the launch pitch (radians) of a drag-free ballistic arc that
   * reaches height {@code heightDiff} at horizontal distance {@code range} with the given
   * exit velocity.
   *
   * <p>The derivative term ignores dt/d(angle), so each step is only an approximate Newton
   * step; the 1e-6 offset appears to guard against a zero derivative near vertical - confirm.
   * The angle is not clamped inside the loop; the caller clamps the final pitch to [0, pi/2].
   */
  private static double estimateLaunchAngle(double range, double heightDiff, double velocity) {
    double angle = 0.4; // initial guess, rad (~23 deg)
    int iterations = (int) kAngleIterationCount.get();
    for (int i = 0; i < iterations; i++) {
      double sin = Math.sin(angle);
      double cos = Math.cos(angle);
      double t = range / (velocity * cos); // time of flight at this angle
      // Height error of the drag-free arc at the target range.
      double y = velocity * sin * t - 0.5 * GRAVITY * t * t - heightDiff;
      double dyda = velocity * cos * t + 1e-6;
      angle -= y / dyda;
      if (Math.abs(y) < kAngleTolerance.get()) break;
    }
    return angle;
  }

  /** Logs whether a shot hit or missed. */
  public static void logShotResult(boolean hit) {
    Logger.recordOutput("FiringSolver/Hit", hit);
  }

  /** Immutable result: turret yaw (rad), pitch (rad), and exit velocity (m/s). */
  public record FiringSolution(double yawRadians, double pitchRadians, double exitVelocity) {}
}
diff --git a/src/main/java/frc/robot/outReach/RobotContainer.java b/src/main/java/frc/robot/outReach/RobotContainer.java
index 1cb5f33..48bbfde 100644
--- a/src/main/java/frc/robot/outReach/RobotContainer.java
+++ b/src/main/java/frc/robot/outReach/RobotContainer.java
@@ -31,10 +31,9 @@ import frc.robot.generic.util.LoggedTalon.NoOppTalonFX;
 import frc.robot.generic.util.LoggedTalon.PhoenixTalonFX;
 import frc.robot.generic.util.LoggedTalon.SimpleMotorSim;
-import frc.robot.outReach.subsystems.turret.Turret;
 import frc.robot.generic.util.RobotConfig;
 import frc.robot.generic.util.SwerveBuilder;
-
+import frc.robot.outReach.subsystems.turret.Turret;
 import org.littletonrobotics.junction.networktables.LoggedDashboardChooser;
 
 /**
diff --git a/src/main/java/frc/robot/outReach/subsystems/shooter/Shooter.java b/src/main/java/frc/robot/outReach/subsystems/shooter/Shooter.java
index 0e97dd4..e98a6d8 100644
--- a/src/main/java/frc/robot/outReach/subsystems/shooter/Shooter.java
+++ b/src/main/java/frc/robot/outReach/subsystems/shooter/Shooter.java
@@ -3,9 +3,6 @@
 // the WPILib BSD license file in the root directory of this project.
package frc.robot.outReach.subsystems.shooter; -import frc.robot.generic.util.LoggedTalon.LoggedTalonFX; - -import java.util.function.DoubleSupplier; import com.ctre.phoenix6.configs.CurrentLimitsConfigs; import com.ctre.phoenix6.configs.FeedbackConfigs; @@ -13,13 +10,12 @@ import com.ctre.phoenix6.configs.Slot0Configs; import com.ctre.phoenix6.configs.TalonFXConfiguration; import com.ctre.phoenix6.controls.NeutralOut; -import com.ctre.phoenix6.controls.VelocityDutyCycle; import com.ctre.phoenix6.controls.VelocityVoltage; -import com.ctre.phoenix6.hardware.TalonFX; import com.ctre.phoenix6.signals.NeutralModeValue; - import edu.wpi.first.wpilibj2.command.Command; import edu.wpi.first.wpilibj2.command.SubsystemBase; +import frc.robot.generic.util.LoggedTalon.LoggedTalonFX; +import java.util.function.DoubleSupplier; public class Shooter extends SubsystemBase { private final LoggedTalonFX shooterMotor; @@ -28,29 +24,30 @@ public class Shooter extends SubsystemBase { /** Creates a new Torrent. 
*/ public Shooter(LoggedTalonFX shooterMotor) { - var config = - new TalonFXConfiguration() - .withCurrentLimits( - new CurrentLimitsConfigs().withStatorCurrentLimit(30).withSupplyCurrentLimit(60)) - .withSlot0(new Slot0Configs().withKP(0).withKI(0).withKD(0).withKS(0).withKV(0)) - .withFeedback(new FeedbackConfigs().withSensorToMechanismRatio(0)) - .withMotorOutput(new MotorOutputConfigs().withNeutralMode(NeutralModeValue.Coast)); + var config = + new TalonFXConfiguration() + .withCurrentLimits( + new CurrentLimitsConfigs().withStatorCurrentLimit(30).withSupplyCurrentLimit(60)) + .withSlot0(new Slot0Configs().withKP(0).withKI(0).withKD(0).withKS(0).withKV(0)) + .withFeedback(new FeedbackConfigs().withSensorToMechanismRatio(0)) + .withMotorOutput(new MotorOutputConfigs().withNeutralMode(NeutralModeValue.Coast)); + + this.shooterMotor = shooterMotor.withConfig(config); + } - this.shooterMotor = shooterMotor.withConfig(config); + public Command spinToVelocityAndRotationsCommand(DoubleSupplier velocityRPS) { + return runEnd( + () -> { + shooterMotor.setControl(velocityPIDRequest.withVelocity(velocityRPS.getAsDouble())); + }, + () -> { + shooterMotor.setControl(neutralOut); + }); } -public Command spinToVelocityAndRotationsCommand(DoubleSupplier velocityRPS) { - return runEnd(()->{ - shooterMotor.setControl(velocityPIDRequest.withVelocity(velocityRPS.getAsDouble())); - }, ()-> { - shooterMotor.setControl(neutralOut); - }); -} - -@Override + @Override public void periodic() { // This method will be called once per scheduler run shooterMotor.periodic(); } } - \ No newline at end of file