diff --git a/api/main.py b/api/main.py index 1680ec1..f866f7b 100644 --- a/api/main.py +++ b/api/main.py @@ -376,6 +376,13 @@ class OptimizationLegModel(BaseModel): pitch_deg: Optional[float] = None # Weather provenance per leg data_source: Optional[str] = None # "forecast (high confidence)" etc. + # Extended weather fields (SPEC-P1) + swell_hs_m: Optional[float] = None + windsea_hs_m: Optional[float] = None + current_effect_kts: Optional[float] = None + visibility_m: Optional[float] = None + sst_celsius: Optional[float] = None + ice_concentration: Optional[float] = None class SafetySummary(BaseModel): @@ -910,6 +917,104 @@ def get_current_field( return current_data +def get_sst_field( + lat_min: float, lat_max: float, + lon_min: float, lon_max: float, + resolution: float = 1.0, + time: datetime = None, +) -> WeatherData: + """ + Get SST field data. + + Provider chain: CMEMS live → Synthetic. + """ + if time is None: + time = datetime.utcnow() + + cache_key = _get_cache_key("sst", lat_min, lat_max, lon_min, lon_max) + cached = _redis_cache_get(cache_key) + if cached is not None: + return cached + + sst_data = copernicus_provider.fetch_sst_data(lat_min, lat_max, lon_min, lon_max, time) + if sst_data is None: + logger.info("CMEMS SST unavailable, using synthetic data") + sst_data = synthetic_provider.generate_sst_field( + lat_min, lat_max, lon_min, lon_max, resolution, time + ) + + _redis_cache_put(cache_key, sst_data, CACHE_TTL_MINUTES * 60) + return sst_data + + +def get_visibility_field( + lat_min: float, lat_max: float, + lon_min: float, lon_max: float, + resolution: float = 1.0, + time: datetime = None, +) -> WeatherData: + """ + Get visibility field data. + + Provider chain: GFS live → Synthetic. 
+ """ + if time is None: + time = datetime.utcnow() + + cache_key = _get_cache_key("visibility", lat_min, lat_max, lon_min, lon_max) + cached = _redis_cache_get(cache_key) + if cached is not None: + return cached + + vis_data = gfs_provider.fetch_visibility_data(lat_min, lat_max, lon_min, lon_max, time) + if vis_data is None: + logger.info("GFS visibility unavailable, using synthetic data") + vis_data = synthetic_provider.generate_visibility_field( + lat_min, lat_max, lon_min, lon_max, resolution, time + ) + + _redis_cache_put(cache_key, vis_data, CACHE_TTL_MINUTES * 60) + return vis_data + + +def get_ice_field( + lat_min: float, lat_max: float, + lon_min: float, lon_max: float, + resolution: float = 1.0, + time: datetime = None, +) -> WeatherData: + """ + Get sea ice concentration field. + + Provider chain: Redis → DB → CMEMS live → Synthetic. + """ + if time is None: + time = datetime.utcnow() + + cache_key = _get_cache_key("ice", lat_min, lat_max, lon_min, lon_max) + cached = _redis_cache_get(cache_key) + if cached is not None: + return cached + + # Try PostgreSQL (from previous ice forecast ingestion) + if db_weather is not None: + ice_data = db_weather.get_ice_from_db(lat_min, lat_max, lon_min, lon_max, time) + if ice_data is not None: + logger.info("Ice data served from DB") + _redis_cache_put(cache_key, ice_data, CACHE_TTL_MINUTES * 60) + return ice_data + + ice_data = copernicus_provider.fetch_ice_data(lat_min, lat_max, lon_min, lon_max, time) + if ice_data is None: + logger.info("CMEMS ice data unavailable, using synthetic data") + ice_data = synthetic_provider.generate_ice_field( + lat_min, lat_max, lon_min, lon_max, resolution, time + ) + + _redis_cache_put(cache_key, ice_data, CACHE_TTL_MINUTES * 60) + return ice_data + + def get_weather_at_point(lat: float, lon: float, time: datetime) -> Tuple[Dict, Optional[WeatherDataSource]]: """ Get weather at a specific point. 
@@ -1250,7 +1355,10 @@ async def api_get_wind_field( # High-resolution ocean mask (0.05° ≈ 5.5km) via vectorized numpy mask_lats, mask_lons, ocean_mask = _build_ocean_mask(lat_min, lat_max, lon_min, lon_max, step=0.05) - return { + # SPEC-P1: Piggyback SST on wind endpoint (same bounding box) + sst_data = get_sst_field(lat_min, lat_max, lon_min, lon_max, resolution, time) + + response = { "parameter": "wind", "time": time.isoformat(), "bbox": { @@ -1275,6 +1383,14 @@ async def api_get_wind_field( else "synthetic" ), } + if sst_data is not None and sst_data.values is not None: + response["sst"] = { + "lats": sst_data.lats.tolist(), + "lons": sst_data.lons.tolist(), + "data": sst_data.values.tolist(), + "unit": "°C", + } + return response @app.get("/api/weather/wind/velocity") @@ -1361,6 +1477,108 @@ async def api_get_wind_velocity_format( # In-progress prefetch tracking _prefetch_running = False _prefetch_lock = None # Will be a threading.Lock +_last_wind_prefetch_run = None # (run_date, run_hour) tuple from last successful prefetch + +# File-based cache for wind forecast frames (shared across gunicorn workers) +_WIND_CACHE_DIR = Path("/tmp/windmar_wind_cache") +_WIND_CACHE_DIR.mkdir(exist_ok=True) + + +def _wind_cache_key(lat_min, lat_max, lon_min, lon_max): + return f"wind_{lat_min:.0f}_{lat_max:.0f}_{lon_min:.0f}_{lon_max:.0f}" + + +def _wind_cache_get(cache_key: str) -> dict | None: + p = _WIND_CACHE_DIR / f"{cache_key}.json" + if p.exists(): + try: + import json as _json + return _json.loads(p.read_text()) + except Exception: + return None + return None + + +def _wind_cache_put(cache_key: str, data: dict): + import json as _json + p = _WIND_CACHE_DIR / f"{cache_key}.json" + tmp = p.with_suffix(".tmp") + tmp.write_text(_json.dumps(data)) + tmp.rename(p) # atomic on same filesystem + + +def _build_wind_frames(lat_min, lat_max, lon_min, lon_max, run_date, run_hour): + """Process all cached GRIB files into leaflet-velocity frames dict. 
+ + Called once after prefetch completes. Result is saved to file cache. + """ + run_time = datetime.strptime(f"{run_date}{run_hour}", "%Y%m%d%H") + hours_status = gfs_provider.get_cached_forecast_hours(lat_min, lat_max, lon_min, lon_max, run_date, run_hour) + + frames = {} + for h_info in hours_status: + if not h_info["cached"]: + continue + fh = h_info["forecast_hour"] + wind_data = gfs_provider.fetch_wind_data(lat_min, lat_max, lon_min, lon_max, forecast_hour=fh, run_date=run_date, run_hour=run_hour) + if wind_data is None: + continue + + u_masked, v_masked = _apply_ocean_mask_velocity( + wind_data.u_component, wind_data.v_component, + wind_data.lats, wind_data.lons, + ) + actual_lats = wind_data.lats + actual_lons = wind_data.lons + actual_dx = abs(float(actual_lons[1] - actual_lons[0])) if len(actual_lons) > 1 else 0.25 + actual_dy = abs(float(actual_lats[1] - actual_lats[0])) if len(actual_lats) > 1 else 0.25 + + if len(actual_lats) > 1 and actual_lats[1] > actual_lats[0]: + u_ordered = u_masked[::-1] + v_ordered = v_masked[::-1] + lat_north = float(actual_lats[-1]) + lat_south = float(actual_lats[0]) + else: + u_ordered = u_masked + v_ordered = v_masked + lat_north = float(actual_lats[0]) + lat_south = float(actual_lats[-1]) + + valid_time = run_time + timedelta(hours=fh) + header = { + "parameterCategory": 2, + "parameterNumber": 2, + "lo1": float(actual_lons[0]), + "la1": lat_north, + "lo2": float(actual_lons[-1]), + "la2": lat_south, + "dx": actual_dx, + "dy": actual_dy, + "nx": len(actual_lons), + "ny": len(actual_lats), + "refTime": valid_time.isoformat(), + "forecastHour": fh, + } + frames[str(fh)] = [ + {"header": {**header, "parameterNumber": 2}, "data": u_ordered.flatten().tolist()}, + {"header": {**header, "parameterNumber": 3}, "data": v_ordered.flatten().tolist()}, + ] + logger.info(f"Wind frame f{fh:03d} processed ({len(actual_lats)}x{len(actual_lons)})") + + result = { + "run_date": run_date, + "run_hour": run_hour, + "run_time": 
run_time.isoformat(), + "total_hours": len(GFSDataProvider.FORECAST_HOURS), + "cached_hours": len(frames), + "source": "gfs", + "frames": frames, + } + + cache_key = _wind_cache_key(lat_min, lat_max, lon_min, lon_max) + _wind_cache_put(cache_key, result) + logger.info(f"Wind frames cache saved: {len(frames)} frames, key={cache_key}") + return result def _get_prefetch_lock(): @@ -1382,10 +1600,34 @@ async def api_get_forecast_status( """ Get GFS forecast prefetch status. - Returns current GFS run info and which forecast hours are cached. + Checks the file cache first (instant). Falls back to scanning GRIB files. """ - run_date, run_hour = gfs_provider._get_latest_run() - hours = gfs_provider.get_cached_forecast_hours(lat_min, lat_max, lon_min, lon_max) + cache_key = _wind_cache_key(lat_min, lat_max, lon_min, lon_max) + cached = _wind_cache_get(cache_key) + if cached and not _prefetch_running: + cached_hours = len(cached.get("frames", {})) + return { + "run_date": cached["run_date"], + "run_hour": cached["run_hour"], + "total_hours": cached.get("total_hours", 41), + "cached_hours": cached_hours, + "complete": True, + "prefetch_running": False, + } + + # No file cache — fall back to scanning GRIB files + if _last_wind_prefetch_run: + run_date, run_hour = _last_wind_prefetch_run + else: + run_date, run_hour = gfs_provider._get_latest_run() + hours = gfs_provider.get_cached_forecast_hours(lat_min, lat_max, lon_min, lon_max, run_date, run_hour) + cached_count = sum(1 for h in hours if h["cached"]) + + if cached_count == 0 and not _prefetch_running: + best = gfs_provider.find_best_cached_run(lat_min, lat_max, lon_min, lon_max) + if best: + run_date, run_hour = best + hours = gfs_provider.get_cached_forecast_hours(lat_min, lat_max, lon_min, lon_max, run_date, run_hour) cached_count = sum(1 for h in hours if h["cached"]) total_count = len(hours) @@ -1394,9 +1636,8 @@ async def api_get_forecast_status( "run_hour": run_hour, "total_hours": total_count, "cached_hours": 
cached_count, - "complete": cached_count == total_count, + "complete": cached_count == total_count and not _prefetch_running, "prefetch_running": _prefetch_running, - "hours": hours, } @@ -1422,15 +1663,21 @@ async def api_trigger_forecast_prefetch( lock.release() def _do_prefetch(): - global _prefetch_running + global _prefetch_running, _last_wind_prefetch_run pflock = _get_prefetch_lock() if not pflock.acquire(blocking=False): return try: _prefetch_running = True - logger.info("GFS forecast prefetch started") + # Capture the run info before downloading so status/frames endpoints + # can reference the same run even if the GFS cycle rolls over mid-prefetch. + run_date, run_hour = gfs_provider._get_latest_run() + _last_wind_prefetch_run = (run_date, run_hour) + logger.info(f"GFS forecast prefetch started (run {run_date}/{run_hour}z)") gfs_provider.prefetch_forecast_hours(lat_min, lat_max, lon_min, lon_max) - logger.info("GFS forecast prefetch completed") + logger.info("GFS forecast prefetch completed, building frames cache...") + _build_wind_frames(lat_min, lat_max, lon_min, lon_max, run_date, run_hour) + logger.info("Wind frames cache ready") except Exception as e: logger.error(f"GFS forecast prefetch failed: {e}") finally: @@ -1450,80 +1697,28 @@ async def api_get_forecast_frames( lon_max: float = Query(40.0), ): """ - Bulk endpoint returning all cached GFS forecast frames in leaflet-velocity format. + Return all wind forecast frames from file cache (instant). - Returns a single JSON object with frames keyed by forecast hour. - Call after prefetch completes for best results. + The cache is built once during prefetch. No GRIB parsing happens here. + Serves the raw JSON file to avoid parse+re-serialize overhead. 
""" + from starlette.responses import Response + cache_key = _wind_cache_key(lat_min, lat_max, lon_min, lon_max) + cache_file = _WIND_CACHE_DIR / f"{cache_key}.json" + if cache_file.exists(): + return Response(content=cache_file.read_bytes(), media_type="application/json") + + # No file cache — return empty (prefetch hasn't run or hasn't finished yet) run_date, run_hour = gfs_provider._get_latest_run() run_time = datetime.strptime(f"{run_date}{run_hour}", "%Y%m%d%H") - hours_status = gfs_provider.get_cached_forecast_hours(lat_min, lat_max, lon_min, lon_max) - - frames = {} - for h_info in hours_status: - if not h_info["cached"]: - continue - - fh = h_info["forecast_hour"] - wind_data = gfs_provider.fetch_wind_data(lat_min, lat_max, lon_min, lon_max, forecast_hour=fh) - if wind_data is None: - continue - - # Apply ocean mask - u_masked, v_masked = _apply_ocean_mask_velocity( - wind_data.u_component, wind_data.v_component, - wind_data.lats, wind_data.lons, - ) - - actual_lats = wind_data.lats - actual_lons = wind_data.lons - actual_dx = abs(float(actual_lons[1] - actual_lons[0])) if len(actual_lons) > 1 else 0.25 - actual_dy = abs(float(actual_lats[1] - actual_lats[0])) if len(actual_lats) > 1 else 0.25 - - # leaflet-velocity expects N→S ordering - if len(actual_lats) > 1 and actual_lats[1] > actual_lats[0]: - u_ordered = u_masked[::-1] - v_ordered = v_masked[::-1] - lat_north = float(actual_lats[-1]) - lat_south = float(actual_lats[0]) - else: - u_ordered = u_masked - v_ordered = v_masked - lat_north = float(actual_lats[0]) - lat_south = float(actual_lats[-1]) - - valid_time = run_time + timedelta(hours=fh) - header = { - "parameterCategory": 2, - "parameterNumber": 2, - "lo1": float(actual_lons[0]), - "la1": lat_north, - "lo2": float(actual_lons[-1]), - "la2": lat_south, - "dx": actual_dx, - "dy": actual_dy, - "nx": len(actual_lons), - "ny": len(actual_lats), - "refTime": valid_time.isoformat(), - "forecastHour": fh, - } - - frames[str(fh)] = [ - {"header": 
{**header, "parameterNumber": 2}, "data": u_ordered.flatten().tolist()}, - {"header": {**header, "parameterNumber": 3}, "data": v_ordered.flatten().tolist()}, - ] - - cached_count = len(frames) - total_count = len(GFSDataProvider.FORECAST_HOURS) - return { "run_date": run_date, "run_hour": run_hour, "run_time": run_time.isoformat(), - "total_hours": total_count, - "cached_hours": cached_count, + "total_hours": len(GFSDataProvider.FORECAST_HOURS), + "cached_hours": 0, "source": "gfs", - "frames": frames, + "frames": {}, } @@ -2213,56 +2408,359 @@ async def api_get_current_forecast_frames( return cached -@app.get("/api/weather/waves") -async def api_get_wave_field( - lat_min: float = Query(30.0), - lat_max: float = Query(60.0), - lon_min: float = Query(-15.0), - lon_max: float = Query(40.0), - resolution: float = Query(1.0), - time: Optional[datetime] = None, -): - """ - Get wave height field for visualization. +# ========================================================================= +# Ice Forecast Endpoints (CMEMS, 10-day daily) +# ========================================================================= - Uses Copernicus CMEMS when available, falls back to synthetic data. 
- """ - if time is None: - time = datetime.utcnow() +_ice_prefetch_running = False +_ice_prefetch_lock = None +_ICE_CACHE_DIR = Path("/tmp/windmar_ice_cache") +_ICE_CACHE_DIR.mkdir(exist_ok=True) - # Get wind first for synthetic fallback - wind_data = get_wind_field(lat_min, lat_max, lon_min, lon_max, resolution, time) +_REDIS_ICE_PREFETCH_LOCK = "windmar:ice_prefetch_lock" +_REDIS_ICE_PREFETCH_STATUS = "windmar:ice_prefetch_running" - # Get waves (CMEMS or synthetic) - wave_data = get_wave_field(lat_min, lat_max, lon_min, lon_max, resolution, wind_data) - # High-resolution ocean mask (0.05° ≈ 5.5km) via vectorized numpy - mask_lats, mask_lons, ocean_mask = _build_ocean_mask(lat_min, lat_max, lon_min, lon_max, step=0.05) +def _get_ice_prefetch_lock(): + global _ice_prefetch_lock + if _ice_prefetch_lock is None: + import threading + _ice_prefetch_lock = threading.Lock() + return _ice_prefetch_lock - # Build response with combined data - response = { - "parameter": "wave_height", - "time": time.isoformat(), - "bbox": { - "lat_min": lat_min, - "lat_max": lat_max, - "lon_min": lon_min, - "lon_max": lon_max, - }, - "resolution": resolution, - "nx": len(wave_data.lons), - "ny": len(wave_data.lats), - "lats": wave_data.lats.tolist(), - "lons": wave_data.lons.tolist(), - "data": wave_data.values.tolist(), - "unit": "m", - "ocean_mask": ocean_mask, - "ocean_mask_lats": mask_lats, - "ocean_mask_lons": mask_lons, - "source": "copernicus" if copernicus_provider._has_copernicusmarine else "synthetic", - "colorscale": { - "min": 0, - "max": 6, + +def _is_ice_prefetch_running() -> bool: + r = _get_redis() + if r is not None: + try: + return r.exists(_REDIS_ICE_PREFETCH_STATUS) > 0 + except Exception: + pass + return _ice_prefetch_running + + +def _ice_cache_path(cache_key: str) -> Path: + return _ICE_CACHE_DIR / f"{cache_key}.json" + + +def _ice_cache_get(cache_key: str) -> dict | None: + p = _ice_cache_path(cache_key) + if p.exists(): + try: + import json as _json + return 
_json.loads(p.read_text()) + except Exception: + return None + return None + + +def _ice_cache_put(cache_key: str, data: dict): + import json as _json + p = _ice_cache_path(cache_key) + tmp = p.with_suffix(".tmp") + tmp.write_text(_json.dumps(data)) + tmp.rename(p) + + +def _rebuild_ice_cache_from_db(cache_key, lat_min, lat_max, lon_min, lon_max): + """Rebuild ice forecast file cache from PostgreSQL data.""" + if db_weather is None: + return None + + run_time, hours = db_weather.get_available_hours_by_source("cmems_ice") + if not hours: + return None + + logger.info(f"Rebuilding ice cache from DB: {len(hours)} hours") + + grids = db_weather.get_grids_for_timeline( + "cmems_ice", ["ice_siconc"], + lat_min, lat_max, lon_min, lon_max, hours + ) + + if not grids or "ice_siconc" not in grids or not grids["ice_siconc"]: + return None + + first_fh = min(grids["ice_siconc"].keys()) + lats_full, lons_full, _ = grids["ice_siconc"][first_fh] + max_dim = max(len(lats_full), len(lons_full)) + STEP = max(1, round(max_dim / 250)) + shared_lats = lats_full[::STEP].tolist() + shared_lons = lons_full[::STEP].tolist() + + mask_lats_arr, mask_lons_arr, ocean_mask_arr = _build_ocean_mask( + lat_min, lat_max, lon_min, lon_max + ) + + frames = {} + for fh in sorted(hours): + if fh in grids["ice_siconc"]: + _, _, d = grids["ice_siconc"][fh] + frames[str(fh)] = { + "data": np.round(d[::STEP, ::STEP], 4).tolist(), + } + + cache_data = { + "run_time": run_time.isoformat() if run_time else "", + "total_hours": len(frames), + "cached_hours": len(frames), + "source": "cmems", + "lats": shared_lats, + "lons": shared_lons, + "ny": len(shared_lats), + "nx": len(shared_lons), + "ocean_mask": ocean_mask_arr, + "ocean_mask_lats": mask_lats_arr, + "ocean_mask_lons": mask_lons_arr, + "frames": frames, + } + + _ice_cache_put(cache_key, cache_data) + logger.info(f"Ice cache rebuilt from DB: {len(frames)} frames") + return cache_data + + +@app.get("/api/weather/forecast/ice/status") +async def 
api_get_ice_forecast_status(
+    lat_min: float = Query(30.0),
+    lat_max: float = Query(60.0),
+    lon_min: float = Query(-15.0),
+    lon_max: float = Query(40.0),
+):
+    """Get ice forecast prefetch status."""
+    cache_key = f"ice_{lat_min:.0f}_{lat_max:.0f}_{lon_min:.0f}_{lon_max:.0f}"
+    cached = _ice_cache_get(cache_key)
+
+    prefetch_running = _is_ice_prefetch_running()
+
+    if cached:
+        cached_hours = len(cached.get("frames", {}))
+        # total_hours matches cached_hours once prefetch finishes (CMEMS may have < 10 days)
+        total_hours = cached_hours if not prefetch_running else max(cached_hours + 1, 10)
+        return {
+            "total_hours": total_hours,
+            "cached_hours": cached_hours,
+            "complete": cached_hours > 0 and not prefetch_running,
+            "prefetch_running": prefetch_running,
+        }
+
+    return {
+        "total_hours": 10,
+        "cached_hours": 0,
+        "complete": False,
+        "prefetch_running": prefetch_running,
+    }
+
+
+@app.post("/api/weather/forecast/ice/prefetch")
+async def api_trigger_ice_forecast_prefetch(
+    background_tasks: BackgroundTasks,
+    lat_min: float = Query(30.0),
+    lat_max: float = Query(60.0),
+    lon_min: float = Query(-15.0),
+    lon_max: float = Query(40.0),
+):
+    """Trigger background download of CMEMS ice forecast (10-day daily)."""
+    global _ice_prefetch_running
+
+    if _is_ice_prefetch_running():
+        return {"status": "already_running", "message": "Ice prefetch is already in progress"}
+
+    lock = _get_ice_prefetch_lock()
+    if not lock.acquire(blocking=False):
+        return {"status": "already_running", "message": "Ice prefetch is already in progress"}
+    lock.release()
+
+    def _do_ice_prefetch():
+        global _ice_prefetch_running
+        pflock = _get_ice_prefetch_lock()
+        if not pflock.acquire(blocking=False):
+            return
+
+        r = _get_redis()
+        try:
+            if r is not None:
+                acquired = r.set(_REDIS_ICE_PREFETCH_LOCK, "1", nx=True, ex=1200)
+                if not acquired:
+                    r = None  # another worker owns the Redis lock; finally releases pflock once and must not delete that worker's keys
+                    return
+                r.setex(_REDIS_ICE_PREFETCH_STATUS, 1200, "1")
+
+            _ice_prefetch_running = True
+
+            cache_key_chk = 
f"ice_{lat_min:.0f}_{lat_max:.0f}_{lon_min:.0f}_{lon_max:.0f}" + existing = _ice_cache_get(cache_key_chk) + if existing and len(existing.get("frames", {})) >= 10 and _cache_covers_bounds(existing, lat_min, lat_max, lon_min, lon_max): + logger.info("Ice forecast file cache already complete, skipping CMEMS download") + return + + # Try to rebuild from DB before downloading from CMEMS + if db_weather is not None: + rebuilt = _rebuild_ice_cache_from_db(cache_key_chk, lat_min, lat_max, lon_min, lon_max) + if rebuilt and len(rebuilt.get("frames", {})) >= 10 and _cache_covers_bounds(rebuilt, lat_min, lat_max, lon_min, lon_max): + logger.info("Ice forecast rebuilt from DB, skipping CMEMS download") + return + + # Remove stale file cache + stale_path = _ice_cache_path(cache_key_chk) + if stale_path.exists(): + stale_path.unlink(missing_ok=True) + logger.info(f"Removed stale ice cache: {cache_key_chk}") + + logger.info("CMEMS ice forecast prefetch started") + + result = copernicus_provider.fetch_ice_forecast(lat_min, lat_max, lon_min, lon_max) + if result is None: + # Fallback to synthetic + logger.info("CMEMS ice forecast unavailable, generating synthetic") + result = synthetic_provider.generate_ice_forecast(lat_min, lat_max, lon_min, lon_max) + + if not result: + logger.error("Ice forecast fetch returned empty") + return + + first_wd = next(iter(result.values())) + max_dim = max(len(first_wd.lats), len(first_wd.lons)) + STEP = max(1, round(max_dim / 250)) + logger.info(f"Ice forecast: grid {len(first_wd.lats)}x{len(first_wd.lons)}, STEP={STEP}") + sub_lats = first_wd.lats[::STEP] + sub_lons = first_wd.lons[::STEP] + + mask_lats_arr, mask_lons_arr, ocean_mask_arr = _build_ocean_mask( + lat_min, lat_max, lon_min, lon_max + ) + + frames = {} + for fh, wd in sorted(result.items()): + siconc = wd.ice_concentration if wd.ice_concentration is not None else wd.values + if siconc is not None: + frames[str(fh)] = { + "data": np.round(siconc[::STEP, ::STEP], 4).tolist(), + } + + 
cache_key = f"ice_{lat_min:.0f}_{lat_max:.0f}_{lon_min:.0f}_{lon_max:.0f}" + _ice_cache_put(cache_key, { + "run_time": first_wd.time.isoformat() if first_wd.time else "", + "total_hours": len(frames), + "cached_hours": len(frames), + "source": "cmems", + "lats": sub_lats.tolist() if hasattr(sub_lats, 'tolist') else list(sub_lats), + "lons": sub_lons.tolist() if hasattr(sub_lons, 'tolist') else list(sub_lons), + "ny": len(sub_lats), + "nx": len(sub_lons), + "ocean_mask": ocean_mask_arr, + "ocean_mask_lats": mask_lats_arr, + "ocean_mask_lons": mask_lons_arr, + "frames": frames, + }) + logger.info(f"Ice forecast cached: {len(frames)} frames") + + # Store in PostgreSQL for persistence + if weather_ingestion is not None: + try: + logger.info("Ingesting ice forecast frames into PostgreSQL...") + weather_ingestion.ingest_ice_forecast_frames(result) + except Exception as db_e: + logger.error(f"Ice forecast DB ingestion failed: {db_e}") + + except Exception as e: + logger.error(f"Ice forecast prefetch failed: {e}") + finally: + _ice_prefetch_running = False + if r is not None: + try: + r.delete(_REDIS_ICE_PREFETCH_LOCK, _REDIS_ICE_PREFETCH_STATUS) + except Exception: + pass + pflock.release() + + background_tasks.add_task(_do_ice_prefetch) + return {"status": "started", "message": "Ice forecast prefetch triggered in background"} + + +@app.get("/api/weather/forecast/ice/frames") +async def api_get_ice_forecast_frames( + lat_min: float = Query(30.0), + lat_max: float = Query(60.0), + lon_min: float = Query(-15.0), + lon_max: float = Query(40.0), +): + """Return all cached CMEMS ice forecast frames.""" + cache_key = f"ice_{lat_min:.0f}_{lat_max:.0f}_{lon_min:.0f}_{lon_max:.0f}" + cached = _ice_cache_get(cache_key) + + # Fallback: rebuild from PostgreSQL + if not cached: + cached = await asyncio.to_thread( + _rebuild_ice_cache_from_db, cache_key, lat_min, lat_max, lon_min, lon_max + ) + + if not cached: + return { + "run_time": "", + "total_hours": 0, + "cached_hours": 0, + 
"source": "none", + "lats": [], + "lons": [], + "ny": 0, + "nx": 0, + "frames": {}, + } + + return cached + + +@app.get("/api/weather/waves") +async def api_get_wave_field( + lat_min: float = Query(30.0), + lat_max: float = Query(60.0), + lon_min: float = Query(-15.0), + lon_max: float = Query(40.0), + resolution: float = Query(1.0), + time: Optional[datetime] = None, +): + """ + Get wave height field for visualization. + + Uses Copernicus CMEMS when available, falls back to synthetic data. + """ + if time is None: + time = datetime.utcnow() + + # Get wind first for synthetic fallback + wind_data = get_wind_field(lat_min, lat_max, lon_min, lon_max, resolution, time) + + # Get waves (CMEMS or synthetic) + wave_data = get_wave_field(lat_min, lat_max, lon_min, lon_max, resolution, wind_data) + + # High-resolution ocean mask (0.05° ≈ 5.5km) via vectorized numpy + mask_lats, mask_lons, ocean_mask = _build_ocean_mask(lat_min, lat_max, lon_min, lon_max, step=0.05) + + # Build response with combined data + response = { + "parameter": "wave_height", + "time": time.isoformat(), + "bbox": { + "lat_min": lat_min, + "lat_max": lat_max, + "lon_min": lon_min, + "lon_max": lon_max, + }, + "resolution": resolution, + "nx": len(wave_data.lons), + "ny": len(wave_data.lats), + "lats": wave_data.lats.tolist(), + "lons": wave_data.lons.tolist(), + "data": wave_data.values.tolist(), + "unit": "m", + "ocean_mask": ocean_mask, + "ocean_mask_lats": mask_lats, + "ocean_mask_lons": mask_lons, + "source": "copernicus" if copernicus_provider._has_copernicusmarine else "synthetic", + "colorscale": { + "min": 0, + "max": 6, "colors": ["#00ff00", "#ffff00", "#ff8800", "#ff0000", "#800000"], }, } @@ -2432,6 +2930,223 @@ async def api_get_weather_point( } +# ============================================================================ +# API Endpoints - Extended Weather Fields (SPEC-P1) +# ============================================================================ + +@app.get("/api/weather/sst") 
+async def api_get_sst_field( + lat_min: float = Query(30.0, ge=-90, le=90), + lat_max: float = Query(60.0, ge=-90, le=90), + lon_min: float = Query(-15.0, ge=-180, le=180), + lon_max: float = Query(40.0, ge=-180, le=180), + resolution: float = Query(1.0, ge=0.25, le=5.0), + time: Optional[datetime] = None, +): + """ + Get sea surface temperature field for visualization. + + Returns SST grid in degrees Celsius. + Uses CMEMS physics when available, falls back to synthetic data. + """ + if time is None: + time = datetime.utcnow() + + sst_data = get_sst_field(lat_min, lat_max, lon_min, lon_max, resolution, time) + + mask_lats, mask_lons, ocean_mask = _build_ocean_mask(lat_min, lat_max, lon_min, lon_max, step=0.05) + + return { + "parameter": "sst", + "time": time.isoformat(), + "bbox": { + "lat_min": lat_min, + "lat_max": lat_max, + "lon_min": lon_min, + "lon_max": lon_max, + }, + "resolution": resolution, + "nx": len(sst_data.lons), + "ny": len(sst_data.lats), + "lats": sst_data.lats.tolist(), + "lons": sst_data.lons.tolist(), + "data": np.nan_to_num(sst_data.values, nan=15.0).tolist(), + "unit": "°C", + "ocean_mask": ocean_mask, + "ocean_mask_lats": mask_lats, + "ocean_mask_lons": mask_lons, + "source": "copernicus" if copernicus_provider._has_copernicusmarine else "synthetic", + "colorscale": { + "min": -2, + "max": 32, + "colors": ["#0000ff", "#00ccff", "#00ff88", "#ffff00", "#ff8800", "#ff0000"], + }, + } + + +@app.get("/api/weather/visibility") +async def api_get_visibility_field( + lat_min: float = Query(30.0, ge=-90, le=90), + lat_max: float = Query(60.0, ge=-90, le=90), + lon_min: float = Query(-15.0, ge=-180, le=180), + lon_max: float = Query(40.0, ge=-180, le=180), + resolution: float = Query(1.0, ge=0.25, le=5.0), + time: Optional[datetime] = None, +): + """ + Get visibility field for visualization. + + Returns visibility grid in kilometers. + Uses GFS when available, falls back to synthetic data. 
+ """ + if time is None: + time = datetime.utcnow() + + vis_data = get_visibility_field(lat_min, lat_max, lon_min, lon_max, resolution, time) + + mask_lats, mask_lons, ocean_mask = _build_ocean_mask(lat_min, lat_max, lon_min, lon_max, step=0.05) + + return { + "parameter": "visibility", + "time": time.isoformat(), + "bbox": { + "lat_min": lat_min, + "lat_max": lat_max, + "lon_min": lon_min, + "lon_max": lon_max, + }, + "resolution": resolution, + "nx": len(vis_data.lons), + "ny": len(vis_data.lats), + "lats": vis_data.lats.tolist(), + "lons": vis_data.lons.tolist(), + "data": np.nan_to_num(vis_data.values, nan=50.0).tolist(), + "unit": "km", + "ocean_mask": ocean_mask, + "ocean_mask_lats": mask_lats, + "ocean_mask_lons": mask_lons, + "source": "gfs" if vis_data.time is not None else "synthetic", + "colorscale": { + "min": 0, + "max": 50, + "colors": ["#ff0000", "#ff8800", "#ffff00", "#88ff00", "#00ff00"], + }, + } + + +@app.get("/api/weather/ice") +async def api_get_ice_field( + lat_min: float = Query(30.0, ge=-90, le=90), + lat_max: float = Query(60.0, ge=-90, le=90), + lon_min: float = Query(-15.0, ge=-180, le=180), + lon_max: float = Query(40.0, ge=-180, le=180), + resolution: float = Query(1.0, ge=0.25, le=5.0), + time: Optional[datetime] = None, +): + """ + Get sea ice concentration field for visualization. + + Returns ice concentration grid as fraction (0-1). + Uses CMEMS when available, falls back to synthetic data. + Only relevant for high-latitude regions (>55°). 
+ """ + if time is None: + time = datetime.utcnow() + + ice_data = get_ice_field(lat_min, lat_max, lon_min, lon_max, resolution, time) + + mask_lats, mask_lons, ocean_mask = _build_ocean_mask(lat_min, lat_max, lon_min, lon_max, step=0.05) + + return { + "parameter": "ice_concentration", + "time": time.isoformat(), + "bbox": { + "lat_min": lat_min, + "lat_max": lat_max, + "lon_min": lon_min, + "lon_max": lon_max, + }, + "resolution": resolution, + "nx": len(ice_data.lons), + "ny": len(ice_data.lats), + "lats": ice_data.lats.tolist(), + "lons": ice_data.lons.tolist(), + "data": np.nan_to_num(ice_data.values, nan=0.0).tolist(), + "unit": "fraction", + "ocean_mask": ocean_mask, + "ocean_mask_lats": mask_lats, + "ocean_mask_lons": mask_lons, + "source": "copernicus" if copernicus_provider._has_copernicusmarine else "synthetic", + "colorscale": { + "min": 0, + "max": 1, + "colors": ["#ffffff", "#ccddff", "#6688ff", "#0033cc", "#001166"], + }, + } + + +@app.get("/api/weather/swell") +async def api_get_swell_field( + lat_min: float = Query(30.0, ge=-90, le=90), + lat_max: float = Query(60.0, ge=-90, le=90), + lon_min: float = Query(-15.0, ge=-180, le=180), + lon_max: float = Query(40.0, ge=-180, le=180), + resolution: float = Query(1.0, ge=0.25, le=5.0), + time: Optional[datetime] = None, +): + """ + Get partitioned swell field (primary swell + wind-sea). + + Returns swell and wind-sea decomposition from CMEMS wave data. + Same query params as /api/weather/waves. 
+ """ + if time is None: + time = datetime.utcnow() + + wave_data = get_wave_field(lat_min, lat_max, lon_min, lon_max, resolution) + + mask_lats, mask_lons, ocean_mask = _build_ocean_mask(lat_min, lat_max, lon_min, lon_max, step=0.05) + + # Build swell decomposition response + has_decomposition = wave_data.swell_height is not None + swell_hs = wave_data.swell_height.tolist() if has_decomposition else None + swell_tp = wave_data.swell_period.tolist() if wave_data.swell_period is not None else None + swell_dir = wave_data.swell_direction.tolist() if wave_data.swell_direction is not None else None + windsea_hs = wave_data.windwave_height.tolist() if wave_data.windwave_height is not None else None + windsea_tp = wave_data.windwave_period.tolist() if wave_data.windwave_period is not None else None + windsea_dir = wave_data.windwave_direction.tolist() if wave_data.windwave_direction is not None else None + + return { + "parameter": "swell", + "time": time.isoformat(), + "bbox": { + "lat_min": lat_min, + "lat_max": lat_max, + "lon_min": lon_min, + "lon_max": lon_max, + }, + "resolution": resolution, + "has_decomposition": has_decomposition, + "nx": len(wave_data.lons), + "ny": len(wave_data.lats), + "lats": wave_data.lats.tolist(), + "lons": wave_data.lons.tolist(), + "total_hs": wave_data.values.tolist(), + "data": np.nan_to_num(wave_data.values, nan=0.0).tolist(), + "swell_hs": swell_hs, + "swell_tp": swell_tp, + "swell_dir": swell_dir, + "windsea_hs": windsea_hs, + "windsea_tp": windsea_tp, + "windsea_dir": windsea_dir, + "unit": "m", + "ocean_mask": ocean_mask, + "ocean_mask_lats": mask_lats, + "ocean_mask_lons": mask_lons, + "source": "copernicus" if has_decomposition else "synthetic", + } + + # ============================================================================ # API Endpoints - Routes (Layer 2) # ============================================================================ @@ -2631,8 +3346,13 @@ async def calculate_voyage(request: VoyageRequest): 
logger.info(f" Waves loaded in {_time.monotonic()-t1:.1f}s") t2 = _time.monotonic() currents = get_current_field(lat_min, lat_max, lon_min, lon_max, 0.5) - logger.info(f" Currents loaded in {_time.monotonic()-t2:.1f}s. Total prefetch: {_time.monotonic()-t0:.1f}s") - grid_wx = GridWeatherProvider(wind, waves, currents) + logger.info(f" Currents loaded in {_time.monotonic()-t2:.1f}s") + # Extended fields (SPEC-P1) + sst = get_sst_field(lat_min, lat_max, lon_min, lon_max, 0.5, departure) + vis = get_visibility_field(lat_min, lat_max, lon_min, lon_max, 0.5, departure) + ice = get_ice_field(lat_min, lat_max, lon_min, lon_max, 0.5, departure) + logger.info(f" Total prefetch: {_time.monotonic()-t0:.1f}s (incl. SST/vis/ice)") + grid_wx = GridWeatherProvider(wind, waves, currents, sst, vis, ice) data_source_type = "temporal" if used_temporal else "forecast" wx_callable = temporal_wx.get_weather if temporal_wx else grid_wx.get_weather @@ -3034,8 +3754,13 @@ def _optimize_route_sync(request: "OptimizationRequest") -> "OptimizationRespons logger.info(f" Waves loaded in {_time.monotonic()-t1:.1f}s") t2 = _time.monotonic() currents = get_current_field(lat_min, lat_max, lon_min, lon_max, request.grid_resolution_deg) - logger.info(f" Currents loaded in {_time.monotonic()-t2:.1f}s. Total fallback: {_time.monotonic()-t0:.1f}s") - grid_wx = GridWeatherProvider(wind, waves, currents) + logger.info(f" Currents loaded in {_time.monotonic()-t2:.1f}s") + # Extended fields (SPEC-P1) + sst = get_sst_field(lat_min, lat_max, lon_min, lon_max, request.grid_resolution_deg, departure) + vis = get_visibility_field(lat_min, lat_max, lon_min, lon_max, request.grid_resolution_deg, departure) + ice = get_ice_field(lat_min, lat_max, lon_min, lon_max, request.grid_resolution_deg, departure) + logger.info(f" Total fallback: {_time.monotonic()-t0:.1f}s (incl. 
SST/vis/ice)") + grid_wx = GridWeatherProvider(wind, waves, currents, sst, vis, ice) # Select weather provider callable wx_provider = temporal_wx.get_weather if temporal_wx else grid_wx.get_weather @@ -3104,6 +3829,12 @@ def _optimize_route_sync(request: "OptimizationRequest") -> "OptimizationRespons roll_deg=round(leg['roll_deg'], 1) if leg.get('roll_deg') else None, pitch_deg=round(leg['pitch_deg'], 1) if leg.get('pitch_deg') else None, data_source=data_source_label, + swell_hs_m=round(leg['swell_hs_m'], 2) if leg.get('swell_hs_m') is not None else None, + windsea_hs_m=round(leg['windsea_hs_m'], 2) if leg.get('windsea_hs_m') is not None else None, + current_effect_kts=round(leg['current_effect_kts'], 2) if leg.get('current_effect_kts') is not None else None, + visibility_m=round(leg['visibility_m'], 0) if leg.get('visibility_m') is not None else None, + sst_celsius=round(leg['sst_celsius'], 1) if leg.get('sst_celsius') is not None else None, + ice_concentration=round(leg['ice_concentration'], 3) if leg.get('ice_concentration') is not None else None, )) # Build safety summary @@ -3136,6 +3867,12 @@ def _optimize_route_sync(request: "OptimizationRequest") -> "OptimizationRespons safety_status=leg.get('safety_status'), roll_deg=round(leg['roll_deg'], 1) if leg.get('roll_deg') else None, pitch_deg=round(leg['pitch_deg'], 1) if leg.get('pitch_deg') else None, + swell_hs_m=round(leg['swell_hs_m'], 2) if leg.get('swell_hs_m') is not None else None, + windsea_hs_m=round(leg['windsea_hs_m'], 2) if leg.get('windsea_hs_m') is not None else None, + current_effect_kts=round(leg['current_effect_kts'], 2) if leg.get('current_effect_kts') is not None else None, + visibility_m=round(leg['visibility_m'], 0) if leg.get('visibility_m') is not None else None, + sst_celsius=round(leg['sst_celsius'], 1) if leg.get('sst_celsius') is not None else None, + ice_concentration=round(leg['ice_concentration'], 3) if leg.get('ice_concentration') is not None else None, )) 
scenario_models.append(SpeedScenarioModel( strategy=sc.strategy, diff --git a/frontend/app/page.tsx b/frontend/app/page.tsx index abf8bd2..992386e 100644 --- a/frontend/app/page.tsx +++ b/frontend/app/page.tsx @@ -7,14 +7,14 @@ import MapOverlayControls from '@/components/MapOverlayControls'; import RouteIndicatorPanel from '@/components/RouteIndicatorPanel'; import AnalysisSlidePanel from '@/components/AnalysisSlidePanel'; import { useVoyage } from '@/components/VoyageContext'; -import { apiClient, Position, WindFieldData, WaveFieldData, VelocityData, OptimizationResponse, CreateZoneRequest, WaveForecastFrames, OptimizedRouteKey, AllOptimizationResults, EMPTY_ALL_RESULTS } from '@/lib/api'; +import { apiClient, Position, WindFieldData, WaveFieldData, VelocityData, OptimizationResponse, CreateZoneRequest, WaveForecastFrames, IceForecastFrames, OptimizedRouteKey, AllOptimizationResults, EMPTY_ALL_RESULTS } from '@/lib/api'; import { getAnalyses, saveAnalysis, deleteAnalysis, updateAnalysisMonteCarlo, AnalysisEntry } from '@/lib/analysisStorage'; import { debugLog } from '@/lib/debugLog'; import DebugConsole from '@/components/DebugConsole'; const MapComponent = dynamic(() => import('@/components/MapComponent'), { ssr: false }); -type WeatherLayer = 'wind' | 'waves' | 'currents' | 'none'; +type WeatherLayer = 'wind' | 'waves' | 'currents' | 'ice' | 'visibility' | 'sst' | 'swell' | 'none'; export default function HomePage() { // Voyage context (shared with header dropdowns, persisted across navigation) @@ -35,9 +35,13 @@ export default function HomePage() { // Weather visualization const [weatherLayer, setWeatherLayer] = useState('none'); const [windData, setWindData] = useState(null); + const windDataBaseRef = useRef(null); // preserve ocean mask for forecast + const windFieldCacheRef = useRef>({}); // per-frame cache + const windFieldCacheVersionRef = useRef(''); // invalidated on new GFS run const [waveData, setWaveData] = useState(null); const [windVelocityData, 
setWindVelocityData] = useState(null); const [currentVelocityData, setCurrentVelocityData] = useState(null); + const [extendedWeatherData, setExtendedWeatherData] = useState(null); const [isLoadingWeather, setIsLoadingWeather] = useState(false); // Viewport state @@ -136,6 +140,11 @@ export default function HomePage() { resolution: getResolutionForZoom(v.zoom), }; + // Clear stale extended data immediately when switching layers + if (activeLayer === 'ice' || activeLayer === 'visibility' || activeLayer === 'sst' || activeLayer === 'swell') { + setExtendedWeatherData(null); + } + setIsLoadingWeather(true); const t0 = performance.now(); debugLog('info', 'API', `Loading ${activeLayer} weather: zoom=${v.zoom}, bbox=[${params.lat_min.toFixed(1)},${params.lat_max.toFixed(1)},${params.lon_min.toFixed(1)},${params.lon_max.toFixed(1)}]`); @@ -148,6 +157,7 @@ export default function HomePage() { const dt = (performance.now() - t0).toFixed(0); debugLog('info', 'API', `Wind loaded in ${dt}ms: grid=${wind?.ny}x${wind?.nx}`); setWindData(wind); + windDataBaseRef.current = wind; // stash for forecast frame reconstruction setWindVelocityData(windVel); } else if (activeLayer === 'waves') { const waves = await apiClient.getWaveField(params); @@ -159,6 +169,26 @@ export default function HomePage() { const dt = (performance.now() - t0).toFixed(0); debugLog('info', 'API', `Currents loaded in ${dt}ms: ${currentVel ? 
'yes' : 'no data'}`); setCurrentVelocityData(currentVel); + } else if (activeLayer === 'ice') { + const data = await apiClient.getIceField(params); + const dt = (performance.now() - t0).toFixed(0); + debugLog('info', 'API', `Ice loaded in ${dt}ms: grid=${data?.ny}x${data?.nx}`); + setExtendedWeatherData(data); + } else if (activeLayer === 'visibility') { + const data = await apiClient.getVisibilityField(params); + const dt = (performance.now() - t0).toFixed(0); + debugLog('info', 'API', `Visibility loaded in ${dt}ms: grid=${data?.ny}x${data?.nx}`); + setExtendedWeatherData(data); + } else if (activeLayer === 'sst') { + const data = await apiClient.getSstField(params); + const dt = (performance.now() - t0).toFixed(0); + debugLog('info', 'API', `SST loaded in ${dt}ms: grid=${data?.ny}x${data?.nx}`); + setExtendedWeatherData(data); + } else if (activeLayer === 'swell') { + const data = await apiClient.getSwellField(params); + const dt = (performance.now() - t0).toFixed(0); + debugLog('info', 'API', `Swell loaded in ${dt}ms: grid=${data?.ny}x${data?.nx}`); + setExtendedWeatherData(data); } } catch (error) { debugLog('error', 'API', `Weather load failed: ${error}`); @@ -177,8 +207,53 @@ export default function HomePage() { // Handle wind forecast hour change from timeline const handleForecastHourChange = useCallback((hour: number, data: VelocityData[] | null) => { setForecastHour(hour); - if (data) { + if (data && data.length >= 2) { setWindVelocityData(data); + + // Lazy-cache: build WindFieldData once per (run, hour), reuse on loops + const hdr = data[0].header; + const version = hdr.refTime || ''; + if (version !== windFieldCacheVersionRef.current) { + windFieldCacheRef.current = {}; + windFieldCacheVersionRef.current = version; + } + const key = String(hour); + let field = windFieldCacheRef.current[key]; + if (!field) { + const nx = hdr.nx; + const ny = hdr.ny; + const lats = Array.from({ length: ny }, (_, j) => hdr.la1 - j * hdr.dy); + const lons = Array.from({ 
length: nx }, (_, i) => hdr.lo1 + i * hdr.dx); + const flatU = data[0].data; + const flatV = data[1].data; + const u2d = Array.from({ length: ny }, (_, j) => { + const off = j * nx; + return Array.from({ length: nx }, (_, i) => flatU[off + i] ?? 0); + }); + const v2d = Array.from({ length: ny }, (_, j) => { + const off = j * nx; + return Array.from({ length: nx }, (_, i) => flatV[off + i] ?? 0); + }); + const base = windDataBaseRef.current; + field = { + parameter: 'wind', + time: version, + bbox: { + lat_min: Math.min(hdr.la1, hdr.la2), + lat_max: Math.max(hdr.la1, hdr.la2), + lon_min: hdr.lo1, + lon_max: hdr.lo2, + }, + resolution: hdr.dx, + nx, ny, lats, lons, + u: u2d, v: v2d, + ocean_mask: base?.ocean_mask, + ocean_mask_lats: base?.ocean_mask_lats, + ocean_mask_lons: base?.ocean_mask_lons, + }; + windFieldCacheRef.current[key] = field; + } + setWindData(field); } else if (hour === 0) { loadWeatherData(); } @@ -230,6 +305,43 @@ export default function HomePage() { setWaveData(synth); }, [loadWeatherData]); + // Handle ice forecast hour change + const handleIceForecastHourChange = useCallback((hour: number, allFrames: IceForecastFrames | null) => { + setForecastHour(hour); + if (!allFrames) { + debugLog('warn', 'ICE', `Hour ${hour}: no frame data available`); + if (hour === 0) loadWeatherData(); + return; + } + const frame = allFrames.frames?.[String(hour)]; + if (!frame || !frame.data) { + debugLog('warn', 'ICE', `Hour ${hour}: frame not found in ${Object.keys(allFrames.frames).length} frames`); + return; + } + debugLog('info', 'ICE', `Frame Day ${hour / 24}: grid=${allFrames.ny}x${allFrames.nx}`); + + setExtendedWeatherData({ + parameter: 'ice_concentration', + time: allFrames.run_time, + bbox: { + lat_min: allFrames.lats[0], + lat_max: allFrames.lats[allFrames.lats.length - 1], + lon_min: allFrames.lons[0], + lon_max: allFrames.lons[allFrames.lons.length - 1], + }, + resolution: allFrames.lats.length > 1 ? 
Math.abs(allFrames.lats[1] - allFrames.lats[0]) : 1, + nx: allFrames.nx, + ny: allFrames.ny, + lats: allFrames.lats, + lons: allFrames.lons, + data: frame.data, + unit: 'fraction', + ocean_mask: allFrames.ocean_mask, + ocean_mask_lats: allFrames.ocean_mask_lats, + ocean_mask_lons: allFrames.ocean_mask_lons, + }); + }, [loadWeatherData]); + // Handle current forecast hour change const handleCurrentForecastHourChange = useCallback((hour: number, allFrames: any | null) => { setForecastHour(hour); @@ -497,6 +609,10 @@ export default function HomePage() { if (weatherLayer === 'wind') return 'NOAA GFS 0.25\u00B0'; if (weatherLayer === 'waves') return 'CMEMS WAV 1/12\u00B0'; if (weatherLayer === 'currents') return 'CMEMS PHY 1/12\u00B0'; + if (weatherLayer === 'ice') return 'CMEMS ICE'; + if (weatherLayer === 'visibility') return 'NOAA GFS'; + if (weatherLayer === 'sst') return 'CMEMS PHY'; + if (weatherLayer === 'swell') return 'CMEMS WAV'; return undefined; }, [weatherLayer]); @@ -529,9 +645,11 @@ export default function HomePage() { onForecastHourChange={handleForecastHourChange} onWaveForecastHourChange={handleWaveForecastHourChange} onCurrentForecastHourChange={handleCurrentForecastHourChange} + onIceForecastHourChange={handleIceForecastHourChange} onViewportChange={setViewport} viewportBounds={viewport?.bounds ?? 
null} weatherModelLabel={weatherModelLabel} + extendedWeatherData={extendedWeatherData} > void; /** Callback for current forecast frame changes */ onCurrentForecastHourChange?: (hour: number, frame: CurrentForecastFrames | null) => void; + /** Callback for ice forecast frame changes */ + onIceForecastHourChange?: (hour: number, frame: IceForecastFrames | null) => void; layerType?: LayerType; viewportBounds?: ViewportBounds | null; dataTimestamp?: string | null; } -const FORECAST_HOURS = Array.from({ length: 41 }, (_, i) => i * 3); // 0,3,6,...,120 +// Default forecast hours (used until actual frame data arrives from DB) +const DEFAULT_FORECAST_HOURS = Array.from({ length: 41 }, (_, i) => i * 3); // 0,3,6,...,120 +const DEFAULT_ICE_FORECAST_HOURS = Array.from({ length: 10 }, (_, i) => i * 24); // 0,24,48,...,216 const SPEED_OPTIONS = [1, 2, 4]; const SPEED_INTERVAL: Record = { 1: 2000, 2: 1000, 4: 500 }; +/** Extract sorted numeric hours from frame keys returned by the backend. */ +function deriveHoursFromFrames(frames: Record): number[] { + const hours = Object.keys(frames).map(Number).filter(n => !isNaN(n)).sort((a, b) => a - b); + return hours.length > 0 ? 
hours : []; +} + export default function ForecastTimeline({ visible, onClose, onForecastHourChange, onWaveForecastHourChange, onCurrentForecastHourChange, + onIceForecastHourChange, layerType = 'wind', viewportBounds, dataTimestamp, @@ -44,16 +55,31 @@ export default function ForecastTimeline({ const isWindMode = layerType === 'wind'; const isWaveMode = layerType === 'waves'; const isCurrentMode = layerType === 'currents'; - const hasForecast = isWindMode || isWaveMode || isCurrentMode; + const isIceMode = layerType === 'ice'; + const hasForecast = isWindMode || isWaveMode || isCurrentMode || isIceMode; const [currentHour, setCurrentHour] = useState(0); const [isPlaying, setIsPlaying] = useState(false); const [speed, setSpeed] = useState(1); const [isLoading, setIsLoading] = useState(false); - const [loadProgress, setLoadProgress] = useState({ cached: 0, total: 41 }); + const [loadProgress, setLoadProgress] = useState({ cached: 0, total: 0 }); const [runTime, setRunTime] = useState(null); const [prefetchComplete, setPrefetchComplete] = useState(false); + // Dynamic hours derived from actual DB/API response (replaces hardcoded constants) + const [availableHours, setAvailableHours] = useState([]); + const availableHoursRef = useRef([]); + useEffect(() => { availableHoursRef.current = availableHours; }, [availableHours]); + + // Reset available hours when layer changes so stale data from a previous layer doesn't persist + useEffect(() => { setAvailableHours([]); setCurrentHour(0); }, [layerType]); + + // Effective hours: use DB-derived if available, else defaults + const defaultHours = isIceMode ? DEFAULT_ICE_FORECAST_HOURS : DEFAULT_FORECAST_HOURS; + const activeHours = availableHours.length > 0 ? availableHours : defaultHours; + const sliderMax = activeHours.length > 0 ? activeHours[activeHours.length - 1] : 0; + const sliderStep = activeHours.length >= 2 ? activeHours[1] - activeHours[0] : (isIceMode ? 
24 : 3); + // Wind frames const [windFrames, setWindFrames] = useState>({}); const windFramesRef = useRef>({}); @@ -66,8 +92,14 @@ export default function ForecastTimeline({ const [currentFrameData, setCurrentFrameData] = useState(null); const currentFrameDataRef = useRef(null); - const playIntervalRef = useRef | null>(null); + // Ice frames + const [iceFrameData, setIceFrameData] = useState(null); + const iceFrameDataRef = useRef(null); + + const playRafRef = useRef(null); + const playLastTickRef = useRef(0); const pollIntervalRef = useRef | null>(null); + const sliderRafRef = useRef(null); // Snapshot viewport bounds into a ref — never let it revert to null once set const boundsRef = useRef(viewportBounds ?? null); @@ -80,15 +112,18 @@ export default function ForecastTimeline({ useEffect(() => { windFramesRef.current = windFrames; }, [windFrames]); useEffect(() => { waveFrameDataRef.current = waveFrameData; }, [waveFrameData]); useEffect(() => { currentFrameDataRef.current = currentFrameData; }, [currentFrameData]); + useEffect(() => { iceFrameDataRef.current = iceFrameData; }, [iceFrameData]); // ------------------------------------------------------------------ // Wind forecast: load all frames // ------------------------------------------------------------------ - const loadWindFrames = useCallback(async () => { + const loadWindFrames = useCallback(async (bounds?: ViewportBounds | null) => { try { - const bp = boundsRef.current ?? {}; + // Use the provided bounds (from prefetch) to avoid mismatch if the user panned + const bp = bounds ?? boundsRef.current ?? 
{}; const data: ForecastFrames = await apiClient.getForecastFrames(bp); setWindFrames(data.frames); + setAvailableHours(deriveHoursFromFrames(data.frames)); setRunTime(`${data.run_date} ${data.run_hour}Z`); setPrefetchComplete(true); setIsLoading(false); @@ -122,6 +157,7 @@ export default function ForecastTimeline({ } } setWaveFrameData(data); + setAvailableHours(deriveHoursFromFrames(data.frames)); const rt = data.run_time; if (rt) { try { @@ -167,7 +203,7 @@ export default function ForecastTimeline({ setRunTime(`${st.run_date} ${st.run_hour}Z`); if (st.complete || st.cached_hours === st.total_hours) { if (pollIntervalRef.current) { clearInterval(pollIntervalRef.current); pollIntervalRef.current = null; } - await loadWindFrames(); + await loadWindFrames(bp); } } catch (e) { console.error('Wind forecast poll failed:', e); } }; @@ -233,6 +269,7 @@ export default function ForecastTimeline({ const frameKeys = Object.keys(data.frames); debugLog('info', 'CURRENT', `Loaded ${frameKeys.length} frames in ${dt}s, grid=${data.ny}x${data.nx}`); setCurrentFrameData(data); + setAvailableHours(deriveHoursFromFrames(data.frames)); if (data.run_time) { try { const d = new Date(data.run_time); @@ -293,52 +330,151 @@ export default function ForecastTimeline({ return () => { cancelled = true; if (pollIntervalRef.current) { clearInterval(pollIntervalRef.current); pollIntervalRef.current = null; } }; }, [visible, isCurrentMode, hasBounds, loadCurrentFrames]); + // ------------------------------------------------------------------ + // Ice forecast: load all frames + // ------------------------------------------------------------------ + const loadIceFrames = useCallback(async () => { + try { + debugLog('info', 'ICE', 'Loading ice forecast frames from API...'); + const t0 = performance.now(); + const bp = boundsRef.current ?? 
{}; + const data: IceForecastFrames = await apiClient.getIceForecastFrames(bp); + const dt = ((performance.now() - t0) / 1000).toFixed(1); + const frameKeys = Object.keys(data.frames); + debugLog('info', 'ICE', `Loaded ${frameKeys.length} frames in ${dt}s, grid=${data.ny}x${data.nx}`); + setIceFrameData(data); + setAvailableHours(deriveHoursFromFrames(data.frames)); + if (data.run_time) { + try { + const d = new Date(data.run_time); + setRunTime( + `${d.getUTCFullYear()}${String(d.getUTCMonth() + 1).padStart(2, '0')}${String(d.getUTCDate()).padStart(2, '0')} ${String(d.getUTCHours()).padStart(2, '0')}Z` + ); + } catch { + setRunTime(data.run_time); + } + } + setPrefetchComplete(true); + setIsLoading(false); + if (data.frames['0'] && onIceForecastHourChange) { + debugLog('info', 'ICE', 'Setting initial ice frame T+0h'); + onIceForecastHourChange(0, data); + } + } catch (e) { + debugLog('error', 'ICE', `Failed to load ice forecast frames: ${e}`); + setIsLoading(false); + } + }, [onIceForecastHourChange]); + + // ------------------------------------------------------------------ + // Ice prefetch effect + // ------------------------------------------------------------------ + useEffect(() => { + if (!visible || !isIceMode || !boundsRef.current) return; + + let cancelled = false; + const bp = boundsRef.current; + + const start = async () => { + setIsLoading(true); + setPrefetchComplete(false); + debugLog('info', 'ICE', 'Triggering ice forecast prefetch...'); + try { + await apiClient.triggerIceForecastPrefetch(bp); + const poll = async () => { + if (cancelled) return; + try { + const st = await apiClient.getIceForecastStatus(bp); + setLoadProgress({ cached: st.cached_hours, total: st.total_hours }); + if (st.complete || st.cached_hours === st.total_hours) { + if (pollIntervalRef.current) { clearInterval(pollIntervalRef.current); pollIntervalRef.current = null; } + await loadIceFrames(); + } + } catch (e) { debugLog('error', 'ICE', `Ice forecast poll failed: ${e}`); } + 
}; + await poll(); + pollIntervalRef.current = setInterval(poll, 5000); + } catch (e) { + debugLog('error', 'ICE', `Ice forecast prefetch trigger failed: ${e}`); + setIsLoading(false); + } + }; + + start(); + return () => { cancelled = true; if (pollIntervalRef.current) { clearInterval(pollIntervalRef.current); pollIntervalRef.current = null; } }; + }, [visible, isIceMode, hasBounds, loadIceFrames]); + // ------------------------------------------------------------------ // Play/pause // ------------------------------------------------------------------ + // rAF-based playback — aligned with browser paint cycle so frame + // advances never fire while the previous render is still in progress. useEffect(() => { if (isPlaying && prefetchComplete && hasForecast) { - playIntervalRef.current = setInterval(() => { - setCurrentHour((prev) => { - const idx = FORECAST_HOURS.indexOf(prev); - const nextIdx = (idx + 1) % FORECAST_HOURS.length; - const nextHour = FORECAST_HOURS[nextIdx]; - - if (isWindMode) { - const fd = windFramesRef.current[String(nextHour)] || null; - onForecastHourChange(nextHour, fd); - } else if (isWaveMode && onWaveForecastHourChange && waveFrameDataRef.current) { - onWaveForecastHourChange(nextHour, waveFrameDataRef.current); - } else if (isCurrentMode && onCurrentForecastHourChange && currentFrameDataRef.current) { - onCurrentForecastHourChange(nextHour, currentFrameDataRef.current); - } - return nextHour; - }); - }, SPEED_INTERVAL[speed]); + const interval = SPEED_INTERVAL[speed]; + playLastTickRef.current = performance.now(); + + const tick = () => { + const now = performance.now(); + if (now - playLastTickRef.current >= interval) { + playLastTickRef.current = now; + setCurrentHour((prev) => { + const fallback = isIceMode ? DEFAULT_ICE_FORECAST_HOURS : DEFAULT_FORECAST_HOURS; + const hrs = availableHoursRef.current.length > 0 ? availableHoursRef.current : fallback; + const idx = hrs.indexOf(prev); + const nextIdx = idx >= 0 ? 
(idx + 1) % hrs.length : 0; + const nextHour = hrs[nextIdx]; + + if (isWindMode) { + onForecastHourChange(nextHour, windFramesRef.current[String(nextHour)] || null); + } else if (isWaveMode && onWaveForecastHourChange && waveFrameDataRef.current) { + onWaveForecastHourChange(nextHour, waveFrameDataRef.current); + } else if (isCurrentMode && onCurrentForecastHourChange && currentFrameDataRef.current) { + onCurrentForecastHourChange(nextHour, currentFrameDataRef.current); + } else if (isIceMode && onIceForecastHourChange && iceFrameDataRef.current) { + onIceForecastHourChange(nextHour, iceFrameDataRef.current); + } + return nextHour; + }); + } + playRafRef.current = requestAnimationFrame(tick); + }; + + playRafRef.current = requestAnimationFrame(tick); } - return () => { if (playIntervalRef.current) { clearInterval(playIntervalRef.current); playIntervalRef.current = null; } }; - }, [isPlaying, speed, prefetchComplete, hasForecast, isWindMode, isWaveMode, isCurrentMode, onForecastHourChange, onWaveForecastHourChange, onCurrentForecastHourChange]); + return () => { if (playRafRef.current !== null) { cancelAnimationFrame(playRafRef.current); playRafRef.current = null; } }; + }, [isPlaying, speed, prefetchComplete, hasForecast, isWindMode, isWaveMode, isCurrentMode, isIceMode, onForecastHourChange, onWaveForecastHourChange, onCurrentForecastHourChange, onIceForecastHourChange]); - // Slider change + // Slider change — rAF-throttled so rapid scrubbing only renders once + // per browser frame. The slider UI updates immediately (setCurrentHour) + // while heavy layer updates are deferred to the next paint. 
const handleSliderChange = (e: React.ChangeEvent) => { const hour = parseInt(e.target.value); setCurrentHour(hour); - if (isWindMode) { - onForecastHourChange(hour, windFrames[String(hour)] || null); - } else if (isWaveMode && onWaveForecastHourChange && waveFrameData) { - onWaveForecastHourChange(hour, waveFrameData); - } else if (isCurrentMode && onCurrentForecastHourChange && currentFrameData) { - onCurrentForecastHourChange(hour, currentFrameData); - } + if (sliderRafRef.current !== null) cancelAnimationFrame(sliderRafRef.current); + sliderRafRef.current = requestAnimationFrame(() => { + sliderRafRef.current = null; + if (isWindMode) { + onForecastHourChange(hour, windFrames[String(hour)] || null); + } else if (isWaveMode && onWaveForecastHourChange && waveFrameData) { + onWaveForecastHourChange(hour, waveFrameData); + } else if (isCurrentMode && onCurrentForecastHourChange && currentFrameData) { + onCurrentForecastHourChange(hour, currentFrameData); + } else if (isIceMode && onIceForecastHourChange && iceFrameData) { + onIceForecastHourChange(hour, iceFrameData); + } + }); }; // Close const handleClose = () => { setIsPlaying(false); + if (sliderRafRef.current !== null) { cancelAnimationFrame(sliderRafRef.current); sliderRafRef.current = null; } setCurrentHour(0); onForecastHourChange(0, null); if (onWaveForecastHourChange) onWaveForecastHourChange(0, null); if (onCurrentForecastHourChange) onCurrentForecastHourChange(0, null); + if (onIceForecastHourChange) onIceForecastHourChange(0, null); onClose(); }; @@ -395,7 +531,7 @@ export default function ForecastTimeline({ )}
- Forecast timeline available for Wind and Waves layers + Forecast timeline available for Wind, Waves, Currents and Ice layers