diff --git a/migrations/postgres/v7.sql b/migrations/postgres/v7.sql
new file mode 100644
index 0000000..a215724
--- /dev/null
+++ b/migrations/postgres/v7.sql
@@ -0,0 +1,10 @@
+CREATE TABLE metrics_request_rollup_minute (
+ minute TIMESTAMPTZ NOT NULL,
+ path TEXT NOT NULL,
+ request_count INTEGER NOT NULL DEFAULT 0,
+ error_count INTEGER NOT NULL DEFAULT 0,
+ total_latency_ms BIGINT NOT NULL DEFAULT 0,
+ min_latency_ms INTEGER,
+ max_latency_ms INTEGER,
+ PRIMARY KEY (minute, path)
+);
diff --git a/migrations/sqlite/v7.sql b/migrations/sqlite/v7.sql
new file mode 100644
index 0000000..97105bd
--- /dev/null
+++ b/migrations/sqlite/v7.sql
@@ -0,0 +1,10 @@
+CREATE TABLE metrics_request_rollup_minute (
+ minute TEXT NOT NULL,
+ path TEXT NOT NULL,
+ request_count INTEGER NOT NULL DEFAULT 0,
+ error_count INTEGER NOT NULL DEFAULT 0,
+ total_latency_ms INTEGER NOT NULL DEFAULT 0,
+ min_latency_ms INTEGER,
+ max_latency_ms INTEGER,
+ PRIMARY KEY (minute, path)
+);
diff --git a/src/catalog/caching_manager.rs b/src/catalog/caching_manager.rs
index d832994..2a474b5 100644
--- a/src/catalog/caching_manager.rs
+++ b/src/catalog/caching_manager.rs
@@ -1304,4 +1304,15 @@ impl CatalogManager for CachingCatalogManager {
self.cached_read(&key, || self.inner().list_dataset_table_names(&schema))
.await
}
+
+ async fn record_request_rollup_minute(
+ &self,
+ minute: &str,
+ path: &str,
+ bucket: &crate::metrics::RollupBucket,
+ ) -> Result<()> {
+ self.inner()
+ .record_request_rollup_minute(minute, path, bucket)
+ .await
+ }
}
diff --git a/src/catalog/manager.rs b/src/catalog/manager.rs
index 3ef88ed..71b1291 100644
--- a/src/catalog/manager.rs
+++ b/src/catalog/manager.rs
@@ -872,4 +872,17 @@ pub trait CatalogManager: Debug + Send + Sync {
/// Delete a dataset by ID. Returns the deleted dataset if it existed.
async fn delete_dataset(&self, id: &str) -> Result