diff --git a/CHANGELOG.md b/CHANGELOG.md
index 518c419b..2d410020 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,12 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+## [0.8.6] - 2026-03-31
+
+### Fixed
+- **ClickHouse traces/metrics data-availability always empty**: `queryTraces` and `queryMetrics` passed a raw `0` for epoch dates as a `DateTime64(3)` parameter, which ClickHouse can't parse; now uses the same `toDateTime64()` clamp used by log queries
+- **Stale session after volume reset**: dashboard only checked `localStorage` for a token without validating it against the backend; now calls `/auth/me` on load and auto-logs out if the session is invalid
+
## [0.8.5] - 2026-03-28
### Security
diff --git a/README.md b/README.md
index e4eca827..f40badb7 100644
--- a/README.md
+++ b/README.md
@@ -16,14 +16,14 @@
-
+
-> **🚀 RELEASE 0.8.5:** LogTide now supports **Multi-Engine Storage** (ClickHouse, MongoDB) and **Advanced Browser Observability**.
+> **🚀 RELEASE 0.8.6:** LogTide now supports **Multi-Engine Storage** (ClickHouse, MongoDB) and **Advanced Browser Observability**.
---
@@ -46,7 +46,7 @@ Designed for teams that need **GDPR compliance**, **full data ownership**, and *
### Logs Explorer

-### Performance & Metrics (New in 0.8.5)
+### Performance & Metrics (New in 0.8.6)

### Distributed Tracing
@@ -124,7 +124,7 @@ We host it for you. Perfect for testing. [**Sign up at logtide.dev**](https://lo
---
-## ✨ Core Features (v0.8.5)
+## ✨ Core Features (v0.8.6)
* 🚀 **Multi-Engine Reservoir:** Pluggable storage layer supporting **TimescaleDB**, **ClickHouse**, and **MongoDB**.
* 🌐 **Browser SDK Enhancements:** Automatic collection of **Web Vitals** (LCP, INP, CLS), user session tracking, and click/network breadcrumbs.
diff --git a/package.json b/package.json
index 16fa18f8..8a0dc534 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "logtide",
- "version": "0.8.5",
+ "version": "0.8.6",
"private": true,
"description": "LogTide - Self-hosted log management platform",
"author": "LogTide Team",
diff --git a/packages/backend/package.json b/packages/backend/package.json
index 9168c32b..bc623e21 100644
--- a/packages/backend/package.json
+++ b/packages/backend/package.json
@@ -1,6 +1,6 @@
{
"name": "@logtide/backend",
- "version": "0.8.5",
+ "version": "0.8.6",
"private": true,
"description": "LogTide Backend API",
"type": "module",
diff --git a/packages/backend/src/utils/internal-logger.ts b/packages/backend/src/utils/internal-logger.ts
index 54e9526f..8c173f2b 100644
--- a/packages/backend/src/utils/internal-logger.ts
+++ b/packages/backend/src/utils/internal-logger.ts
@@ -58,7 +58,7 @@ export async function initializeInternalLogging(): Promise<void> {
dsn,
service: process.env.SERVICE_NAME || 'logtide-backend',
environment: process.env.NODE_ENV || 'development',
- release: process.env.npm_package_version || '0.8.5',
+ release: process.env.npm_package_version || '0.8.6',
batchSize: 5, // Smaller batch for internal logs to see them faster
flushInterval: 5000,
maxBufferSize: 1000,
diff --git a/packages/frontend/package.json b/packages/frontend/package.json
index c80cfdce..a69ca69f 100644
--- a/packages/frontend/package.json
+++ b/packages/frontend/package.json
@@ -1,6 +1,6 @@
{
"name": "@logtide/frontend",
- "version": "0.8.5",
+ "version": "0.8.6",
"private": true,
"description": "LogTide Frontend Dashboard",
"type": "module",
diff --git a/packages/frontend/src/hooks.client.ts b/packages/frontend/src/hooks.client.ts
index 116199dc..01722311 100644
--- a/packages/frontend/src/hooks.client.ts
+++ b/packages/frontend/src/hooks.client.ts
@@ -9,7 +9,7 @@ if (dsn) {
dsn,
service: 'logtide-frontend-client',
environment: env.PUBLIC_NODE_ENV || 'production',
- release: env.PUBLIC_APP_VERSION || '0.8.5',
+ release: env.PUBLIC_APP_VERSION || '0.8.6',
debug: env.PUBLIC_NODE_ENV === 'development',
browser: {
// Core Web Vitals (LCP, INP, CLS, TTFB)
diff --git a/packages/frontend/src/hooks.server.ts b/packages/frontend/src/hooks.server.ts
index 28cdad4a..34830219 100644
--- a/packages/frontend/src/hooks.server.ts
+++ b/packages/frontend/src/hooks.server.ts
@@ -82,7 +82,7 @@ export const handle = dsn
dsn,
service: 'logtide-frontend',
environment: privateEnv?.NODE_ENV || 'production',
- release: process.env.npm_package_version || '0.8.5', }) as unknown as Handle,
+ release: process.env.npm_package_version || '0.8.6', }) as unknown as Handle,
requestLogHandle,
configHandle
)
diff --git a/packages/frontend/src/lib/components/Footer.svelte b/packages/frontend/src/lib/components/Footer.svelte
index 5b6d9607..e55e0611 100644
--- a/packages/frontend/src/lib/components/Footer.svelte
+++ b/packages/frontend/src/lib/components/Footer.svelte
@@ -1,7 +1,7 @@
diff --git a/packages/frontend/src/lib/components/RequireOrganization.svelte b/packages/frontend/src/lib/components/RequireOrganization.svelte
index 9d086f8f..7f9d3b7b 100644
--- a/packages/frontend/src/lib/components/RequireOrganization.svelte
+++ b/packages/frontend/src/lib/components/RequireOrganization.svelte
@@ -57,7 +57,17 @@
}
} else {
// Standard mode: check for token
- if (!$authStore.user) {
+ if (!$authStore.token) {
+ goto('/login');
+ return;
+ }
+
+ // Validate token against backend (catches stale localStorage tokens)
+ try {
+ const { user } = await authAPI.getMe($authStore.token);
+ authStore.updateUser(user);
+ } catch {
+ authStore.clearAuth();
goto('/login');
return;
}
diff --git a/packages/reservoir/package.json b/packages/reservoir/package.json
index dc36e5a9..01eddede 100644
--- a/packages/reservoir/package.json
+++ b/packages/reservoir/package.json
@@ -1,6 +1,6 @@
{
"name": "@logtide/reservoir",
- "version": "0.8.5",
+ "version": "0.8.6",
"description": "Pluggable storage abstraction for Logtide log management",
"type": "module",
"main": "./dist/index.js",
diff --git a/packages/reservoir/src/engines/clickhouse/clickhouse-engine.ts b/packages/reservoir/src/engines/clickhouse/clickhouse-engine.ts
index d5e4c404..55354347 100644
--- a/packages/reservoir/src/engines/clickhouse/clickhouse-engine.ts
+++ b/packages/reservoir/src/engines/clickhouse/clickhouse-engine.ts
@@ -57,7 +57,7 @@ import type {
MetricsOverviewParams,
MetricsOverviewResult,
} from '../../core/types.js';
-import { ClickHouseQueryTranslator } from './query-translator.js';
+import { ClickHouseQueryTranslator, toDateTime64 } from './query-translator.js';
export interface ClickHouseEngineOptions {
/** Use an existing ClickHouse client instead of creating a new one */
@@ -841,9 +841,9 @@ export class ClickHouseEngine extends StorageEngine {
// Time range
conditions.push(`time ${params.fromExclusive ? '>' : '>='} {p_from:DateTime64(3)}`);
- queryParams.p_from = Math.floor(params.from.getTime() / 1000);
+ queryParams.p_from = toDateTime64(params.from);
conditions.push(`time ${params.toExclusive ? '<' : '<='} {p_to:DateTime64(3)}`);
- queryParams.p_to = Math.floor(params.to.getTime() / 1000);
+ queryParams.p_to = toDateTime64(params.to);
if (params.projectId) {
const pids = Array.isArray(params.projectId) ? params.projectId : [params.projectId];
@@ -920,9 +920,9 @@ export class ClickHouseEngine extends StorageEngine {
const queryParams: Record<string, unknown> = {};
conditions.push(`start_time >= {p_from:DateTime64(3)}`);
- queryParams.p_from = Math.floor(params.from.getTime() / 1000);
+ queryParams.p_from = toDateTime64(params.from);
conditions.push(`start_time <= {p_to:DateTime64(3)}`);
- queryParams.p_to = Math.floor(params.to.getTime() / 1000);
+ queryParams.p_to = toDateTime64(params.to);
if (params.projectId) {
const pids = Array.isArray(params.projectId) ? params.projectId : [params.projectId];
@@ -996,11 +996,11 @@ export class ClickHouseEngine extends StorageEngine {
if (from) {
timeFilter += ` AND child.start_time >= {p_from:DateTime64(3)}`;
- queryParams.p_from = Math.floor(from.getTime() / 1000);
+ queryParams.p_from = toDateTime64(from);
}
if (to) {
timeFilter += ` AND child.start_time <= {p_to:DateTime64(3)}`;
- queryParams.p_to = Math.floor(to.getTime() / 1000);
+ queryParams.p_to = toDateTime64(to);
}
const resultSet = await client.query({
@@ -1058,8 +1058,8 @@ export class ClickHouseEngine extends StorageEngine {
];
const queryParams: Record<string, unknown> = {
p_pids: pids,
- p_from: Math.floor(params.from.getTime() / 1000),
- p_to: Math.floor(params.to.getTime() / 1000),
+ p_from: toDateTime64(params.from),
+ p_to: toDateTime64(params.to),
};
if (params.serviceName) {
@@ -1161,9 +1161,9 @@ export class ClickHouseEngine extends StorageEngine {
// Time range
conditions.push(`time ${params.fromExclusive ? '>' : '>='} {p_from:DateTime64(3)}`);
- queryParams.p_from = Math.floor(params.from.getTime() / 1000);
+ queryParams.p_from = toDateTime64(params.from);
conditions.push(`time ${params.toExclusive ? '<' : '<='} {p_to:DateTime64(3)}`);
- queryParams.p_to = Math.floor(params.to.getTime() / 1000);
+ queryParams.p_to = toDateTime64(params.to);
if (params.organizationId) {
const oids = Array.isArray(params.organizationId) ? params.organizationId : [params.organizationId];
@@ -1305,9 +1305,9 @@ export class ClickHouseEngine extends StorageEngine {
queryParams.p_pids = pids;
conditions.push(`time >= {p_from:DateTime64(3)}`);
- queryParams.p_from = Math.floor(params.from.getTime() / 1000);
+ queryParams.p_from = toDateTime64(params.from);
conditions.push(`time <= {p_to:DateTime64(3)}`);
- queryParams.p_to = Math.floor(params.to.getTime() / 1000);
+ queryParams.p_to = toDateTime64(params.to);
conditions.push(`metric_name = {p_name:String}`);
queryParams.p_name = params.metricName;
@@ -1442,8 +1442,8 @@ export class ClickHouseEngine extends StorageEngine {
const queryParams: Record<string, unknown> = {
p_pids: projectIds,
- p_from: Math.floor(params.from.getTime() / 1000),
- p_to: Math.floor(params.to.getTime() / 1000),
+ p_from: toDateTime64(params.from),
+ p_to: toDateTime64(params.to),
p_name: params.metricName,
};
@@ -1513,11 +1513,11 @@ export class ClickHouseEngine extends StorageEngine {
}
if (params.from) {
conditions.push(`time >= {p_from:DateTime64(3)}`);
- queryParams.p_from = Math.floor(params.from.getTime() / 1000);
+ queryParams.p_from = toDateTime64(params.from);
}
if (params.to) {
conditions.push(`time <= {p_to:DateTime64(3)}`);
- queryParams.p_to = Math.floor(params.to.getTime() / 1000);
+ queryParams.p_to = toDateTime64(params.to);
}
const where = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
@@ -1560,11 +1560,11 @@ export class ClickHouseEngine extends StorageEngine {
}
if (params.from) {
conditions.push(`time >= {p_from:DateTime64(3)}`);
- queryParams.p_from = Math.floor(params.from.getTime() / 1000);
+ queryParams.p_from = toDateTime64(params.from);
}
if (params.to) {
conditions.push(`time <= {p_to:DateTime64(3)}`);
- queryParams.p_to = Math.floor(params.to.getTime() / 1000);
+ queryParams.p_to = toDateTime64(params.to);
}
const where = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
@@ -1606,11 +1606,11 @@ export class ClickHouseEngine extends StorageEngine {
}
if (params.from) {
conditions.push(`time >= {p_from:DateTime64(3)}`);
- queryParams.p_from = Math.floor(params.from.getTime() / 1000);
+ queryParams.p_from = toDateTime64(params.from);
}
if (params.to) {
conditions.push(`time <= {p_to:DateTime64(3)}`);
- queryParams.p_to = Math.floor(params.to.getTime() / 1000);
+ queryParams.p_to = toDateTime64(params.to);
}
const where = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
@@ -1640,8 +1640,8 @@ export class ClickHouseEngine extends StorageEngine {
];
const queryParams: Record<string, unknown> = {
p_pids: pids,
- p_from: Math.floor(params.from.getTime() / 1000),
- p_to: Math.floor(params.to.getTime() / 1000),
+ p_from: toDateTime64(params.from),
+ p_to: toDateTime64(params.to),
};
if (params.metricName) {
@@ -1671,8 +1671,8 @@ export class ClickHouseEngine extends StorageEngine {
query: `ALTER TABLE metric_exemplars DELETE WHERE ${exemplarConditions.join(' AND ')}`,
query_params: {
p_pids: pids,
- p_from: Math.floor(params.from.getTime() / 1000),
- p_to: Math.floor(params.to.getTime() / 1000),
+ p_from: toDateTime64(params.from),
+ p_to: toDateTime64(params.to),
},
});
@@ -1685,8 +1685,8 @@ export class ClickHouseEngine extends StorageEngine {
const projectIds = Array.isArray(params.projectId) ? params.projectId : [params.projectId];
const queryParams: Record<string, unknown> = {
p_pids: projectIds,
- p_from: Math.floor(params.from.getTime() / 1000),
- p_to: Math.floor(params.to.getTime() / 1000),
+ p_from: toDateTime64(params.from),
+ p_to: toDateTime64(params.to),
};
let serviceFilter = '';
diff --git a/packages/reservoir/src/engines/clickhouse/query-translator.ts b/packages/reservoir/src/engines/clickhouse/query-translator.ts
index 43172ea3..3dc6531d 100644
--- a/packages/reservoir/src/engines/clickhouse/query-translator.ts
+++ b/packages/reservoir/src/engines/clickhouse/query-translator.ts
@@ -10,7 +10,7 @@ import type {
} from '../../core/types.js';
/** ClickHouse can't parse 0 as DateTime64(3) — clamp to 1ms after epoch */
-function toDateTime64(date: Date): number {
+export function toDateTime64(date: Date): number {
return Math.max(date.getTime() / 1000, 0.001);
}
diff --git a/packages/shared/package.json b/packages/shared/package.json
index 1a575e94..e6f9523f 100644
--- a/packages/shared/package.json
+++ b/packages/shared/package.json
@@ -1,6 +1,6 @@
{
"name": "@logtide/shared",
- "version": "0.8.5",
+ "version": "0.8.6",
"private": true,
"description": "Shared types, schemas and utilities for LogTide",
"type": "module",