diff --git a/android/app/src/main/AndroidManifest.xml b/android/app/src/main/AndroidManifest.xml
index 72c9895..00ef991 100644
--- a/android/app/src/main/AndroidManifest.xml
+++ b/android/app/src/main/AndroidManifest.xml
@@ -8,6 +8,7 @@
+
-
+ android:name="io.flutter.embedding.android.NormalTheme"
+ android:resource="@style/NormalTheme" />
-
+
-
-
+
\ No newline at end of file
diff --git a/lib/models/api_models.dart b/lib/models/api_models.dart
new file mode 100644
index 0000000..d36a229
--- /dev/null
+++ b/lib/models/api_models.dart
@@ -0,0 +1,72 @@
+// Small data models added so that frontend doesn't need to work directly with raw JSON maps.
+// These models represent the backend REST response shapes in typed Dart objects.
+
+class Category {
+ final int id;
+ final String name;
+
+ const Category({
+ required this.id,
+ required this.name,
+ });
+
+ factory Category.fromJson(Map<String, dynamic> json) {
+ return Category(
+ id: json['id'] as int,
+ // Defensive parsing to avoid crashing if backend gives null or odd types.
+ name: (json['name'] ?? '').toString(),
+ );
+ }
+
+ @override
+ String toString() => name;
+}
+
+class Conversation {
+ final int id;
+ final String name;
+ final String summary;
+ final int? categoryId;
+ final DateTime? timestamp;
+
+ const Conversation({
+ required this.id,
+ required this.name,
+ required this.summary,
+ required this.categoryId,
+ required this.timestamp,
+ });
+
+ factory Conversation.fromJson(Map<String, dynamic> json) {
+ return Conversation(
+ id: json['id'] as int,
+ name: (json['name'] ?? '').toString(),
+ summary: (json['summary'] ?? '').toString(),
+ categoryId: json['category_id'] as int?,
+ // Defensive parsing for nullable timestamp so malformed or missing values don't crash the app or side panel UI
+ timestamp: json['timestamp'] != null
+ ? DateTime.tryParse(json['timestamp'].toString())
+ : null,
+ );
+ }
+}
+
+class ConversationVector {
+ final int id;
+ final String text;
+ final int conversationId;
+
+ const ConversationVector({
+ required this.id,
+ required this.text,
+ required this.conversationId,
+ });
+
+ factory ConversationVector.fromJson(Map<String, dynamic> json) {
+ return ConversationVector(
+ id: json['id'] as int,
+ text: (json['text'] ?? '').toString(),
+ conversationId: json['conversation_id'] as int,
+ );
+ }
+}
diff --git a/lib/screens/landing_screen.dart b/lib/screens/landing_screen.dart
index db042dd..06e326b 100644
--- a/lib/screens/landing_screen.dart
+++ b/lib/screens/landing_screen.dart
@@ -1,43 +1,59 @@
+import 'dart:async';
+
import 'package:even_realities_g1/even_realities_g1.dart';
import 'package:flutter/material.dart';
-import 'package:front/services/lc3_decoder.dart';
import 'package:front/services/audio_pipeline.dart';
-import '../widgets/g1_connection.dart';
-import '../services/websocket_service.dart';
+import 'package:front/services/lc3_decoder.dart';
+
+import '../models/api_models.dart';
+import '../services/calendar_service.dart';
import '../services/phone_audio_service.dart';
+import '../services/rest_api_service.dart';
+import '../services/websocket_service.dart';
+import '../widgets/g1_connection.dart';
+import '../widgets/side_panel.dart';
import 'login_screen.dart';
import 'register_screen.dart';
-import '../services/calendar_service.dart';
-import 'dart:async';
/// Landing screen of the app. Manages BLE glasses connection,
/// audio streaming, and live transcription display.
/// Also manages display of the landing page and navigation to login/register screens.
-
class LandingScreen extends StatefulWidget {
- /// All dependencies are optional — defaults are created in initState
- /// so they can be injected as mocks in tests.
final G1Manager? manager;
final WebsocketService? ws;
final Lc3Decoder? decoder;
final AudioPipeline? audioPipeline;
- const LandingScreen(
- {this.manager, this.decoder, this.ws, this.audioPipeline, super.key});
+ final RestApiService? api;
+
+ const LandingScreen({
+ this.manager,
+ this.decoder,
+ this.ws,
+ this.audioPipeline,
+ this.api,
+ super.key,
+ });
@override
State<LandingScreen> createState() => _LandingScreenState();
}
class _LandingScreenState extends State<LandingScreen> {
+ final GlobalKey<ScaffoldState> _scaffoldKey = GlobalKey<ScaffoldState>();
+
late final G1Manager _manager;
late final Lc3Decoder _decoder;
late final WebsocketService _ws;
late final AudioPipeline _audioPipeline;
+ late final RestApiService _api;
late final CalendarService _calendarService;
- late final PhoneAudioService _phoneAudio;
+
+ PhoneAudioService? _phoneAudio;
+ bool _phoneAudioInitialized = false;
bool _usePhoneMic = false;
bool _isMuted = false;
+
final ValueNotifier<bool> _isRecording = ValueNotifier(false);
final ValueNotifier<bool> _isRecordingBusy = ValueNotifier(false);
@@ -48,11 +64,12 @@ class _LandingScreenState extends State {
void initState() {
super.initState();
- // Use injected dependencies or create real ones
_manager = widget.manager ?? G1Manager();
_decoder = widget.decoder ?? Lc3Decoder();
_ws = widget.ws ?? WebsocketService();
+ _api = widget.api ?? const RestApiService();
_calendarService = CalendarService();
+
_audioPipeline = widget.audioPipeline ??
AudioPipeline(
_manager,
@@ -62,13 +79,7 @@ class _LandingScreenState extends State {
},
);
- // Connect to backend WebSocket
_ws.connect();
-
- _phoneAudio = PhoneAudioService();
- _phoneAudio.init();
-
- // React to committed (final) text only — interim is too noisy for glasses
_ws.aiResponse.addListener(_onAiResponse);
}
@@ -78,15 +89,53 @@ class _LandingScreenState extends State {
_isRecording.dispose();
_isRecordingBusy.dispose();
_audioPipeline.dispose();
- _phoneAudio.dispose();
+ if (_phoneAudio != null) {
+ unawaited(_phoneAudio!.dispose());
+ }
_ws.dispose();
_manager.dispose();
super.dispose();
}
+ Future<void> _ensurePhoneAudioReady() async {
+ _phoneAudio ??= PhoneAudioService();
+ if (!_phoneAudioInitialized) {
+ await _phoneAudio!.init();
+ _phoneAudioInitialized = true;
+ }
+ }
+
+ Future<void> _sendCalendarContextIfAvailable() async {
+ debugPrint('CALENDAR: trying to fetch calendar context');
+
+ final granted = await _calendarService.requestPermission();
+ debugPrint('CALENDAR: permission granted = $granted');
+
+ if (!granted) {
+ debugPrint('CALENDAR: permission denied, skipping calendar context');
+ return;
+ }
+
+ final events = await _calendarService.getUpcomingEvents();
+ debugPrint('CALENDAR: events found = ${events.length}');
+
+ final activeEvent = _calendarService.selectActiveContext(events);
+ debugPrint('CALENDAR: selected event = ${activeEvent?.title ?? "none"}');
+
+ final payload = _calendarService.buildCalendarPayload(activeEvent);
+ debugPrint('CALENDAR: payload = $payload');
+
+ _ws.sendCalendarContext(payload);
+ }
+
+ void _handleCategorySelected(Category? category) {
+ final categoryId = category?.id;
+ debugPrint('CATEGORY: selected category id = $categoryId');
+ _ws.sendSelectedCategory(categoryId);
+ }
+
void _onAiResponse() {
final aiResponse = _ws.aiResponse.value;
-
debugPrint(aiResponse);
if (!_isMuted) {
@@ -99,57 +148,51 @@ class _LandingScreenState extends State {
}
}
- /// Adds a sentence to the on-screen queue.
- ///
- /// Each sentence is a separate BLE packet (lineNumber 1..N).
- /// When the list is full, the oldest sentence is evicted to make room.
- /// Sentences are also automatically removed after a fixed timeout
- /// (currently 10 seconds), so older lines can clear even without new ones.
void _addSentenceToDisplay(String sentence) {
if (_isMuted) return;
+ if (sentence.trim().isEmpty) return;
+
if (_displayedSentences.length >= _maxDisplayedSentences) {
_displayedSentences.removeAt(0);
}
-
_displayedSentences.add(sentence);
- _manager.transcription.displayLines(
- List.unmodifiable(_displayedSentences),
- );
+ _manager.transcription.displayLines(List.unmodifiable(_displayedSentences));
Future.delayed(const Duration(seconds: 10), () {
if (!mounted || _isMuted) return;
_displayedSentences.remove(sentence);
- _manager.transcription.displayLines(
- List.unmodifiable(_displayedSentences),
- );
+ _manager.transcription
+ .displayLines(List.unmodifiable(_displayedSentences));
});
}
void _clearDisplayQueue() {
_displayedSentences.clear();
+ if (_manager.isConnected && _manager.transcription.isActive.value) {
+ _manager.transcription.displayLines(const []);
+ }
}
- /// Begin a transcription session
Future<void> _startTranscription() async {
if (_isRecordingBusy.value) return;
if (!_usePhoneMic && !_manager.isConnected) return;
+
_isRecordingBusy.value = true;
try {
+ await _sendCalendarContextIfAvailable();
+
if (_manager.isConnected) {
- //glasses implementation
- await _manager.transcription.stop(); // pakota clean stop ensin
+ await _manager.transcription.stop();
await Future.delayed(const Duration(milliseconds: 300));
- _ws.clearCommittedText(); // reset accumulated text — backend starts fresh too
+ _ws.clearCommittedText();
_clearDisplayQueue();
-
await _ws.startAudioStream();
await _manager.transcription.start();
if (_usePhoneMic) {
- await _phoneAudio.start((pcm) {
- if (_ws.connected.value) {
- _ws.sendAudio(pcm);
- }
+ await _ensurePhoneAudioReady();
+ await _phoneAudio!.start((pcm) {
+ if (_ws.connected.value) _ws.sendAudio(pcm);
});
} else {
await _manager.microphone.enable();
@@ -157,27 +200,26 @@ class _LandingScreenState extends State {
}
await _manager.transcription.displayText('Recording started.');
- debugPrint("Transcription (re)started");
+ debugPrint('Transcription (re)started');
} else {
- //wo glasses
- _ws.clearCommittedText(); // reset accumulated text — backend starts fresh too
+ _ws.clearCommittedText();
_clearDisplayQueue();
await _ws.startAudioStream();
- await _phoneAudio.start(
- (pcm) {
- if (_ws.connected.value) _ws.sendAudio(pcm);
- },
- );
+ await _ensurePhoneAudioReady();
+ await _phoneAudio!.start((pcm) {
+ if (_ws.connected.value) _ws.sendAudio(pcm);
+ });
}
+
_isRecording.value = true;
} finally {
_isRecordingBusy.value = false;
}
}
- /// End a transcription session
Future<void> _stopTranscription() async {
if (_isRecordingBusy.value) return;
+
_isRecordingBusy.value = true;
_isRecording.value = false;
try {
@@ -185,18 +227,19 @@ class _LandingScreenState extends State {
_clearDisplayQueue();
await _manager.transcription.displayText('Recording stopped.');
await Future.delayed(const Duration(seconds: 2));
+
if (_usePhoneMic) {
- await _phoneAudio.stop();
+ await _phoneAudio?.stop();
} else {
await _manager.microphone.disable();
await _audioPipeline.stop();
}
- // lisätty jotta paketit kerkiävät lähteä ennen sulkemista
+
await Future.delayed(const Duration(milliseconds: 200));
await _ws.stopAudioStream();
await _manager.transcription.stop();
} else {
- await _phoneAudio.stop();
+ await _phoneAudio?.stop();
await _ws.stopAudioStream();
}
} finally {
@@ -204,31 +247,34 @@ class _LandingScreenState extends State {
}
}
+ void _openDrawer() => _scaffoldKey.currentState?.openDrawer();
+
@override
Widget build(BuildContext context) {
return Scaffold(
+ key: _scaffoldKey,
backgroundColor: Colors.white,
+ drawer: SidePanel(
+ api: _api,
+ onCategorySelected: _handleCategorySelected,
+ ),
body: SafeArea(
child: Padding(
padding: const EdgeInsets.symmetric(horizontal: 18, vertical: 10),
child: Column(
children: [
- // ===== YLÄBANNERI =====
Row(
children: [
- // Vasen
SizedBox(
width: 96,
child: Align(
alignment: Alignment.centerLeft,
child: IconButton(
- onPressed: () {},
+ onPressed: _openDrawer,
icon: const Icon(Icons.menu, color: Color(0xFF00239D)),
),
),
),
-
- // Logo keskelle
Expanded(
child: Center(
child: Image.asset(
@@ -238,8 +284,6 @@ class _LandingScreenState extends State {
),
),
),
-
- // Oikea
SizedBox(
width: 96,
child: Row(
@@ -270,388 +314,381 @@ class _LandingScreenState extends State {
),
],
),
-
Expanded(
- child: Column(
- mainAxisAlignment: MainAxisAlignment.center,
- children: [
- Image.asset(
- 'assets/images/g1-smart-glasses.webp',
- height: 120,
- fit: BoxFit.contain,
- ),
-
- const SizedBox(height: 6),
-
- const Text(
- 'Even realities G1 smart glasses',
- style: TextStyle(
- fontSize: 12,
- fontWeight: FontWeight.bold,
- ),
- ),
-
- const SizedBox(height: 34),
-
- // ===== CONNECT
- Row(
- children: [
- // Connect / Disconnect
- Expanded(
- child: GlassesConnection(
- manager: _manager,
- onRecordToggle: () async {
- if (!_manager.transcription.isActive.value) {
- final granted =
- await _calendarService.requestPermission();
- if (granted) {
- final events = await _calendarService
- .getUpcomingEvents();
- final activeEvent = _calendarService
- .selectActiveContext(events);
- if (activeEvent != null) {
- final payload = _calendarService
- .buildCalendarPayload(activeEvent);
- _ws.sendCalendarContext(payload);
- }
- }
- await _startTranscription();
- } else {
- await _stopTranscription();
- }
- },
- ),
- ),
-
- const SizedBox(width: 14),
-
- // Mic toggle
- Expanded(
- child: ListenableBuilder(
- listenable: Listenable.merge(
- [_isRecording, _isRecordingBusy]),
- builder: (context, _) {
- final isLocked =
- _isRecording.value || _isRecordingBusy.value;
-
- final borderColor = isLocked
- ? Colors.black26
- : (_usePhoneMic
- ? Colors.lightGreen
- : Colors.black12);
- final backgroundColor = isLocked
- ? Colors.black.withAlpha((0.04 * 255).round())
- : (_usePhoneMic
- ? Colors.lightGreen
- .withAlpha((0.15 * 255).round())
- : Colors.transparent);
-
- final textColor = isLocked
- ? Colors.black38
- : (_usePhoneMic
- ? Colors.lightGreen
- : Colors.black);
-
- return Opacity(
- opacity: isLocked ? 0.55 : 1,
- child: InkWell(
- onTap: isLocked
- ? null
- : () {
- setState(() {
- _usePhoneMic = !_usePhoneMic;
- });
- },
- child: Container(
- height: 72,
- padding: const EdgeInsets.symmetric(
- horizontal: 14),
- decoration: BoxDecoration(
- color: backgroundColor,
- border: Border.all(color: borderColor),
- borderRadius: BorderRadius.circular(8),
+ child: LayoutBuilder(
+ builder: (context, constraints) {
+ return SingleChildScrollView(
+ keyboardDismissBehavior:
+ ScrollViewKeyboardDismissBehavior.onDrag,
+ child: ConstrainedBox(
+ constraints:
+ BoxConstraints(minHeight: constraints.maxHeight),
+ child: Column(
+ mainAxisAlignment: MainAxisAlignment.center,
+ children: [
+ Image.asset(
+ 'assets/images/g1-smart-glasses.webp',
+ height: 120,
+ fit: BoxFit.contain,
+ ),
+ const SizedBox(height: 6),
+ const Text(
+ 'Even realities G1 smart glasses',
+ style: TextStyle(
+ fontSize: 12,
+ fontWeight: FontWeight.bold,
+ ),
+ ),
+ const SizedBox(height: 34),
+ Row(
+ children: [
+ Expanded(
+ child: GlassesConnection(
+ manager: _manager,
+ onRecordToggle: () async {
+ if (!_manager
+ .transcription.isActive.value) {
+ await _startTranscription();
+ } else {
+ await _stopTranscription();
+ }
+ },
+ ),
+ ),
+ const SizedBox(width: 14),
+ Expanded(
+ child: ListenableBuilder(
+ listenable: Listenable.merge(
+ [_isRecording, _isRecordingBusy],
),
- child: Row(
- mainAxisAlignment:
- MainAxisAlignment.center,
- children: [
- _usePhoneMic
- ? Icon(
- Icons.phone_android,
- size: 22,
- color: isLocked
- ? Colors.black38
- : Colors.lightGreen,
- )
- : Image.asset(
- 'assets/images/g1-smart-glasses.webp',
- height: 22,
- fit: BoxFit.contain,
- color: isLocked
- ? Colors.black38
- : null,
- colorBlendMode: isLocked
- ? BlendMode.srcIn
- : null,
- ),
- const SizedBox(width: 10),
- Expanded(
- child: Text(
- _usePhoneMic
- ? 'Switch to glasses mic'
- : 'Switch to phone mic',
- textAlign: TextAlign.center,
- style: TextStyle(
- fontSize: 13,
- fontWeight: _usePhoneMic
- ? FontWeight.bold
- : FontWeight.normal,
- color: textColor,
+ builder: (context, _) {
+ final isLocked = _isRecording.value ||
+ _isRecordingBusy.value;
+ final borderColor = isLocked
+ ? Colors.black26
+ : (_usePhoneMic
+ ? Colors.lightGreen
+ : Colors.black12);
+ final backgroundColor = isLocked
+ ? Colors.black
+ .withAlpha((0.04 * 255).round())
+ : (_usePhoneMic
+ ? Colors.lightGreen.withAlpha(
+ (0.15 * 255).round())
+ : Colors.transparent);
+ final textColor = isLocked
+ ? Colors.black38
+ : (_usePhoneMic
+ ? Colors.lightGreen
+ : Colors.black);
+
+ return Opacity(
+ opacity: isLocked ? 0.55 : 1,
+ child: InkWell(
+ onTap: isLocked
+ ? null
+ : () => setState(
+ () => _usePhoneMic =
+ !_usePhoneMic,
+ ),
+ child: Container(
+ height: 72,
+ padding: const EdgeInsets.symmetric(
+ horizontal: 14,
+ ),
+ decoration: BoxDecoration(
+ color: backgroundColor,
+ border: Border.all(
+ color: borderColor),
+ borderRadius:
+ BorderRadius.circular(8),
+ ),
+ child: Row(
+ mainAxisAlignment:
+ MainAxisAlignment.center,
+ children: [
+ _usePhoneMic
+ ? Icon(
+ Icons.phone_android,
+ size: 22,
+ color: isLocked
+ ? Colors.black38
+ : Colors.lightGreen,
+ )
+ : Image.asset(
+ 'assets/images/g1-smart-glasses.webp',
+ height: 22,
+ fit: BoxFit.contain,
+ color: isLocked
+ ? Colors.black38
+ : null,
+ colorBlendMode: isLocked
+ ? BlendMode.srcIn
+ : null,
+ ),
+ const SizedBox(width: 10),
+ Expanded(
+ child: Text(
+ _usePhoneMic
+ ? 'Switch to glasses mic'
+ : 'Switch to phone mic',
+ textAlign: TextAlign.center,
+ style: TextStyle(
+ fontSize: 13,
+ fontWeight: _usePhoneMic
+ ? FontWeight.bold
+ : FontWeight.normal,
+ color: textColor,
+ ),
+ ),
+ ),
+ ],
),
),
),
- ],
- ),
+ );
+ },
),
),
- );
- },
- ),
- ),
- ],
- ),
-
- const SizedBox(height: 14),
-
- Row(
- children: [
- // Start / Stop recording
- Expanded(
- child: StreamBuilder(
- stream: _manager.connectionState,
- initialData: G1ConnectionEvent(
- state: _manager.isConnected
- ? G1ConnectionState.connected
- : G1ConnectionState.disconnected,
+ ],
),
- builder: (context, snapshot) {
- final isGlassesConnected = snapshot.data?.state ==
- G1ConnectionState.connected;
-
- return ListenableBuilder(
- listenable: Listenable.merge(
- [_isRecording, _isRecordingBusy]),
- builder: (context, _) {
- final isRecording = _isRecording.value;
- final isBusy = _isRecordingBusy.value;
-
- final canStart = _usePhoneMic ||
- isGlassesConnected == true;
-
- final isDisabled =
- isBusy || (!isRecording && !canStart);
-
- final borderColor = isDisabled
- ? Colors.black26
- : (isRecording
- ? Colors.red
- : Colors.black12);
- final backgroundColor = isDisabled
- ? Colors.black
- .withAlpha((0.04 * 255).round())
- : (isRecording
- ? Colors.red
- .withAlpha((0.15 * 255).round())
- : Colors.transparent);
-
- final foregroundColor = isDisabled
- ? Colors.black38
- : (isRecording
- ? Colors.red
- : Colors.grey[800]);
-
- return Opacity(
- opacity: isDisabled ? 0.55 : 1,
- child: InkWell(
- onTap: isDisabled
- ? null
- : () async {
- if (!isRecording) {
- await _startTranscription();
- } else {
- await _stopTranscription();
- }
- },
- child: Container(
- height: 72,
- padding: const EdgeInsets.symmetric(
- horizontal: 14),
- decoration: BoxDecoration(
- color: backgroundColor,
- border:
- Border.all(color: borderColor),
- borderRadius:
- BorderRadius.circular(8),
+ const SizedBox(height: 14),
+ Row(
+ children: [
+ Expanded(
+ child: StreamBuilder(
+ stream: _manager.connectionState,
+ initialData: G1ConnectionEvent(
+ state: _manager.isConnected
+ ? G1ConnectionState.connected
+ : G1ConnectionState.disconnected,
+ ),
+ builder: (context, snapshot) {
+ final isGlassesConnected =
+ snapshot.data?.state ==
+ G1ConnectionState.connected;
+
+ return ListenableBuilder(
+ listenable: Listenable.merge(
+ [_isRecording, _isRecordingBusy],
),
- child: Row(
- mainAxisAlignment:
- MainAxisAlignment.center,
- children: [
- Icon(
- isRecording
- ? Icons.stop_circle_outlined
- : Icons.fiber_manual_record,
- size: 22,
- color: foregroundColor,
- ),
- const SizedBox(width: 10),
- Expanded(
- child: Text(
- isRecording
- ? 'Stop\nRecording'
- : 'Start\nRecording',
- textAlign: TextAlign.center,
- style: TextStyle(
- fontSize: 13,
- fontWeight: FontWeight.bold,
- color: foregroundColor,
+ builder: (context, _) {
+ final isRecording =
+ _isRecording.value;
+ final isBusy = _isRecordingBusy.value;
+ final canStart = _usePhoneMic ||
+ isGlassesConnected;
+ final isDisabled = isBusy ||
+ (!isRecording && !canStart);
+ final borderColor = isDisabled
+ ? Colors.black26
+ : (isRecording
+ ? Colors.red
+ : Colors.black12);
+ final backgroundColor = isDisabled
+ ? Colors.black.withAlpha(
+ (0.04 * 255).round())
+ : (isRecording
+ ? Colors.red.withAlpha(
+ (0.15 * 255).round(),
+ )
+ : Colors.transparent);
+ final foregroundColor = isDisabled
+ ? Colors.black38
+ : (isRecording
+ ? Colors.red
+ : Colors.grey[800]);
+
+ return Opacity(
+ opacity: isDisabled ? 0.55 : 1,
+ child: InkWell(
+ onTap: isDisabled
+ ? null
+ : () async {
+ if (!isRecording) {
+ await _startTranscription();
+ } else {
+ await _stopTranscription();
+ }
+ },
+ child: Container(
+ height: 72,
+ padding:
+ const EdgeInsets.symmetric(
+ horizontal: 14,
+ ),
+ decoration: BoxDecoration(
+ color: backgroundColor,
+ border: Border.all(
+ color: borderColor),
+ borderRadius:
+ BorderRadius.circular(8),
+ ),
+ child: Row(
+ mainAxisAlignment:
+ MainAxisAlignment.center,
+ children: [
+ Icon(
+ isRecording
+ ? Icons
+ .stop_circle_outlined
+ : Icons
+ .fiber_manual_record,
+ size: 22,
+ color: foregroundColor,
+ ),
+ const SizedBox(width: 10),
+ Expanded(
+ child: Text(
+ isRecording
+ ? 'Stop\nRecording'
+ : 'Start\nRecording',
+ textAlign:
+ TextAlign.center,
+ style: TextStyle(
+ fontSize: 13,
+ fontWeight:
+ FontWeight.bold,
+ color:
+ foregroundColor,
+ ),
+ ),
+ ),
+ ],
),
),
),
- ],
- ),
- ),
- ),
- );
- },
- );
- },
- ),
- ),
-
- const SizedBox(width: 14),
-
- // Mute button
- Expanded(
- child: InkWell(
- onTap: () {
- setState(() {
- _isMuted = !_isMuted;
- });
- },
- child: Container(
- height: 72,
- padding:
- const EdgeInsets.symmetric(horizontal: 14),
- decoration: BoxDecoration(
- color: _isMuted
- ? Colors.orange
- .withAlpha((0.15 * 255).round())
- : Colors.transparent,
- border: Border.all(
- color:
- _isMuted ? Colors.orange : Colors.black12,
- ),
- borderRadius: BorderRadius.circular(8),
- ),
- child: Row(
- mainAxisAlignment: MainAxisAlignment.center,
- children: [
- Icon(
- _isMuted
- ? Icons.comments_disabled_outlined
- : Icons.comment_outlined,
- size: 22,
- color: _isMuted
- ? Colors.orange
- : Colors.grey[700],
+ );
+ },
+ );
+ },
),
- const SizedBox(width: 10),
- Expanded(
- child: Text(
- _isMuted
- ? 'Unmute display'
- : 'Mute display',
- textAlign: TextAlign.center,
- style: TextStyle(
- fontSize: 14,
- fontWeight: _isMuted
- ? FontWeight.bold
- : FontWeight.normal,
+ ),
+ const SizedBox(width: 14),
+ Expanded(
+ child: InkWell(
+ onTap: () =>
+ setState(() => _isMuted = !_isMuted),
+ child: Container(
+ height: 72,
+ padding: const EdgeInsets.symmetric(
+ horizontal: 14,
+ ),
+ decoration: BoxDecoration(
color: _isMuted
? Colors.orange
- : Colors.grey[800],
+ .withAlpha((0.15 * 255).round())
+ : Colors.transparent,
+ border: Border.all(
+ color: _isMuted
+ ? Colors.orange
+ : Colors.black12,
+ ),
+ borderRadius: BorderRadius.circular(8),
+ ),
+ child: Row(
+ mainAxisAlignment:
+ MainAxisAlignment.center,
+ children: [
+ Icon(
+ _isMuted
+ ? Icons
+ .comments_disabled_outlined
+ : Icons.comment_outlined,
+ size: 22,
+ color: _isMuted
+ ? Colors.orange
+ : Colors.grey[700],
+ ),
+ const SizedBox(width: 10),
+ Expanded(
+ child: Text(
+ _isMuted
+ ? 'Unmute display'
+ : 'Mute display',
+ textAlign: TextAlign.center,
+ style: TextStyle(
+ fontSize: 14,
+ fontWeight: _isMuted
+ ? FontWeight.bold
+ : FontWeight.normal,
+ color: _isMuted
+ ? Colors.orange
+ : Colors.grey[800],
+ ),
+ ),
+ ),
+ ],
),
),
),
- ],
+ ),
+ ],
+ ),
+ const SizedBox(height: 22),
+ ValueListenableBuilder(
+ valueListenable: _ws.aiResponse,
+ builder: (context, aiResponse, _) {
+ if (aiResponse.isEmpty) {
+ return const SizedBox.shrink();
+ }
+ return Container(
+ width: double.infinity,
+ padding: const EdgeInsets.symmetric(
+ horizontal: 14,
+ vertical: 10,
+ ),
+ decoration: BoxDecoration(
+ border: Border.all(color: Colors.black12),
+ borderRadius: BorderRadius.circular(8),
+ ),
+ child: Text(
+ aiResponse,
+ style: const TextStyle(fontSize: 14),
+ ),
+ );
+ },
+ ),
+ const SizedBox(height: 8),
+ Center(
+ child: Container(
+ padding: const EdgeInsets.symmetric(
+ horizontal: 14,
+ vertical: 5,
+ ),
+ decoration: BoxDecoration(
+ border: Border.all(color: Colors.black12),
+ borderRadius: BorderRadius.circular(8),
+ ),
+ child: const Row(
+ mainAxisSize: MainAxisSize.min,
+ children: [
+ Icon(Icons.battery_full, size: 18),
+ SizedBox(width: 8),
+ Text('G1 smart glasses'),
+ ],
+ ),
),
),
- ),
- ),
- ],
- ),
-
- const SizedBox(height: 22),
-
- ValueListenableBuilder(
- valueListenable: _ws.aiResponse,
- builder: (context, aiResponse, _) {
- if (aiResponse.isEmpty) return const SizedBox.shrink();
- return Container(
- width: double.infinity,
- padding: const EdgeInsets.symmetric(
- horizontal: 14, vertical: 10),
- decoration: BoxDecoration(
- border: Border.all(color: Colors.black12),
- borderRadius: BorderRadius.circular(8),
- ),
- child: Text(
- aiResponse,
- style: const TextStyle(fontSize: 14),
- ),
- );
- },
- ),
-
- const SizedBox(height: 8),
- Center(
- child: Container(
- padding: const EdgeInsets.symmetric(
- horizontal: 14, vertical: 5),
- decoration: BoxDecoration(
- border: Border.all(color: Colors.black12),
- borderRadius: BorderRadius.circular(8),
- ),
- child: const Row(
- mainAxisSize: MainAxisSize.min,
- children: [
- Icon(Icons.battery_full, size: 18),
- SizedBox(width: 8),
- Text('G1 smart glasses'),
],
),
),
- ),
- ],
+ );
+ },
),
),
-
- // ===== LOGIN / REGISTER =====
Padding(
padding: const EdgeInsets.only(bottom: 8),
child: Row(
mainAxisAlignment: MainAxisAlignment.center,
children: [
TextButton(
- onPressed: () {
- Navigator.push(
- context,
- MaterialPageRoute(
- builder: (_) => const LoginScreen(),
- ),
- );
- },
+ onPressed: () => Navigator.push(
+ context,
+ MaterialPageRoute(
+ builder: (_) => const LoginScreen(),
+ ),
+ ),
child: const Text(
'Sign in',
style: TextStyle(
@@ -662,14 +699,12 @@ class _LandingScreenState extends State {
),
const Text('|'),
TextButton(
- onPressed: () {
- Navigator.push(
- context,
- MaterialPageRoute(
- builder: (_) => const RegisterScreen(),
- ),
- );
- },
+ onPressed: () => Navigator.push(
+ context,
+ MaterialPageRoute(
+ builder: (_) => const RegisterScreen(),
+ ),
+ ),
child: const Text(
'Register',
style: TextStyle(
@@ -688,51 +723,3 @@ class _LandingScreenState extends State {
);
}
}
-
-class LandingTile extends StatelessWidget {
- final IconData icon;
- final String label;
- final VoidCallback onTap;
- final bool enabled;
-
- const LandingTile({
- super.key,
- required this.icon,
- required this.label,
- required this.onTap,
- this.enabled = true,
- });
-
- @override
- Widget build(BuildContext context) {
- return InkWell(
- onTap: onTap,
- child: Container(
- height: 72,
- padding: const EdgeInsets.symmetric(horizontal: 14),
- decoration: BoxDecoration(
- border: Border.all(color: Colors.black12),
- borderRadius: BorderRadius.circular(8),
- ),
- child: Row(
- children: [
- Icon(
- icon,
- size: 22,
- color: Colors.grey[700],
- ),
- const SizedBox(width: 10),
- Expanded(
- child: Text(
- label,
- style: TextStyle(
- fontSize: 14,
- color: Colors.grey[800],
- ),
- )),
- ],
- ),
- ),
- );
- }
-}
diff --git a/lib/services/calendar_service.dart b/lib/services/calendar_service.dart
index 2eabc15..b420a60 100644
--- a/lib/services/calendar_service.dart
+++ b/lib/services/calendar_service.dart
@@ -1,39 +1,50 @@
import 'package:device_calendar/device_calendar.dart';
+import 'package:flutter/foundation.dart';
class CalendarService {
final DeviceCalendarPlugin _calendarPlugin = DeviceCalendarPlugin();
- //Requests calendar permission
+ /// Requests calendar permission.
Future<bool> requestPermission() async {
- var permissionsGranted = await _calendarPlugin.requestPermissions();
- if (permissionsGranted.isSuccess && permissionsGranted.data == true) {
- return true;
- // Permission granted, you can now access the calendar
- } else {
- return false;
- // Permission denied, handle accordingly
- }
+ debugPrint('CALENDAR SERVICE: requesting permission');
+ final permissionsGranted = await _calendarPlugin.requestPermissions();
+
+ final granted =
+ permissionsGranted.isSuccess && permissionsGranted.data == true;
+
+ debugPrint('CALENDAR SERVICE: permission granted = $granted');
+ return granted;
}
- //Searches for upcoming events in the next 7 days
+ /// Searches for upcoming events in the next 7 days.
Future<List<CalendarEventModel>> getUpcomingEvents() async {
- var calendarResult = await _calendarPlugin.retrieveCalendars();
+ debugPrint('CALENDAR SERVICE: retrieving calendars');
+
+ final calendarResult = await _calendarPlugin.retrieveCalendars();
if (calendarResult.isSuccess && calendarResult.data != null) {
- List calendars = calendarResult.data!;
- List events = [];
+ final List<Calendar> calendars = calendarResult.data!;
+ final List<CalendarEventModel> events = [];
+
+ final DateTime startDate = DateTime.now();
+ final DateTime endDate = startDate.add(const Duration(days: 7));
- DateTime startDate = DateTime.now();
- DateTime endDate = startDate.add(const Duration(days: 7));
+ debugPrint(
+ 'CALENDAR SERVICE: searching events from $startDate to $endDate',
+ );
- for (var calendar in calendars) {
- var eventResult = await _calendarPlugin.retrieveEvents(
+ for (final calendar in calendars) {
+ debugPrint(
+ 'CALENDAR SERVICE: checking calendar ${calendar.name} (${calendar.id})',
+ );
+
+ final eventResult = await _calendarPlugin.retrieveEvents(
calendar.id!,
RetrieveEventsParams(startDate: startDate, endDate: endDate),
);
if (eventResult.isSuccess && eventResult.data != null) {
- List calendarEvents = eventResult.data!;
- for (var event in calendarEvents) {
+ final List<Event> calendarEvents = eventResult.data!;
+ for (final event in calendarEvents) {
if (event.start != null && event.end != null) {
events.add(
CalendarEventModel(
@@ -47,57 +58,66 @@ class CalendarService {
}
}
}
+
events.sort((a, b) => a.start.compareTo(b.start));
+ debugPrint('CALENDAR SERVICE: found ${events.length} upcoming events');
return events;
}
+
+ debugPrint(
+ 'CALENDAR SERVICE: no calendars or failed to retrieve calendars');
return [];
}
- //Selects the active or upcoming event
+ /// Selects the active or upcoming event.
CalendarEventModel? selectActiveContext(List<CalendarEventModel> events) {
- DateTime now = DateTime.now();
- //Event is happening now
- for (var event in events) {
+ final DateTime now = DateTime.now();
+
+ for (final event in events) {
if (event.start.isBefore(now) && event.end.isAfter(now)) {
+ debugPrint('CALENDAR SERVICE: active event found = ${event.title}');
return event;
}
}
- //Upcoming event
- for (var event in events) {
+
+ for (final event in events) {
if (event.start.isAfter(now)) {
+ debugPrint('CALENDAR SERVICE: upcoming event found = ${event.title}');
return event;
}
}
- //No active or upcoming events
+
+ debugPrint('CALENDAR SERVICE: no active or upcoming event found');
return null;
}
- //Builds the payload to send to backend
+ /// Builds the payload to send to backend.
Map<String, dynamic> buildCalendarPayload(CalendarEventModel? event) {
if (event == null) {
return {
- "type": "calendar_context",
- "data": {
- "title": "General conversation",
- "description": null,
- "start": null,
- "end": null
- }
+ 'type': 'calendar_context',
+ 'data': {
+ 'title': 'General conversation',
+ 'description': null,
+ 'start': null,
+ 'end': null,
+ },
};
}
+
return {
- "type": "calendar_context",
- "data": {
- "title": event.title,
- "description": event.description,
- "start": event.start.toIso8601String(),
- "end": event.end.toIso8601String()
- }
+ 'type': 'calendar_context',
+ 'data': {
+ 'title': event.title,
+ 'description': event.description,
+ 'start': event.start.toIso8601String(),
+ 'end': event.end.toIso8601String(),
+ },
};
}
}
-// Model to represent calendar events in a simplified way for our application
+/// Model to represent calendar events in a simplified way for our application.
class CalendarEventModel {
final String title;
final String? description;
@@ -110,4 +130,9 @@ class CalendarEventModel {
required this.start,
required this.end,
});
+
+ @override
+ String toString() {
+ return 'CalendarEventModel(title: $title, start: $start, end: $end)';
+ }
}
diff --git a/lib/services/phone_audio_service.dart b/lib/services/phone_audio_service.dart
index 675eda1..5b03a1d 100644
--- a/lib/services/phone_audio_service.dart
+++ b/lib/services/phone_audio_service.dart
@@ -1,26 +1,38 @@
+import 'dart:async';
+import 'dart:typed_data';
+
import 'package:flutter_sound/flutter_sound.dart';
import 'package:permission_handler/permission_handler.dart';
-import 'dart:typed_data';
-import 'dart:async';
class PhoneAudioService {
final FlutterSoundRecorder _recorder = FlutterSoundRecorder();
- final StreamController _controller = StreamController();
+ final StreamController _controller =
+ StreamController.broadcast();
+
+ StreamSubscription? _controllerSubscription;
bool _initialized = false;
+ bool _initializing = false;
+
+ Function(Uint8List)? _onPcm;
Future init() async {
- await Permission.microphone.request();
- await _recorder.openRecorder();
+ if (_initialized || _initializing) return;
- _controller.stream.listen((buffer) {
- _onPcm?.call(buffer);
- });
+ _initializing = true;
+ try {
+ await Permission.microphone.request();
+ await _recorder.openRecorder();
- _initialized = true;
- }
+ _controllerSubscription ??= _controller.stream.listen((buffer) {
+ _onPcm?.call(buffer);
+ });
- Function(Uint8List)? _onPcm;
+ _initialized = true;
+ } finally {
+ _initializing = false;
+ }
+ }
Future start(Function(Uint8List pcm) onPcm) async {
if (!_initialized) {
@@ -29,6 +41,10 @@ class PhoneAudioService {
_onPcm = onPcm;
+ if (_recorder.isRecording) {
+ return;
+ }
+
await _recorder.startRecorder(
codec: Codec.pcm16,
sampleRate: 16000,
@@ -44,6 +60,8 @@ class PhoneAudioService {
}
Future dispose() async {
+ await stop();
+ await _controllerSubscription?.cancel();
await _controller.close();
await _recorder.closeRecorder();
}
diff --git a/lib/services/rest_api_service.dart b/lib/services/rest_api_service.dart
new file mode 100644
index 0000000..69e5d77
--- /dev/null
+++ b/lib/services/rest_api_service.dart
@@ -0,0 +1,122 @@
+import 'dart:convert';
+
+import 'package:http/http.dart' as http;
+
+import '../models/api_models.dart';
+
+// Service was added to keep all REST API logic in one place.
+// The UI should ask this service for data instead of building HTTP requests itself.
+class RestApiService {
+ final String baseUrl;
+
+ const RestApiService({
+ // Uses the same API_URL idea as the WebSocket setup.
+ // Makes local device/emulator testing configurable.
+ this.baseUrl = const String.fromEnvironment(
+ 'API_URL',
+ defaultValue: '127.0.0.1:8000',
+ ),
+ });
+
+ Uri _uri(
+ String path, {
+ Map? queryParameters,
+ }) {
+ return Uri.parse('http://$baseUrl$path').replace(
+ queryParameters: queryParameters,
+ );
+ }
+
+// Added for the side panel category chip list.
+ Future> getCategories() async {
+ final res = await http.get(_uri('/get/categories'));
+ _checkStatus(res, 'GET /get/categories');
+
+ return (jsonDecode(res.body) as List)
+ .map((e) => Category.fromJson(e as Map))
+ .toList();
+ }
+
+// Added so that the side panel can create new categories from the inline form.
+ Future createCategory(String name) async {
+ final trimmed = name.trim();
+ if (trimmed.isEmpty) {
+ throw const ApiException(
+ statusCode: 0,
+ message: 'Name cannot be empty',
+ );
+ }
+
+ final res = await http.post(
+ _uri('/create/category', queryParameters: {'name': trimmed}),
+ );
+ _checkStatus(res, 'POST /create/category');
+
+ return Category.fromJson(jsonDecode(res.body) as Map);
+ }
+
+// Added for the conversations section.
+// Optional categoryId is used when filtering by the selected category chip.
+ Future> getConversations({int? categoryId}) async {
+ final params = categoryId != null ? {'cat_id': '$categoryId'} : null;
+
+ final res = await http.get(
+ _uri('/get/conversations', queryParameters: params),
+ );
+ _checkStatus(res, 'GET /get/conversations');
+
+ return (jsonDecode(res.body) as List)
+ .map((e) => Conversation.fromJson(e as Map))
+ .toList();
+ }
+
+// Added for the transcript/segments sections that appear when a conversation is selected in the side panel.
+ Future> getVectors(int conversationId) async {
+ final res = await http.get(
+ _uri('/get/vectors', queryParameters: {'conv_id': '$conversationId'}),
+ );
+ _checkStatus(res, 'GET /get/vectors?conv_id=$conversationId');
+
+ return (jsonDecode(res.body) as List)
+ .map((e) => ConversationVector.fromJson(e as Map))
+ .toList();
+ }
+
+ void _checkStatus(http.Response res, String label) {
+ if (res.statusCode >= 200 && res.statusCode < 300) return;
+
+// Added to surface useful backend error details in the UI instead of just showing a generic failed-request error.
+ String message = 'HTTP ${res.statusCode}';
+ try {
+ final decoded = jsonDecode(res.body);
+ if (decoded is Map) {
+ message =
+ (decoded['detail'] ?? decoded['message'] ?? message).toString();
+ } else if (decoded is String && decoded.isNotEmpty) {
+ message = decoded;
+ }
+ } catch (_) {
+ if (res.body.trim().isNotEmpty) {
+ message = res.body.trim();
+ }
+ }
+
+ throw ApiException(
+ statusCode: res.statusCode,
+ message: '[$label] $message',
+ );
+ }
+}
+
+class ApiException implements Exception {
+ final int statusCode;
+ final String message;
+
+ const ApiException({
+ required this.statusCode,
+ required this.message,
+ });
+
+ @override
+ String toString() => 'ApiException($statusCode): $message';
+}
diff --git a/lib/services/websocket_service.dart b/lib/services/websocket_service.dart
index f4555d2..67a49ee 100644
--- a/lib/services/websocket_service.dart
+++ b/lib/services/websocket_service.dart
@@ -1,32 +1,19 @@
import 'dart:async';
import 'dart:convert';
+
import 'package:flutter/foundation.dart';
import 'package:web_socket_channel/web_socket_channel.dart';
-/// Handles all communication with the backend over a WebSocket.
-///
-/// Responsibilities:
-/// - Connect/disconnect to `ws://:/ws/`
-/// - Send raw PCM audio bytes for speech-to-text
-/// - Send control commands (start/stop audio stream)
-/// - Receive and expose transcription results (committed + interim text)
-///
-/// Message protocol (JSON):
-/// Incoming:
-/// { "type": "control", "cmd": "ready" | "asr_started" | "asr_stopped" }
-/// { "type": "transcript", "data": { "status": "partial"|"final", "text": "..." } }
-/// { "type": "error", ... }
-/// Outgoing:
-/// { "type": "control", "cmd": "start" | "stop" }
-/// Raw PCM bytes (binary frame)
class WebsocketService {
final String baseUrl;
static const String defaultBaseUrl = '127.0.0.1:8000';
WebsocketService({String? baseUrl})
: baseUrl = baseUrl ??
- const String.fromEnvironment('API_URL',
- defaultValue: defaultBaseUrl) {
+ const String.fromEnvironment(
+ 'API_URL',
+ defaultValue: defaultBaseUrl,
+ ) {
if (baseUrl == null && const String.fromEnvironment('API_URL').isEmpty) {
debugPrint(
'WARNING: API_URL is not set; using default baseUrl=$defaultBaseUrl. '
@@ -38,13 +25,9 @@ class WebsocketService {
WebSocketChannel? _audioChannel;
final connected = ValueNotifier(false);
-
final committedText = ValueNotifier('');
final interimText = ValueNotifier('');
final aiResponse = ValueNotifier('');
-
- /// Whether the backend's ASR (speech recognition) engine is active.
- /// Can be used for UI indicator
final asrActive = ValueNotifier(false);
void clearCommittedText() {
@@ -55,7 +38,7 @@ class WebsocketService {
if (connected.value) return;
final Uri uri;
try {
- if (baseUrl.contains(":443")) {
+ if (baseUrl.contains(':443')) {
uri = Uri.parse('wss://$baseUrl/ws/');
} else {
uri = Uri.parse('ws://$baseUrl/ws/');
@@ -81,33 +64,36 @@ class WebsocketService {
debugPrint("→ Final/committed updated: ${committedText.value}");
}
} else if (type == 'control') {
- // Server signals readiness or ASR state changes
if (data['cmd'] == 'ready') {
connected.value = true;
} else if (data['cmd'] == 'asr_started') {
asrActive.value = true;
} else if (data['cmd'] == 'asr_stopped') {
asrActive.value = false;
+ } else if (data['cmd'] == 'calendar_context_received') {
+ debugPrint('WS: calendar context received by backend');
+ } else if (data['cmd'] == 'selected_category_received') {
+ debugPrint('WS: selected category received by backend');
}
} else if (type == 'ai') {
- String response = data['data'];
+ final String response = data['data'];
aiResponse.value = response;
debugPrint(response);
} else if (type == 'error') {
- //todo
+ debugPrint('WS ERROR: ${data['message']}');
}
},
onError: (_) => disconnect(),
onDone: () => disconnect(),
);
- } catch (e) {
+ } catch (_) {
await disconnect();
}
}
Future disconnect() async {
final channel = _audioChannel;
- _audioChannel = null; // Asetetaan heti nulliksi
+ _audioChannel = null;
connected.value = false;
try {
channel?.sink.add(jsonEncode({'type': 'control', 'cmd': 'stop'}));
@@ -120,10 +106,10 @@ class WebsocketService {
committedText.value = '';
interimText.value = '';
aiResponse.value = '';
+ asrActive.value = false;
}
}
- /// Send raw PCM audio bytes to the backend for transcription.
void sendAudio(Uint8List pcmData) {
if (connected.value) {
_audioChannel?.sink.add(pcmData);
@@ -136,14 +122,21 @@ class WebsocketService {
}
}
- /// Tell the backend to stop expecting audio data.
+ void sendSelectedCategory(int? categoryId) {
+ if (connected.value) {
+ _audioChannel?.sink.add(jsonEncode({
+ 'type': 'selected_category',
+ 'category_id': categoryId,
+ }));
+ }
+ }
+
Future stopAudioStream() async {
if (connected.value) {
_audioChannel?.sink.add(jsonEncode({'type': 'control', 'cmd': 'stop'}));
}
}
- /// Tell the backend to start expecting audio data.
Future startAudioStream() async {
if (connected.value) {
_audioChannel?.sink.add(jsonEncode({'type': 'control', 'cmd': 'start'}));
@@ -155,9 +148,6 @@ class WebsocketService {
}
void dispose() {
- // `disconnect()` is async and may touch ValueNotifiers after awaiting.
- // During dispose we must not schedule work that can run after notifiers
- // are disposed.
final channel = _audioChannel;
_audioChannel = null;
connected.value = false;
@@ -165,7 +155,7 @@ class WebsocketService {
channel?.sink.add(jsonEncode({'type': 'control', 'cmd': 'stop'}));
channel?.sink.close();
} catch (_) {
- // ignore: connection may already be closed
+ // Ignored: the connection may already be closed.
} finally {
committedText.value = '';
interimText.value = '';
diff --git a/lib/widgets/side_panel.dart b/lib/widgets/side_panel.dart
new file mode 100644
index 0000000..eb21857
--- /dev/null
+++ b/lib/widgets/side_panel.dart
@@ -0,0 +1,748 @@
+import 'package:flutter/material.dart';
+
+import '../models/api_models.dart';
+import '../services/rest_api_service.dart';
+
+class SidePanel extends StatefulWidget {
+ const SidePanel({
+ super.key,
+ required this.api,
+ required this.onCategorySelected,
+ });
+
+ final RestApiService api;
+ final ValueChanged onCategorySelected;
+
+ @override
+ State createState() => _SidePanelState();
+}
+
+class _SidePanelState extends State {
+ List _categories = [];
+ List _conversations = [];
+ List _vectors = [];
+
+ Category? _selectedCategory;
+ Conversation? _selectedConversation;
+
+ bool _loadingCategories = false;
+ bool _loadingConversations = false;
+ bool _loadingVectors = false;
+
+ String? _categoryError;
+ String? _conversationError;
+ String? _vectorError;
+
+ bool _showNewCategoryField = false;
+ final _newCatController = TextEditingController();
+ final _newCatFocusNode = FocusNode();
+ bool _creatingCategory = false;
+ String? _createCategoryError;
+
+ @override
+ void initState() {
+ super.initState();
+ _loadCategories();
+ _loadConversations();
+ }
+
+ @override
+ void dispose() {
+ _newCatController.dispose();
+ _newCatFocusNode.dispose();
+ super.dispose();
+ }
+
+ Future _loadCategories() async {
+ setState(() {
+ _loadingCategories = true;
+ _categoryError = null;
+ });
+
+ try {
+ final cats = await widget.api.getCategories();
+ if (!mounted) return;
+ setState(() => _categories = cats);
+ } catch (e) {
+ if (!mounted) return;
+ setState(() => _categoryError = 'Could not load categories — $e');
+ } finally {
+ if (mounted) {
+ setState(() => _loadingCategories = false);
+ }
+ }
+ }
+
+ Future _loadConversations({int? categoryId}) async {
+ setState(() {
+ _loadingConversations = true;
+ _conversationError = null;
+ _selectedConversation = null;
+ _vectors = [];
+ _vectorError = null;
+ });
+
+ try {
+ final convs = await widget.api.getConversations(categoryId: categoryId);
+ if (!mounted) return;
+ setState(() => _conversations = convs);
+ } catch (e) {
+ if (!mounted) return;
+ setState(() => _conversationError = 'Could not load conversations — $e');
+ } finally {
+ if (mounted) {
+ setState(() => _loadingConversations = false);
+ }
+ }
+ }
+
+ Future _loadVectors(int conversationId) async {
+ setState(() {
+ _loadingVectors = true;
+ _vectorError = null;
+ _vectors = [];
+ });
+
+ try {
+ final vecs = await widget.api.getVectors(conversationId);
+ if (!mounted) return;
+ setState(() => _vectors = vecs);
+ } catch (e) {
+ if (!mounted) return;
+ setState(() => _vectorError = 'Could not load transcripts — $e');
+ } finally {
+ if (mounted) {
+ setState(() => _loadingVectors = false);
+ }
+ }
+ }
+
+ Future _submitNewCategory() async {
+ final name = _newCatController.text.trim();
+ if (name.isEmpty) return;
+
+ setState(() {
+ _creatingCategory = true;
+ _createCategoryError = null;
+ });
+
+ try {
+ await widget.api.createCategory(name);
+ if (!mounted) return;
+
+ await _loadCategories();
+ if (!mounted) return;
+
+ final createdCategory = _categories.cast().firstWhere(
+ (category) =>
+ category?.name.trim().toLowerCase() == name.toLowerCase(),
+ orElse: () => null,
+ );
+
+ _newCatController.clear();
+ FocusScope.of(context).unfocus();
+
+ setState(() {
+ _showNewCategoryField = false;
+ _selectedCategory = createdCategory;
+ });
+
+ widget.onCategorySelected(createdCategory);
+ await _loadConversations(categoryId: createdCategory?.id);
+ } on ApiException catch (e) {
+ if (!mounted) return;
+ setState(() {
+ _createCategoryError =
+ e.statusCode == 409 ? '"$name" already exists' : e.message;
+ });
+ } catch (e) {
+ if (!mounted) return;
+ setState(() => _createCategoryError = e.toString());
+ } finally {
+ if (mounted) {
+ setState(() => _creatingCategory = false);
+ }
+ }
+ }
+
+ Future _refreshAll() async {
+ final selectedCategoryId = _selectedCategory?.id;
+ final selectedConversationId = _selectedConversation?.id;
+
+ await _loadCategories();
+ await _loadConversations(categoryId: selectedCategoryId);
+
+ if (!mounted) return;
+
+ if (selectedConversationId != null) {
+ final restoredConversation =
+ _conversations.cast().firstWhere(
+ (conversation) => conversation?.id == selectedConversationId,
+ orElse: () => null,
+ );
+
+ if (restoredConversation != null) {
+ setState(() {
+ _selectedConversation = restoredConversation;
+ });
+ await _loadVectors(selectedConversationId);
+ }
+ }
+ }
+
+ void _selectCategory(Category? cat) {
+ setState(() => _selectedCategory = cat);
+ widget.onCategorySelected(cat);
+ _loadConversations(categoryId: cat?.id);
+ }
+
+ void _selectConversation(Conversation conv) {
+ if (_selectedConversation?.id == conv.id) {
+ setState(() {
+ _selectedConversation = null;
+ _vectors = [];
+ });
+ } else {
+ setState(() => _selectedConversation = conv);
+ _loadVectors(conv.id);
+ }
+ }
+
+ String _formatDate(DateTime? dt) {
+ if (dt == null) return '—';
+ final local = dt.toLocal();
+ return '${local.day}.${local.month}.${local.year} '
+ '${local.hour.toString().padLeft(2, '0')}:'
+ '${local.minute.toString().padLeft(2, '0')}';
+ }
+
+ void _showSummarySheet(BuildContext context, Conversation conv) {
+ final summary = conv.summary.trim();
+ if (summary.isEmpty) return;
+
+ showModalBottomSheet(
+ context: context,
+ isScrollControlled: true,
+ shape: const RoundedRectangleBorder(
+ borderRadius: BorderRadius.vertical(top: Radius.circular(16)),
+ ),
+ builder: (_) => DraggableScrollableSheet(
+ initialChildSize: 0.5,
+ minChildSize: 0.3,
+ maxChildSize: 0.85,
+ expand: false,
+ builder: (_, scrollController) => Column(
+ crossAxisAlignment: CrossAxisAlignment.start,
+ children: [
+ Center(
+ child: Container(
+ margin: const EdgeInsets.only(top: 12, bottom: 8),
+ width: 36,
+ height: 4,
+ decoration: BoxDecoration(
+ color: Colors.grey[300],
+ borderRadius: BorderRadius.circular(2),
+ ),
+ ),
+ ),
+ Padding(
+ padding: const EdgeInsets.fromLTRB(20, 4, 12, 0),
+ child: Row(
+ children: [
+ const Icon(
+ Icons.summarize_outlined,
+ size: 18,
+ color: Color(0xFF00239D),
+ ),
+ const SizedBox(width: 8),
+ Expanded(
+ child: Text(
+ conv.name,
+ maxLines: 1,
+ overflow: TextOverflow.ellipsis,
+ style: const TextStyle(
+ fontSize: 16,
+ fontWeight: FontWeight.bold,
+ color: Color(0xFF00239D),
+ ),
+ ),
+ ),
+ IconButton(
+ icon: const Icon(Icons.close, size: 20),
+ onPressed: () => Navigator.of(context).pop(),
+ ),
+ ],
+ ),
+ ),
+ Padding(
+ padding: const EdgeInsets.fromLTRB(20, 2, 20, 12),
+ child: Text(
+ _formatDate(conv.timestamp),
+ style: const TextStyle(fontSize: 11, color: Colors.grey),
+ ),
+ ),
+ const Divider(height: 1),
+ Expanded(
+ child: SingleChildScrollView(
+ controller: scrollController,
+ padding: const EdgeInsets.fromLTRB(20, 16, 20, 32),
+ child: Text(
+ summary,
+ style: TextStyle(
+ fontSize: 14,
+ color: Colors.grey[800],
+ height: 1.6,
+ ),
+ ),
+ ),
+ ),
+ ],
+ ),
+ ),
+ );
+ }
+
+ @override
+ Widget build(BuildContext context) {
+ final keyboardBottom = MediaQuery.of(context).viewInsets.bottom;
+
+ return Drawer(
+ child: SafeArea(
+ child: AnimatedPadding(
+ duration: const Duration(milliseconds: 180),
+ curve: Curves.easeOut,
+ padding: EdgeInsets.only(bottom: keyboardBottom),
+ child: Column(
+ crossAxisAlignment: CrossAxisAlignment.start,
+ children: [
+ _buildHeader(context),
+ Expanded(
+ child: RefreshIndicator(
+ onRefresh: _refreshAll,
+ child: ListView(
+ keyboardDismissBehavior:
+ ScrollViewKeyboardDismissBehavior.onDrag,
+ padding: EdgeInsets.zero,
+ children: [
+ _buildSectionLabel('Categories'),
+ _buildCategoryChips(),
+ _buildNewCategoryRow(),
+ const Divider(height: 24),
+ _buildSectionLabel('Conversations'),
+ _buildConversationList(),
+ if (_selectedConversation != null) ...[
+ const Divider(height: 24),
+ _buildSectionLabel('Transcripts'),
+ _buildVectorList(),
+ ],
+ const SizedBox(height: 24),
+ ],
+ ),
+ ),
+ ),
+ ],
+ ),
+ ),
+ ),
+ );
+ }
+
+ Widget _buildHeader(BuildContext context) {
+ return Container(
+ width: double.infinity,
+ color: const Color(0xFF00239D),
+ padding: const EdgeInsets.fromLTRB(16, 20, 8, 16),
+ child: Row(
+ children: [
+ const Expanded(
+ child: Text(
+ 'History',
+ style: TextStyle(
+ color: Colors.white,
+ fontSize: 20,
+ fontWeight: FontWeight.bold,
+ ),
+ ),
+ ),
+ IconButton(
+ icon: const Icon(Icons.refresh, color: Colors.white),
+ tooltip: 'Refresh',
+ onPressed:
+ (_loadingCategories || _loadingConversations || _loadingVectors)
+ ? null
+ : _refreshAll,
+ ),
+ IconButton(
+ icon: const Icon(Icons.close, color: Colors.white),
+ onPressed: () => Navigator.of(context).pop(),
+ ),
+ ],
+ ),
+ );
+ }
+
+ Widget _buildSectionLabel(String text) {
+ return Padding(
+ padding: const EdgeInsets.fromLTRB(16, 8, 16, 4),
+ child: Text(
+ text.toUpperCase(),
+ style: TextStyle(
+ fontSize: 11,
+ fontWeight: FontWeight.w700,
+ letterSpacing: 1.1,
+ color: Colors.grey[600],
+ ),
+ ),
+ );
+ }
+
+ Widget _buildCategoryChips() {
+ if (_loadingCategories) {
+ return const Padding(
+ padding: EdgeInsets.symmetric(horizontal: 16, vertical: 8),
+ child: SizedBox(
+ width: 20,
+ height: 20,
+ child: CircularProgressIndicator(strokeWidth: 2),
+ ),
+ );
+ }
+
+ if (_categoryError != null) {
+ return _ErrorRow(
+ message: _categoryError!,
+ onRetry: _loadCategories,
+ );
+ }
+
+ return SingleChildScrollView(
+ scrollDirection: Axis.horizontal,
+ padding: const EdgeInsets.symmetric(horizontal: 16, vertical: 4),
+ child: Row(
+ children: [
+ Padding(
+ padding: const EdgeInsets.only(right: 6),
+ child: FilterChip(
+ label: const Text('All'),
+ selected: _selectedCategory == null,
+ onSelected: (_) => _selectCategory(null),
+ ),
+ ),
+ ..._categories.map(
+ (cat) => Padding(
+ padding: const EdgeInsets.only(right: 6),
+ child: FilterChip(
+ label: Text(cat.name),
+ selected: _selectedCategory?.id == cat.id,
+ onSelected: (_) => _selectCategory(cat),
+ ),
+ ),
+ ),
+ ActionChip(
+ avatar: const Icon(Icons.add, size: 16),
+ label: const Text('New'),
+ onPressed: () {
+ setState(() {
+ _showNewCategoryField = !_showNewCategoryField;
+ _createCategoryError = null;
+ });
+
+ if (_showNewCategoryField) {
+ WidgetsBinding.instance.addPostFrameCallback((_) {
+ if (mounted) {
+ _newCatFocusNode.requestFocus();
+ }
+ });
+ } else {
+ FocusScope.of(context).unfocus();
+ }
+ },
+ ),
+ ],
+ ),
+ );
+ }
+
+ Widget _buildNewCategoryRow() {
+ if (!_showNewCategoryField) return const SizedBox.shrink();
+
+ return Padding(
+ padding: const EdgeInsets.fromLTRB(16, 4, 16, 8),
+ child: Column(
+ crossAxisAlignment: CrossAxisAlignment.start,
+ mainAxisSize: MainAxisSize.min,
+ children: [
+ Row(
+ children: [
+ Expanded(
+ child: TextField(
+ controller: _newCatController,
+ focusNode: _newCatFocusNode,
+ autofocus: true,
+ textCapitalization: TextCapitalization.sentences,
+ textInputAction: TextInputAction.done,
+ decoration: const InputDecoration(
+ hintText: 'Category name',
+ isDense: true,
+ border: OutlineInputBorder(),
+ contentPadding:
+ EdgeInsets.symmetric(horizontal: 10, vertical: 8),
+ ),
+ onSubmitted: (_) => _submitNewCategory(),
+ ),
+ ),
+ const SizedBox(width: 8),
+ _creatingCategory
+ ? const SizedBox(
+ width: 24,
+ height: 24,
+ child: CircularProgressIndicator(strokeWidth: 2),
+ )
+ : IconButton(
+ icon: const Icon(
+ Icons.check,
+ color: Color(0xFF00239D),
+ ),
+ tooltip: 'Create',
+ onPressed: _submitNewCategory,
+ ),
+ IconButton(
+ icon: const Icon(Icons.close, size: 18),
+ tooltip: 'Cancel',
+ onPressed: () => setState(() {
+ _showNewCategoryField = false;
+ _newCatController.clear();
+ _createCategoryError = null;
+ FocusScope.of(context).unfocus();
+ }),
+ ),
+ ],
+ ),
+ if (_createCategoryError != null)
+ Padding(
+ padding: const EdgeInsets.only(top: 4),
+ child: Text(
+ _createCategoryError!,
+ style: const TextStyle(color: Colors.red, fontSize: 12),
+ ),
+ ),
+ ],
+ ),
+ );
+ }
+
+ Widget _buildConversationList() {
+ if (_loadingConversations) {
+ return const Padding(
+ padding: EdgeInsets.symmetric(horizontal: 16, vertical: 12),
+ child: Center(child: CircularProgressIndicator()),
+ );
+ }
+
+ if (_conversationError != null) {
+ return _ErrorRow(
+ message: _conversationError!,
+ onRetry: () => _loadConversations(categoryId: _selectedCategory?.id),
+ );
+ }
+
+ if (_conversations.isEmpty) {
+ return const Padding(
+ padding: EdgeInsets.symmetric(horizontal: 16, vertical: 12),
+ child: Text(
+ 'No conversations yet.',
+ style: TextStyle(color: Colors.grey),
+ ),
+ );
+ }
+
+ return Column(
+ children: _conversations.map((conv) {
+ final isSelected = _selectedConversation?.id == conv.id;
+ final hasSummary = conv.summary.trim().isNotEmpty;
+
+ return Column(
+ crossAxisAlignment: CrossAxisAlignment.start,
+ children: [
+ ListTile(
+ dense: true,
+ selected: isSelected,
+ selectedTileColor: const Color(0xFF00239D).withAlpha(20),
+ leading: Icon(
+ Icons.chat_bubble_outline,
+ size: 18,
+ color: isSelected ? const Color(0xFF00239D) : Colors.grey[600],
+ ),
+ title: Text(
+ conv.name,
+ style: TextStyle(
+ fontSize: 14,
+ fontWeight: isSelected ? FontWeight.w600 : FontWeight.normal,
+ ),
+ ),
+ subtitle: Text(
+ _formatDate(conv.timestamp),
+ style: const TextStyle(fontSize: 11, color: Colors.grey),
+ ),
+ trailing: Icon(
+ isSelected ? Icons.expand_less : Icons.chevron_right,
+ size: 18,
+ ),
+ onTap: () => _selectConversation(conv),
+ ),
+ if (hasSummary)
+ Padding(
+ padding: const EdgeInsets.fromLTRB(56, 0, 16, 8),
+ child: InkWell(
+ onTap: isSelected
+ ? () => _showSummarySheet(context, conv)
+ : null,
+ borderRadius: BorderRadius.circular(6),
+ child: Container(
+ width: double.infinity,
+ padding:
+ const EdgeInsets.symmetric(horizontal: 8, vertical: 6),
+ decoration: BoxDecoration(
+ color: isSelected
+ ? const Color(0xFF00239D).withAlpha(10)
+ : Colors.transparent,
+ borderRadius: BorderRadius.circular(6),
+ border: Border.all(
+ color: isSelected
+ ? const Color(0xFF00239D).withAlpha(31)
+ : Colors.transparent,
+ ),
+ ),
+ child: Row(
+ crossAxisAlignment: CrossAxisAlignment.start,
+ children: [
+ Expanded(
+ child: Text(
+ conv.summary,
+ maxLines: 2,
+ overflow: TextOverflow.ellipsis,
+ style: TextStyle(
+ fontSize: 12,
+ color: isSelected
+ ? Colors.grey[700]
+ : Colors.grey[500],
+ fontStyle: FontStyle.italic,
+ height: 1.4,
+ ),
+ ),
+ ),
+ if (isSelected) ...[
+ const SizedBox(width: 4),
+ Icon(
+ Icons.open_in_full,
+ size: 13,
+ color: Colors.grey[400],
+ ),
+ ],
+ ],
+ ),
+ ),
+ ),
+ ),
+ ],
+ );
+ }).toList(),
+ );
+ }
+
+ Widget _buildVectorList() {
+ if (_loadingVectors) {
+ return const Padding(
+ padding: EdgeInsets.symmetric(horizontal: 16, vertical: 12),
+ child: Center(child: CircularProgressIndicator()),
+ );
+ }
+
+ if (_vectorError != null) {
+ return _ErrorRow(
+ message: _vectorError!,
+ onRetry: () => _loadVectors(_selectedConversation!.id),
+ );
+ }
+
+ if (_vectors.isEmpty) {
+ return const Padding(
+ padding: EdgeInsets.symmetric(horizontal: 16, vertical: 8),
+ child: Text(
+ 'No transcript segments for this conversation.',
+ style: TextStyle(color: Colors.grey, fontSize: 13),
+ ),
+ );
+ }
+
+ return Column(
+ children: _vectors.asMap().entries.map((entry) {
+ final vec = entry.value;
+ final i = entry.key;
+ return Padding(
+ padding: const EdgeInsets.symmetric(horizontal: 16, vertical: 4),
+ child: Container(
+ width: double.infinity,
+ padding: const EdgeInsets.all(10),
+ decoration: BoxDecoration(
+ color: Colors.grey[100],
+ borderRadius: BorderRadius.circular(8),
+ ),
+ child: Column(
+ crossAxisAlignment: CrossAxisAlignment.start,
+ children: [
+ Text(
+ 'Segment ${i + 1}',
+ style: TextStyle(
+ fontSize: 10,
+ fontWeight: FontWeight.w600,
+ color: Colors.grey[500],
+ letterSpacing: 0.8,
+ ),
+ ),
+ const SizedBox(height: 4),
+ SelectableText(
+ vec.text,
+ style: const TextStyle(fontSize: 13),
+ ),
+ ],
+ ),
+ ),
+ );
+ }).toList(),
+ );
+ }
+}
+
+class _ErrorRow extends StatelessWidget {
+ const _ErrorRow({
+ required this.message,
+ required this.onRetry,
+ });
+
+ final String message;
+ final VoidCallback onRetry;
+
+ @override
+ Widget build(BuildContext context) {
+ return Padding(
+ padding: const EdgeInsets.symmetric(horizontal: 16, vertical: 8),
+ child: Row(
+ children: [
+ const Icon(Icons.error_outline, size: 16, color: Colors.red),
+ const SizedBox(width: 8),
+ Expanded(
+ child: Text(
+ message,
+ style: const TextStyle(color: Colors.red, fontSize: 12),
+ ),
+ ),
+ TextButton(
+ onPressed: onRetry,
+ child: const Text('Retry'),
+ ),
+ ],
+ ),
+ );
+ }
+}
diff --git a/test/mocks/fake_rest_api_service.dart b/test/mocks/fake_rest_api_service.dart
new file mode 100644
index 0000000..7448e87
--- /dev/null
+++ b/test/mocks/fake_rest_api_service.dart
@@ -0,0 +1,106 @@
+import 'package:front/models/api_models.dart';
+import 'package:front/services/rest_api_service.dart';
+
+/// In-memory fake RestApiService for widget tests.
+
+class FakeRestApiService implements RestApiService {
+ FakeRestApiService({
+ List? categories,
+ List? conversations,
+ List? vectors,
+ }) : _categories = List.from(categories ?? const []),
+ _conversations = List.from(conversations ?? const []),
+ _vectors = List.from(vectors ?? const []) {
+ for (final category in _categories) {
+ if (category.id >= _nextCategoryId) {
+ _nextCategoryId = category.id + 1;
+ }
+ }
+ }
+
+ final List _categories;
+ final List _conversations;
+ final List _vectors;
+
+ int _nextCategoryId = 1;
+
+ @override
+ Future> getCategories() async {
+ return List.unmodifiable(_categories);
+ }
+
+ @override
+ Future> getConversations({int? categoryId}) async {
+ if (categoryId == null) {
+ return List.unmodifiable(_conversations);
+ }
+
+ return _conversations
+ .where((conversation) => conversation.categoryId == categoryId)
+ .toList(growable: false);
+ }
+
+ @override
+ Future> getVectors(int conversationId) async {
+ return _vectors
+ .where((vector) => vector.conversationId == conversationId)
+ .toList(growable: false);
+ }
+
+ @override
+ Future createCategory(String name) async {
+ final trimmed = name.trim();
+
+ if (trimmed.isEmpty) {
+ throw const ApiException(
+ statusCode: 0,
+ message: 'Name cannot be empty',
+ );
+ }
+
+ final alreadyExists = _categories.any(
+ (category) => category.name.toLowerCase() == trimmed.toLowerCase(),
+ );
+
+ if (alreadyExists) {
+ throw const ApiException(
+ statusCode: 409,
+ message: 'Category already exists',
+ );
+ }
+
+ final category = Category(
+ id: _nextCategoryId++,
+ name: trimmed,
+ );
+ _categories.add(category);
+ return category;
+ }
+
+ // Test helpers
+
+ void addCategory(Category category) {
+ _categories.add(category);
+ if (category.id >= _nextCategoryId) {
+ _nextCategoryId = category.id + 1;
+ }
+ }
+
+ void addConversation(Conversation conversation) {
+ _conversations.add(conversation);
+ }
+
+ void addVector(ConversationVector vector) {
+ _vectors.add(vector);
+ }
+
+ void clearAll() {
+ _categories.clear();
+ _conversations.clear();
+ _vectors.clear();
+ _nextCategoryId = 1;
+ }
+
+ @override
+ dynamic noSuchMethod(Invocation invocation) => super.noSuchMethod(invocation);
+}
diff --git a/test/mocks/fake_websocket_service.dart b/test/mocks/fake_websocket_service.dart
new file mode 100644
index 0000000..d75a558
--- /dev/null
+++ b/test/mocks/fake_websocket_service.dart
@@ -0,0 +1,103 @@
+import 'package:flutter/foundation.dart';
+import 'package:front/services/websocket_service.dart';
+
+class FakeWebsocketService implements WebsocketService {
+ bool _disposed = false;
+
+ @override
+ final ValueNotifier connected = ValueNotifier(false);
+
+ @override
+ final ValueNotifier committedText = ValueNotifier('');
+
+ @override
+ final ValueNotifier interimText = ValueNotifier('');
+
+ @override
+ final ValueNotifier asrActive = ValueNotifier(false);
+
+ @override
+ final ValueNotifier aiResponse = ValueNotifier('');
+
+ bool audioStreamStarted = false;
+ final List> sentAudioChunks = [];
+
+ @override
+ Future connect() async {
+ connected.value = true;
+ }
+
+ @override
+ Future disconnect() async {
+ connected.value = false;
+ asrActive.value = false;
+ committedText.value = '';
+ interimText.value = '';
+ aiResponse.value = '';
+ audioStreamStarted = false;
+ }
+
+ @override
+ Future startAudioStream() async {
+ audioStreamStarted = true;
+ asrActive.value = true;
+ }
+
+ @override
+ Future stopAudioStream() async {
+ audioStreamStarted = false;
+ asrActive.value = false;
+ }
+
+ @override
+ void sendAudio(List pcm) {
+ sentAudioChunks.add(pcm);
+ }
+
+ @override
+ void clearCommittedText() {
+ committedText.value = '';
+ interimText.value = '';
+ }
+
+ @override
+ String getFullText() => [committedText.value, interimText.value]
+ .where((s) => s.isNotEmpty)
+ .join(' ');
+
+ // Test helper methods
+ void setConnected(bool value) {
+ connected.value = value;
+ }
+
+ void setCommittedText(String text) {
+ committedText.value = text;
+ }
+
+ void setInterimText(String text) {
+ interimText.value = text;
+ }
+
+ void setAiResponse(String text) {
+ aiResponse.value = text;
+ }
+
+ void setAsrActive(bool value) {
+ asrActive.value = value;
+ }
+
+ @override
+ void dispose() {
+ if (_disposed) return;
+ _disposed = true;
+
+ connected.dispose();
+ committedText.dispose();
+ interimText.dispose();
+ asrActive.dispose();
+ aiResponse.dispose();
+ }
+
+ @override
+ dynamic noSuchMethod(Invocation invocation) => super.noSuchMethod(invocation);
+}
diff --git a/test/widget_test.dart b/test/widget_test.dart
index d9ff236..0cb793b 100644
--- a/test/widget_test.dart
+++ b/test/widget_test.dart
@@ -1,24 +1,20 @@
import 'package:even_realities_g1/even_realities_g1.dart';
import 'package:flutter/material.dart';
import 'package:flutter_test/flutter_test.dart';
-import 'ble_mock/g1_manager_mock.dart';
-import 'package:front/screens/landing_screen.dart';
-import 'package:front/services/websocket_service.dart';
-class TestWebsocketService extends WebsocketService {
- TestWebsocketService() : super(baseUrl: 'test');
+import 'package:front/screens/landing_screen.dart';
- @override
- Future connect() async {
- connected.value = true;
- }
-}
+import 'ble_mock/g1_manager_mock.dart';
+import 'mocks/fake_rest_api_service.dart';
+import 'mocks/fake_websocket_service.dart';
void main() {
late MockG1Manager mockManager;
+ late FakeWebsocketService fakeWs;
setUp(() {
mockManager = MockG1Manager();
+ fakeWs = FakeWebsocketService();
});
tearDown(() {
@@ -30,9 +26,12 @@ void main() {
MaterialApp(
home: LandingScreen(
manager: mockManager,
+ ws: fakeWs,
+ api: FakeRestApiService(),
),
),
);
+ await tester.pump();
}
Future disposeLanding(WidgetTester tester) async {
@@ -45,8 +44,6 @@ void main() {
await pumpLanding(tester);
expect(find.text('Even realities G1 smart glasses'), findsOneWidget);
- expect(find.text('Connect to glasses'), findsOneWidget);
- expect(find.text('Switch to phone mic'), findsOneWidget);
expect(find.text('Start\nRecording'), findsOneWidget);
expect(find.text('Mute display'), findsOneWidget);
expect(find.text('Sign in'), findsOneWidget);
@@ -55,175 +52,130 @@ void main() {
await disposeLanding(tester);
});
- testWidgets('No text is sent to glasses when display is muted',
- (WidgetTester tester) async {
- final ws = TestWebsocketService();
- mockManager.setConnected(true);
- await mockManager.transcription.start();
-
- await tester.pumpWidget(
- MaterialApp(
- home: LandingScreen(
- manager: mockManager,
- ws: ws,
- ),
- ),
- );
- await tester.pump();
-
- await tester.tap(find.text('Mute display'));
- await tester.pump();
- expect(find.text('Unmute display'), findsOneWidget);
-
- (mockManager.transcription as MockG1Transcription).clearDisplayCalls();
-
- ws.aiResponse.value = 'Hello from backend';
- await tester.pump();
-
- final tx = mockManager.transcription as MockG1Transcription;
- expect(tx.displayTextCalls, isEmpty);
- expect(tx.displayLinesCalls, isEmpty);
-
+ testWidgets('Landing screen shows menu icon button', (tester) async {
+ await pumpLanding(tester);
+ expect(find.byIcon(Icons.menu), findsOneWidget);
await disposeLanding(tester);
});
- testWidgets('Text is sent to glasses when display is not muted',
- (WidgetTester tester) async {
- final ws = TestWebsocketService();
- mockManager.setConnected(true);
- await mockManager.transcription.start();
-
- await tester.pumpWidget(
- MaterialApp(
- home: LandingScreen(
- manager: mockManager,
- ws: ws,
- ),
- ),
- );
- await tester.pump();
-
- final tx = mockManager.transcription as MockG1Transcription;
- tx.clearDisplayCalls();
-
- ws.aiResponse.value = 'Hello from backend';
- await tester.pump();
-
- expect(tx.displayLinesCalls, isNotEmpty);
- expect(tx.displayLinesCalls.last, contains('Hello from backend'));
-
+ testWidgets('Landing screen shows Sign in and Register links',
+ (tester) async {
+ await pumpLanding(tester);
+ expect(find.text('Sign in'), findsOneWidget);
+ expect(find.text('Register'), findsOneWidget);
await disposeLanding(tester);
- // LandingScreen schedules a 10s cleanup timer per sentence.
- // Advance time so the timer fires and doesn't remain pending.
- await tester.pump(const Duration(seconds: 11));
});
- testWidgets('Connecting to glasses text is shown when bluetooth is scanning',
+ testWidgets('Shows Reconnect button when WebSocket is disconnected',
(tester) async {
await pumpLanding(tester);
-
- mockManager.emitState(
- const G1ConnectionEvent(state: G1ConnectionState.connecting));
-
+ fakeWs.setConnected(false);
await tester.pump();
-
- expect(find.text('Connecting to glasses'), findsOneWidget);
- expect(find.byType(CircularProgressIndicator), findsOneWidget);
-
+ expect(find.byIcon(Icons.refresh), findsOneWidget);
await disposeLanding(tester);
});
- testWidgets('Disconnect from glasses button is shown', (tester) async {
+ testWidgets('Shows Connected indicator when WebSocket is connected',
+ (tester) async {
+ fakeWs.setConnected(true);
await pumpLanding(tester);
-
- mockManager.emitState(
- const G1ConnectionEvent(state: G1ConnectionState.disconnected));
-
await tester.pump();
+ expect(find.byIcon(Icons.signal_cellular_alt), findsOneWidget);
+ await disposeLanding(tester);
+ });
- expect(find.text('Connect to glasses'), findsOneWidget);
-
+ testWidgets('Mic toggle shows switch option', (tester) async {
+ await pumpLanding(tester);
+ expect(find.text('Switch to phone mic'), findsOneWidget);
await disposeLanding(tester);
});
- testWidgets('On connecting error right error message is shown',
+ testWidgets('Tapping mic toggle switches to glasses mic option',
(tester) async {
await pumpLanding(tester);
-
- mockManager
- .emitState(const G1ConnectionEvent(state: G1ConnectionState.error));
-
+ await tester.tap(find.text('Switch to phone mic'));
await tester.pump();
+ expect(find.text('Switch to glasses mic'), findsOneWidget);
+ await disposeLanding(tester);
+ });
- expect(find.text('Error in connecting to glasses'), findsOneWidget);
- expect(find.text('Connect to glasses'), findsOneWidget);
-
+ testWidgets('Tapping menu icon opens the side panel drawer', (tester) async {
+ await pumpLanding(tester);
+ await tester.tap(find.byIcon(Icons.menu));
+ await tester.pumpAndSettle();
+ expect(find.text('History'), findsOneWidget);
await disposeLanding(tester);
});
- testWidgets('On scanning Scanning for glasses message is shown',
- (tester) async {
+ testWidgets('No text is sent to glasses when display is muted',
+ (WidgetTester tester) async {
+ mockManager.setConnected(true);
+ await mockManager.transcription.start();
+
await pumpLanding(tester);
- mockManager
- .emitState(const G1ConnectionEvent(state: G1ConnectionState.scanning));
+ await tester.tap(find.text('Mute display'));
+ await tester.pump();
+ expect(find.text('Unmute display'), findsOneWidget);
+
+ (mockManager.transcription as MockG1Transcription).clearDisplayCalls();
+ fakeWs.aiResponse.value = 'Hello from backend';
await tester.pump();
- expect(find.text('Searching for glasses'), findsOneWidget);
+ final tx = mockManager.transcription as MockG1Transcription;
+ expect(tx.displayTextCalls, isEmpty);
+ expect(tx.displayLinesCalls, isEmpty);
await disposeLanding(tester);
});
- testWidgets('When connected show right text', (tester) async {
+ testWidgets('Text is sent to glasses when display is not muted',
+ (WidgetTester tester) async {
+ mockManager.setConnected(true);
+ await mockManager.transcription.start();
+
await pumpLanding(tester);
- mockManager
- .emitState(const G1ConnectionEvent(state: G1ConnectionState.connected));
+ final tx = mockManager.transcription as MockG1Transcription;
+ tx.clearDisplayCalls();
+ fakeWs.aiResponse.value = 'Hello from backend';
await tester.pump();
- expect(find.text('Connected'), findsOneWidget);
+ expect(tx.displayLinesCalls, isNotEmpty);
+ expect(tx.displayLinesCalls.last, contains('Hello from backend'));
await disposeLanding(tester);
+ await tester.pump(const Duration(seconds: 11));
});
- testWidgets('Shows scanning state when connecting', (tester) async {
+ testWidgets('Connect to glasses button is shown when disconnected',
+ (tester) async {
await pumpLanding(tester);
- mockManager
- .emitState(const G1ConnectionEvent(state: G1ConnectionState.scanning));
- await tester.pump();
- expect(find.text('Searching for glasses'), findsOneWidget);
-
mockManager.emitState(
- const G1ConnectionEvent(state: G1ConnectionState.connecting));
- await tester.pump();
- expect(find.text('Connecting to glasses'), findsOneWidget);
+ const G1ConnectionEvent(state: G1ConnectionState.disconnected),
+ );
- mockManager
- .emitState(const G1ConnectionEvent(state: G1ConnectionState.connected));
await tester.pump();
- expect(find.text('Connected'), findsOneWidget);
+
+ expect(find.text('Connect to glasses'), findsOneWidget);
await disposeLanding(tester);
});
test('Can send text to glasses when connected', () async {
mockManager.setConnected(true);
-
await mockManager.sendTextToGlasses('test');
-
final mockDisplay = mockManager.display as MockG1Display;
expect(mockDisplay.getText, contains('test'));
});
test('Cannot send text to glasses when not connected', () async {
mockManager.setConnected(false);
-
await mockManager.sendTextToGlasses('test');
-
final mockDisplay = mockManager.display as MockG1Display;
- expect(mockDisplay.getText, []);
+ expect(mockDisplay.getText, isEmpty);
});
}